const parseURL = require("url").parse
const startOfDay = require("date-fns/start_of_day")
const addDays = require("date-fns/add_days")

const parsePackageURL = require("./utils/parsePackageURL")
const CloudflareAPI = require("./CloudflareAPI")
const StatsAPI = require("./StatsAPI")
const db = require("./RedisClient")

/**
 * Domains we want to analyze.
 */
const DomainNames = [
  "unpkg.com"
  //'npmcdn.com' // We don't have log data on npmcdn.com yet :/
]

/**
 * The window of time to download in a single fetch, in seconds.
 */
const LogWindowSeconds = 30
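
// A small window keeps each Cloudflare response to a manageable size. When the
// worker falls behind, the loop in startZone below re-schedules itself with no
// delay, catching up one window at a time.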

function getSeconds(date) {
  return Math.floor(date.getTime() / 1000)
}

function stringifySeconds(seconds) {
  return new Date(seconds * 1000).toISOString()
}

function toSeconds(millis) {
  return Math.floor(millis / 1000)
}

const oneSecond = 1000
const oneMinute = oneSecond * 60
const oneHour = oneMinute * 60
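
// A sketch of the log entry shape this worker relies on (the field names are
// the ones read below; the values here are hypothetical):
//
//   {
//     timestamp: 1511645101000000000, // nanoseconds since the epoch
//     clientRequest: {
//       uri: "https://unpkg.com/react@16.1.1/index.js",
//       httpProtocol: "HTTP/2",
//       referer: "https://example.com/"
//     },
//     edgeResponse: { status: 200, bytes: 12341 }
//   }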

function computeCounters(stream) {
  return new Promise((resolve, reject) => {
    const counters = {}
    const expireat = {}

    // Add `by` to the count for `member` under `key`, and remember when the
    // key should expire.
    function incr(key, member, by, expiry) {
      counters[key] = counters[key] || {}
      counters[key][member] = (counters[key][member] || 0) + by
      expireat[key] = expiry
    }
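
    // For illustration, after two 200 responses for the "react" package on one
    // day (hypothetical values; assumes StatsAPI.createDayKey yields keys like
    // "2017-11-25"):
    //
    //   counters = {
    //     "stats-packageRequests-2017-11-25": { react: 2 },
    //     "stats-packageBytes-2017-11-25": { react: 24682 }
    //   }
    //
    // with both keys in `expireat` set to expire 30 days after the day ends.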

    stream
      .on("error", reject)
      .on("data", function(entry) {
        // entry.timestamp is in nanoseconds; convert to milliseconds for Date.
        const date = new Date(Math.round(entry.timestamp / 1000000))

        // Expire each day's stats a fixed number of days after that day ends.
        const nextDay = startOfDay(addDays(date, 1))
        const sevenDaysLater = getSeconds(addDays(nextDay, 7))
        const thirtyDaysLater = getSeconds(addDays(nextDay, 30))
        const dayKey = StatsAPI.createDayKey(date)

        const clientRequest = entry.clientRequest
        const edgeResponse = entry.edgeResponse

        if (edgeResponse.status === 200) {
          // Q: How many requests do we serve for a package per day?
          // Q: How many bytes do we serve for a package per day?
          const url = parsePackageURL(parseURL(clientRequest.uri).pathname)
          const packageName = url && url.packageName

          if (packageName) {
            incr(`stats-packageRequests-${dayKey}`, packageName, 1, thirtyDaysLater)
            incr(`stats-packageBytes-${dayKey}`, packageName, edgeResponse.bytes, thirtyDaysLater)
          }
        }
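
        // Each key above names a per-day Redis sorted set; e.g. (hypothetical)
        // "stats-packageRequests-2017-11-25" holds member "react" with a score
        // equal to that day's request count for react.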

        // Q: How many requests per day do we receive via a protocol?
        const protocol = clientRequest.httpProtocol

        if (protocol) incr(`stats-protocolRequests-${dayKey}`, protocol, 1, thirtyDaysLater)

        // Q: How many requests do we receive from a hostname per day?
        // Q: How many bytes do we serve to a hostname per day?
        // Note: hostname stats are kept for only 7 days, vs. 30 for package
        // and protocol stats.
        const referer = clientRequest.referer
        const hostname = referer && parseURL(referer).hostname

        if (hostname) {
          incr(`stats-hostnameRequests-${dayKey}`, hostname, 1, sevenDaysLater)
          incr(`stats-hostnameBytes-${dayKey}`, hostname, edgeResponse.bytes, sevenDaysLater)
        }
      })
      .on("end", function() {
        resolve({ counters, expireat })
      })
  })
}

function processLogs(stream) {
  // Flush the accumulated counters into Redis sorted sets, one ZINCRBY per
  // member, then set each key's expiration.
  return computeCounters(stream).then(({ counters, expireat }) => {
    Object.keys(counters).forEach(key => {
      const values = counters[key]

      Object.keys(values).forEach(member => {
        db.zincrby(key, values[member], member)
      })

      if (expireat[key]) db.expireat(key, expireat[key])
    })
  })
}
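
// A minimal sketch of reading the counters back (illustrative only; the real
// read paths live in StatsAPI). ZREVRANGE lists members by descending score,
// i.e. the most-requested packages first:
//
//   db.zrevrange("stats-packageRequests-2017-11-25", 0, 9, "WITHSCORES",
//     (error, reply) => {
//       // reply alternates member and score, e.g. ["react", "1052", ...]
//     }
//   )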

function ingestLogs(zone, startSeconds, endSeconds) {
  // Resolving with the getLogs(...).then(...) chain means a fetch or
  // processing failure rejects the promise this function returns.
  return new Promise(resolve => {
    console.log(
      "info: Started ingesting logs for %s from %s to %s",
      zone.name,
      stringifySeconds(startSeconds),
      stringifySeconds(endSeconds)
    )

    const startFetchTime = Date.now()

    resolve(
      CloudflareAPI.getLogs(zone.id, startSeconds, endSeconds).then(stream => {
        const endFetchTime = Date.now()

        console.log(
          "info: Fetched %ds worth of logs for %s in %dms",
          endSeconds - startSeconds,
          zone.name,
          endFetchTime - startFetchTime
        )

        const startProcessTime = Date.now()

        return processLogs(stream).then(() => {
          const endProcessTime = Date.now()

          console.log(
            "info: Processed %ds worth of logs for %s in %dms",
            endSeconds - startSeconds,
            zone.name,
            endProcessTime - startProcessTime
          )
        })
      })
    )
  })
}

function startZone(zone) {
  // Use a regex so every "." in the zone name is replaced, not just the first.
  const startSecondsKey = `ingestLogsWorker-nextStartSeconds-${zone.name.replace(/\./g, "-")}`

  function takeATurn() {
    db.get(startSecondsKey, function(error, value) {
      let startSeconds = value && parseInt(value, 10)

      const now = Date.now()

      // Cloudflare keeps logs around for 72 hours.
      // https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API
      const minSeconds = toSeconds(now - oneHour * 72)

      if (startSeconds == null) {
        startSeconds = minSeconds
      } else if (startSeconds < minSeconds) {
        console.warn(
          "warning: Dropped logs for %s from %s to %s!",
          zone.name,
          stringifySeconds(startSeconds),
          stringifySeconds(minSeconds)
        )

        startSeconds = minSeconds
      }

      // Cloudflare says the log for a request is typically available within
      // thirty (30) minutes of the request taking place, and that logs are
      // delivered ordered by the time the request was received at the edge.
      // So we wait a full thirty minutes before ingesting a window, to let
      // any congestion in the log pipeline pass and get a full set of logs.
      // https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API
      const maxSeconds = toSeconds(now - oneMinute * 30)
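      // For example (hypothetical times): if it is now 12:00:00Z, maxSeconds
      // corresponds to 11:30:00Z, so we only ingest windows that start before
      // 11:30:00Z.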

      if (startSeconds < maxSeconds) {
        const endSeconds = startSeconds + LogWindowSeconds

        ingestLogs(zone, startSeconds, endSeconds).then(
          function() {
            // Checkpoint our progress so a restarted worker resumes here.
            db.set(startSecondsKey, endSeconds)
            setTimeout(takeATurn)
          },
          function(error) {
            console.error(error.stack)
            process.exit(1)
          }
        )
      } else {
        // We're all caught up; sleep until the next window is old enough.
        setTimeout(takeATurn, (startSeconds - maxSeconds) * 1000)
      }
    })
  }

  takeATurn()
}
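
// Fetch the zone descriptors for every domain and start an independent ingest
// loop per zone. Loops for different zones run concurrently; within a zone,
// windows are processed strictly one at a time.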

Promise.all(DomainNames.map(CloudflareAPI.getZones)).then(results => {
  // results is an array of zone arrays, one per domain; flatten it.
  const zones = results.reduce((memo, zones) => {
    return memo.concat(zones)
  }, [])

  zones.forEach(startZone)
})