More work on log ingestion

Michael Jackson
2017-05-23 15:00:09 -07:00
parent f535ab6b90
commit f3fa340ae9


@@ -32,6 +32,19 @@ invariant(
   'Missing the $FIREBASE_ACCOUNT environment variable'
 )
 
+/**
+ * Domains we want to analyze.
+ */
+const DomainNames = [
+  //'npmcdn.com', // We don't have log data on npmcdn.com yet :/
+  'unpkg.com'
+]
+
+/**
+ * The window of time to download in a single fetch.
+ */
+const LogWindowSeconds = 30
+
 /*
 Stuff we wanna show on the website:
@@ -54,7 +67,6 @@ admin.initializeApp({
 })
 
 const db = admin.database()
-const logsRef = db.ref('logs')
 
 const getZones = (domain) =>
   fetch(`https://api.cloudflare.com/client/v4/zones?name=${domain}`, {
@@ -76,9 +88,6 @@ const getLogs = (zoneId, startTime, endTime) =>
   }
 }).then(res => res.body.pipe(gunzip()))
 
-const incrementKey = (object, key, n = 1) =>
-  object[key] = (object[key] || 0) + n
-
 const toSeconds = (millis) =>
   Math.floor(millis / 1000)
@@ -101,48 +110,35 @@ const oneMinute = oneSecond * 60
 const thirtyMinutes = oneMinute * 30
 const oneHour = oneMinute * 60
 
-const ingestLogs = (zone, startSeconds, endSeconds) =>
+const computeLogChanges = (stream) =>
   new Promise((resolve, reject) => {
-    console.log(
-      'START ingesting logs for %s from %s to %s',
-      zone.name,
-      stringifySeconds(startSeconds),
-      stringifySeconds(endSeconds)
-    )
-
-    const startFetchTime = Date.now()
-
-    getLogs(zone.id, startSeconds, endSeconds).then(stream => {
-      const endFetchTime = Date.now()
-
-      console.log(
-        'Fetched %ds worth of logs for %s at %s in %dms',
-        endSeconds - startSeconds,
-        zone.name,
-        stringifySeconds(startSeconds),
-        endFetchTime - startFetchTime
-      )
-
     const changes = {}
 
+    const incKey = (key, n = 1) =>
+      changes[key] = (changes[key] || 0) + n
+
     stream
       .pipe(ndjson.parse())
       .on('error', reject)
       .on('data', entry => {
         const date = new Date(Math.round(entry.timestamp / 1000000))
         const dayKey = `${date.getUTCFullYear()}/${date.getUTCMonth()}/${date.getUTCDate()}`
-        const minuteKey = `${date.getUTCHours()}/${date.getUTCMinutes()}`
+        const hourKey = `${dayKey}/${date.getUTCHours()}`
+        const minuteKey = `${hourKey}/${date.getUTCMinutes()}`
+
+        // Q: How many requests do we receive per day?
+        incKey(`requestsPerDay/${dayKey}`)
 
         // Q: How many requests do we receive per minute?
-        incrementKey(changes, `${dayKey}/totalRequests/${minuteKey}`, 1)
+        incKey(`requestsPerMinute/${minuteKey}`)
 
-        // Q: How many requests do we receive to edge/cache/origin per minute?
+        // Q: How many requests do we receive to edge/cache/origin per hour?
         if (entry.origin) {
-          incrementKey(changes, `${dayKey}/originRequests/${minuteKey}`)
+          incKey(`originRequestsPerHour/${hourKey}`)
         } else if (entry.cache) {
-          incrementKey(changes, `${dayKey}/cacheRequests/${minuteKey}`)
+          incKey(`cacheRequestsPerHour/${hourKey}`)
         } else {
-          incrementKey(changes, `${dayKey}/edgeRequests/${minuteKey}`)
+          incKey(`edgeRequestsPerHour/${hourKey}`)
         }
 
         const clientRequest = entry.clientRequest
@@ -150,8 +146,9 @@ const ingestLogs = (zone, startSeconds, endSeconds) =>
         // Q: How many requests per day do we receive for a package?
         const uri = clientRequest.uri
         const package = getPackageName(parseURL(uri).pathname)
 
         if (package) {
-          const key = `${dayKey}/packages/${hashKey(package)}`
+          const key = `packageRequestsPerDay/${dayKey}/${hashKey(package)}`
+
           if (changes[key]) {
             changes[key].requests += 1
@@ -162,8 +159,9 @@ const ingestLogs = (zone, startSeconds, endSeconds) =>
 
         // Q: How many requests per day do we receive via each protocol?
         const protocol = clientRequest.httpProtocol
 
         if (protocol) {
-          const key = `${dayKey}/protocols/${hashKey(protocol)}`
+          const key = `protocolRequestsPerDay/${dayKey}/${hashKey(protocol)}`
+
           if (changes[key]) {
             changes[key].requests += 1
@@ -173,34 +171,34 @@ const ingestLogs = (zone, startSeconds, endSeconds) =>
           }
         }
 
         // Q: How many requests per day do we receive from an origin?
-        // const referer = clientRequest.referer
-        //
-        // if (referer) {
-        //   const url = parseURL(referer)
-        //   const origin = formatURL({
-        //     protocol: url.protocol,
-        //     hostname: url.hostname
-        //   })
-        //
-        //   const key = `${dayKey}/origins/${hashKey(origin)}`
-        //
-        //   if (changes[key]) {
-        //     changes[key].requests += 1
-        //   } else {
-        //     changes[key] = { origin, requests: 1 }
-        //   }
-        // }
+        const referer = clientRequest.referer
+
+        if (referer) {
+          const url = parseURL(referer)
+          const origin = formatURL({
+            protocol: url.protocol,
+            hostname: url.hostname
+          })
+
+          const key = `originRequestsPerDay/${dayKey}/${hashKey(origin)}`
+
+          if (changes[key]) {
+            changes[key].requests += 1
+          } else {
+            changes[key] = { origin, requests: 1 }
+          }
+        }
       })
       .on('end', () => {
-        console.log(
-          'FINISH ingesting logs for %s from %s to %s',
-          zone.name,
-          stringifySeconds(startSeconds),
-          stringifySeconds(endSeconds)
-        )
+        resolve(changes)
+      })
+  })
 
+const processLogs = (stream) =>
+  computeLogChanges(stream).then(changes => {
     // Record the changes.
     Object.keys(changes).forEach(key => {
-      const ref = logsRef.child(key)
+      const ref = db.ref(`logs/${key}`)
 
       ref.transaction(value => {
         if (typeof changes[key].requests === 'number') {
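For orientation, the `changes` object that computeLogChanges resolves with mixes two kinds of entries: plain numeric counters written by incKey, and object-valued entries for packages, protocols, and origins that carry a `requests` count. That mix is what the `typeof changes[key].requests === 'number'` check inside processLogs's transaction distinguishes. A rough sketch of the shape, with hypothetical dates, hash keys, and counts (the package and protocol initializers fall outside the hunks above and are shown by analogy with the origin entry):

// Hypothetical example of the accumulated `changes` object.
// Plain counters come from incKey(); object entries carry a `requests` field.
const exampleChanges = {
  'requestsPerDay/2017/4/23': 125023,
  'requestsPerMinute/2017/4/23/17/42': 87,
  'cacheRequestsPerHour/2017/4/23/17': 3012,
  // Assumed shape, by analogy with the origin entry in the hunk above:
  'packageRequestsPerDay/2017/4/23/react': { package: 'react', requests: 514 },
  'protocolRequestsPerDay/2017/4/23/HTTP-1-1': { protocol: 'HTTP/1.1', requests: 901 },
  'originRequestsPerDay/2017/4/23/https-example-com': { origin: 'https://example.com', requests: 12 }
}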
@@ -217,42 +215,64 @@ const ingestLogs = (zone, startSeconds, endSeconds) =>
         }
       })
     })
-
-      resolve(changes)
-    })
-  })
   })
 
-const startIngestingLogs = (zone) => {
-  const startSecondsRef = logsRef.child(`nextStartSeconds/${zone.name.replace('.', '-')}`)
-
-  let inProgress = false
-
-  const takeATurn = () => {
-    if (inProgress) {
-      console.log(
-        'Still ingesting logs for %s, waiting for another turn...',
-        zone.name
-      )
-
-      return
-    }
-
-    inProgress = true
+const ingestLogs = (zone, startSeconds, endSeconds) =>
+  new Promise(resolve => {
+    console.log(
+      'LOG: start ingesting logs for %s from %s to %s',
+      zone.name,
+      stringifySeconds(startSeconds),
+      stringifySeconds(endSeconds)
+    )
+
+    const startFetchTime = Date.now()
+
+    resolve(
+      getLogs(zone.id, startSeconds, endSeconds).then(stream => {
+        const endFetchTime = Date.now()
+
+        console.log(
+          'LOG: fetched %ds worth of logs for %s in %dms',
+          endSeconds - startSeconds,
+          zone.name,
+          endFetchTime - startFetchTime
+        )
+
+        const startProcessTime = Date.now()
+
+        return processLogs(stream).then(() => {
+          const endProcessTime = Date.now()
+
+          console.log(
+            'LOG: processed %ds worth of logs for %s in %dms',
+            endSeconds - startSeconds,
+            zone.name,
+            endProcessTime - startProcessTime
+          )
+        })
+      })
+    )
+  })
+
+const startZone = (zone) => {
+  const startSecondsRef = db.ref(`logs/nextStartSeconds/${zone.name.replace('.', '-')}`)
 
+  const takeATurn = () => {
     startSecondsRef.once('value', snapshot => {
       let startSeconds = snapshot.val()
       const now = Date.now()
 
       // Cloudflare keeps logs around for 72 hours.
+      // https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API
       const minSeconds = toSeconds(now - oneHour * 72)
 
       if (startSeconds == null) {
         startSeconds = minSeconds
       } else if (startSeconds < minSeconds) {
         console.warn(
-          'WARNING: dropping logs for %s from %s to %s!',
+          'WARNING: dropped logs for %s from %s to %s!',
           zone.name,
           stringifySeconds(startSeconds),
           stringifySeconds(minSeconds)
@@ -261,34 +281,44 @@ const startIngestingLogs = (zone) => {
         startSeconds = minSeconds
       }
 
-      if (startSeconds < toSeconds(now - thirtyMinutes)) {
-        const endSeconds = startSeconds + 10
+      // The log for a request is typically available within thirty (30) minutes
+      // of the request taking place under normal conditions. We deliver logs
+      // ordered by the time that the logs were created, i.e. the timestamp of
+      // the request when it was received by the edge. Given the order of
+      // delivery, we recommend waiting a full thirty minutes to ingest a full
+      // set of logs. This will help ensure that any congestion in the log
+      // pipeline has passed and a full set of logs can be ingested.
+      // https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API
+      const maxSeconds = toSeconds(now - thirtyMinutes)
+
+      if (startSeconds < maxSeconds) {
+        const endSeconds = startSeconds + LogWindowSeconds
 
         ingestLogs(zone, startSeconds, endSeconds).then(() => {
           startSecondsRef.set(endSeconds)
-          inProgress = false
+          setTimeout(takeATurn)
+        }, error => {
+          console.error(error.stack)
+          process.exit(1)
         })
       } else {
-        console.log(
-          'Waiting for 30 minutes to pass before fetching logs for %s...',
-          zone.name
-        )
-
-        inProgress = false
+        setTimeout(takeATurn, (startSeconds - maxSeconds) * 1000)
       }
     })
   }
 
   takeATurn()
-
-  setInterval(takeATurn, oneSecond * 3)
 }
 
-const domains = [
-  //'npmcdn.com', // We don't have log data on npmcdn.com yet :/
-  'unpkg.com'
-]
-
-Promise.all(domains.map(getZones)).then(results => {
+Promise.all(DomainNames.map(getZones)).then(results => {
   const zones = results.reduce((memo, zones) => memo.concat(zones))
-  zones.forEach(startIngestingLogs)
+  zones.forEach(startZone)
 })
-
-//const getValues = (object) =>
-//  Object.keys(object).map(key => object[key])
-//
-//db.ref('logs/2017/4/17/packages').orderByChild('requests').limitToLast(10).once('value', (snapshot) => {
-//  const values = getValues(snapshot.val()).sort((a, b) => b.requests - a.requests)
-//  console.log(values)
-//})
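For spot-checking the new layout by hand, the commented-out debugging query removed at the bottom translates roughly to the following under the renamed keys. This is only a sketch: the date path is hypothetical, and getValues mirrors the helper deleted above.

// Sketch: ad-hoc query for the top packages on a (hypothetical) day,
// using the new `packageRequestsPerDay` key layout.
const getValues = (object) =>
  Object.keys(object).map(key => object[key])

db.ref('logs/packageRequestsPerDay/2017/4/23')
  .orderByChild('requests')
  .limitToLast(10)
  .once('value', (snapshot) => {
    const values = getValues(snapshot.val()).sort((a, b) => b.requests - a.requests)
    console.log(values)
  })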