Merge branch 'master' into percent-40

This commit is contained in:
William Hilton
2019-02-01 12:17:42 -05:00
committed by GitHub
270 changed files with 123523 additions and 11955 deletions

View File

@ -0,0 +1,39 @@
import * as auth from '../auth';
describe('Auth API', () => {
  // Start every test from a clean slate: returning the promise lets Jest
  // wait for completion AND fail the test on rejection (the previous
  // done-callback style only reported success, so a rejected promise made
  // the test time out instead of failing with the real error).
  beforeEach(() => auth.removeAllRevokedTokens());

  it('creates tokens with the right scopes', () => {
    const scopes = {
      blacklist: {
        add: true,
        remove: true
      }
    };

    return auth
      .createToken(scopes)
      .then(token => auth.verifyToken(token))
      .then(payload => {
        expect(payload.jti).toEqual(expect.any(String));
        expect(payload.iss).toEqual(expect.any(String));
        expect(payload.iat).toEqual(expect.any(Number));
        expect(payload.scopes).toMatchObject(scopes);
      });
  });

  it('refuses to verify revoked tokens', () => {
    const scopes = {};

    return auth
      .createToken(scopes)
      .then(token =>
        auth.revokeToken(token).then(() => auth.verifyToken(token))
      )
      .then(payload => {
        expect(payload).toBe(null);
      });
  });
});

View File

@ -1,4 +1,4 @@
const createSearch = require('../createSearch');
import createSearch from '../createSearch';
describe('createSearch', () => {
it('omits the trailing = for empty string values', () => {

View File

@ -1,4 +1,4 @@
const getContentType = require('../getContentType');
import getContentType from '../getContentType';
it('gets a content type of text/plain for LICENSE|README|CHANGES|AUTHORS|Makefile', () => {
expect(getContentType('AUTHORS')).toBe('text/plain');

View File

@ -1,4 +1,4 @@
const parsePackageURL = require('../parsePackageURL');
import parsePackageURL from '../parsePackageURL';
describe('parsePackageURL', () => {
it('parses plain packages', () => {

View File

@ -1,5 +1,3 @@
function addLeadingSlash(name) {
export default function addLeadingSlash(name) {
return name.charAt(0) === '/' ? name : '/' + name;
}
module.exports = addLeadingSlash;

86
modules/utils/auth.js Normal file
View File

@ -0,0 +1,86 @@
import crypto from 'crypto';
import jwt from 'jsonwebtoken';
import data from './data';
import { privateKey, publicKey } from './secret';
// Current Unix time truncated to whole seconds (for the JWT "iat" claim).
function getCurrentSeconds() {
  return Math.trunc(Date.now() / 1000);
}
// 128 bits of randomness, hex-encoded (32 chars), used as the JWT "jti" claim.
function createTokenId() {
  const bytes = crypto.randomBytes(16);
  return bytes.toString('hex');
}
/**
 * Creates a signed RS256 JWT carrying the given scopes.
 * Resolves with the token string, rejects on signing errors.
 */
export function createToken(scopes = {}) {
  const payload = {
    jti: createTokenId(),
    iss: 'https://unpkg.com',
    iat: getCurrentSeconds(),
    scopes
  };

  return new Promise((resolve, reject) => {
    jwt.sign(payload, privateKey, { algorithm: 'RS256' }, (error, token) => {
      if (error) return reject(error);
      resolve(token);
    });
  });
}
const revokedTokensSet = 'revoked-tokens';
/**
 * Verifies an RS256 JWT and checks it against the revocation set.
 * Resolves with the token payload, or null when the token has no "jti"
 * claim or has been revoked. Rejects on verification/lookup errors.
 */
export function verifyToken(token) {
  return new Promise((resolve, reject) => {
    const verifyOptions = { algorithms: ['RS256'] };

    jwt.verify(token, publicKey, verifyOptions, (verifyError, payload) => {
      if (verifyError) return reject(verifyError);

      // Tokens without an id cannot be tracked for revocation; treat as invalid.
      if (!payload.jti) return resolve(null);

      data.sismember(revokedTokensSet, payload.jti, (lookupError, isMember) => {
        if (lookupError) return reject(lookupError);
        resolve(isMember === 0 ? payload : null);
      });
    });
  });
}
/**
 * Revokes a token by adding its "jti" to the revocation set.
 * Invalid or already-revoked tokens (verifyToken resolves null) are a no-op.
 */
export function revokeToken(token) {
  return verifyToken(token).then(payload => {
    if (!payload) return;

    return new Promise((resolve, reject) => {
      data.sadd(revokedTokensSet, payload.jti, error => {
        if (error) return reject(error);
        resolve();
      });
    });
  });
}
/**
 * Clears the entire revocation set (used by tests to reset state).
 */
export function removeAllRevokedTokens() {
  return new Promise((resolve, reject) => {
    data.del(revokedTokensSet, error => {
      if (error) return reject(error);
      resolve();
    });
  });
}

View File

@ -1,4 +1,4 @@
function bufferStream(stream) {
export default function bufferStream(stream) {
return new Promise((resolve, reject) => {
const chunks = [];
@ -8,5 +8,3 @@ function bufferStream(stream) {
.on('end', () => resolve(Buffer.concat(chunks)));
});
}
module.exports = bufferStream;

View File

@ -1,23 +0,0 @@
const LRUCache = require('lru-cache');

// Hold at most 40 MB of cached values (measured by byte length),
// each entry living for at most 60 seconds by default.
const maxBytes = 40 * 1024 * 1024;
const defaultMaxAge = 60 * 1000;

const cache = new LRUCache({
  max: maxBytes,
  maxAge: defaultMaxAge,
  length: Buffer.byteLength
});

// Fetch a cached value (undefined on miss/expiry).
function get(key) {
  return cache.get(key);
}

// Store a value with a per-entry TTL given in seconds (redis SETEX-style).
function setex(key, ttlSeconds, value) {
  return cache.set(key, value, ttlSeconds * 1000);
}

module.exports = { get, setex };

View File

@ -0,0 +1,92 @@
import 'isomorphic-fetch';
import invariant from 'invariant';
import gunzip from 'gunzip-maybe';
import ndjson from 'ndjson';
const cloudflareURL = 'https://api.cloudflare.com/client/v4';
const cloudflareEmail = process.env.CLOUDFLARE_EMAIL;
const cloudflareKey = process.env.CLOUDFLARE_KEY;
invariant(
cloudflareEmail,
'Missing the $CLOUDFLARE_EMAIL environment variable'
);
invariant(cloudflareKey, 'Missing the $CLOUDFLARE_KEY environment variable');
/**
 * Performs an authenticated GET against the Cloudflare v4 API.
 * Caller-supplied headers are merged in; auth headers always win.
 */
export function get(path, headers) {
  const authHeaders = {
    'X-Auth-Email': cloudflareEmail,
    'X-Auth-Key': cloudflareKey
  };

  return fetch(`${cloudflareURL}${path}`, {
    headers: Object.assign({}, headers, authHeaders)
  });
}
/**
 * GETs a Cloudflare API path and resolves with the "result" field of the
 * JSON envelope. Logs and throws when the envelope reports failure.
 */
export function getJSON(path, headers) {
  return get(path, headers)
    .then(res => res.json())
    .then(data => {
      if (data.success) return data.result;

      console.error(`cloudflare.getJSON failed at ${path}`);
      console.error(data);
      throw new Error('Failed to getJSON from Cloudflare');
    });
}
/**
 * Looks up Cloudflare zones for one or more domain names and resolves
 * with a single flattened array of zone objects.
 */
export function getZones(domains) {
  const domainList = Array.isArray(domains) ? domains : [domains];

  return Promise.all(
    domainList.map(domain => getJSON(`/zones?name=${domain}`))
  ).then(results => {
    let zones = results[0];
    for (let i = 1; i < results.length; i += 1) {
      zones = zones.concat(results[i]);
    }
    return zones;
  });
}
// Recursively merges `values` into `target`, summing numeric leaves.
// Nested objects are merged in place; non-number, non-object values
// are ignored. Mutates and returns `target`.
function reduceResults(target, values) {
  for (const [key, value] of Object.entries(values)) {
    if (value !== null && typeof value === 'object') {
      target[key] = reduceResults(target[key] || {}, value);
    } else if (typeof value === 'number') {
      target[key] = (target[key] || 0) + value;
    }
  }

  return target;
}
/**
 * Fetches the analytics dashboard for one or more zones over [since, until]
 * and merges all per-zone results into a single summed dashboard.
 */
export function getZoneAnalyticsDashboard(zones, since, until) {
  const zoneList = Array.isArray(zones) ? zones : [zones];
  const query = `since=${since.toISOString()}&until=${until.toISOString()}`;

  return Promise.all(
    zoneList.map(zone =>
      getJSON(`/zones/${zone.id}/analytics/dashboard?${query}`)
    )
  ).then(results => results.reduce(reduceResults));
}
/**
 * GETs a (possibly gzipped) ndjson endpoint and resolves with a stream
 * of parsed JSON objects.
 */
export function getJSONStream(path, headers) {
  const headersWithGzip = Object.assign({}, headers, {
    'Accept-Encoding': 'gzip'
  });

  return get(path, headersWithGzip)
    .then(res => res.body.pipe(gunzip()))
    .then(unzipped => unzipped.pipe(ndjson.parse()));
}
/**
 * Streams Cloudflare "logs received" entries for a zone between the given
 * timestamps, restricted to the requested log fields.
 */
export function getLogs(zoneId, startTime, endTime, fieldsArray) {
  const query = `start=${startTime}&end=${endTime}&fields=${fieldsArray.join(
    ','
  )}`;
  return getJSONStream(`/zones/${zoneId}/logs/received?${query}`);
}

View File

@ -1,4 +1,9 @@
function createPackageURL(packageName, version, pathname, search) {
export default function createPackageURL(
packageName,
version,
pathname,
search
) {
let url = `/${packageName}`;
if (version != null) url += `@${version}`;
@ -7,5 +12,3 @@ function createPackageURL(packageName, version, pathname, search) {
return url;
}
module.exports = createPackageURL;

View File

@ -0,0 +1,7 @@
import express from 'express';
/**
 * Creates an Express router, hands it to `configureRouter` so the caller
 * can register routes/middleware, then returns the configured router.
 */
export default function createRouter(configureRouter) {
const router = express.Router();
configureRouter(router);
return router;
}

View File

@ -1,4 +1,4 @@
function createSearch(query) {
export default function createSearch(query) {
const keys = Object.keys(query).sort();
const params = keys.reduce(
(memo, key) =>
@ -12,5 +12,3 @@ function createSearch(query) {
return params.length ? `?${params.join('&')}` : '';
}
module.exports = createSearch;

View File

@ -1,4 +1,4 @@
const redis = require('redis');
import redis from 'redis';
redis.debug_mode = process.env.DEBUG_REDIS != null;
@ -6,4 +6,4 @@ const client = redis.createClient(
process.env.DATA_URL || process.env.OPENREDIS_URL || 'redis://localhost:6379'
);
module.exports = client;
export default client;

2
modules/utils/debug.js Normal file
View File

@ -0,0 +1,2 @@
// Debug logger: delegates to console.log only when the $DEBUG environment
// variable is set; otherwise it is a silent no-op.
function noDebug() {}

const debug = process.env.DEBUG ? console.log.bind(console) : noDebug;

export default debug;

View File

@ -1,21 +1,17 @@
const url = require('url');
const https = require('https');
const gunzip = require('gunzip-maybe');
const tar = require('tar-stream');
import url from 'url';
import https from 'https';
import gunzip from 'gunzip-maybe';
import tar from 'tar-stream';
const bufferStream = require('./bufferStream');
const agent = require('./registryAgent');
const logging = require('./logging');
import debug from './debug';
import bufferStream from './bufferStream';
import agent from './registryAgent';
function fetchNpmPackage(packageConfig) {
export default function fetchNpmPackage(packageConfig) {
return new Promise((resolve, reject) => {
const tarballURL = packageConfig.dist.tarball;
logging.debug(
'Fetching package for %s from %s',
packageConfig.name,
tarballURL
);
debug('Fetching package for %s from %s', packageConfig.name, tarballURL);
const { hostname, pathname } = url.parse(tarballURL);
const options = {
@ -45,5 +41,3 @@ function fetchNpmPackage(packageConfig) {
.on('error', reject);
});
}
module.exports = fetchNpmPackage;

View File

@ -1,25 +1,27 @@
const url = require('url');
const https = require('https');
import url from 'url';
import https from 'https';
const serverConfig = require('../serverConfig');
const bufferStream = require('./bufferStream');
const agent = require('./registryAgent');
const logging = require('./logging');
import debug from './debug';
import bufferStream from './bufferStream';
import agent from './registryAgent';
const npmRegistryURL =
process.env.NPM_REGISTRY_URL || 'https://registry.npmjs.org';
function parseJSON(res) {
return bufferStream(res).then(JSON.parse);
}
function fetchNpmPackageInfo(packageName) {
export default function fetchNpmPackageInfo(packageName) {
return new Promise((resolve, reject) => {
const encodedPackageName =
packageName.charAt(0) === '@'
? `@${encodeURIComponent(packageName.substring(1))}`
: encodeURIComponent(packageName);
const infoURL = `${serverConfig.registryURL}/${encodedPackageName}`;
const infoURL = `${npmRegistryURL}/${encodedPackageName}`;
logging.debug('Fetching package info for %s from %s', packageName, infoURL);
debug('Fetching package info for %s from %s', packageName, infoURL);
const { hostname, pathname } = url.parse(infoURL);
const options = {
@ -53,5 +55,3 @@ function fetchNpmPackageInfo(packageName) {
.on('error', reject);
});
}
module.exports = fetchNpmPackageInfo;

View File

@ -1,22 +1,23 @@
const mime = require('mime');
import mime from 'mime';
mime.define({
'text/plain': [
'authors',
'changes',
'license',
'makefile',
'patents',
'readme',
'ts',
'flow'
]
});
mime.define(
{
'text/plain': [
'authors',
'changes',
'license',
'makefile',
'patents',
'readme',
'ts',
'flow'
]
},
/* force */ true
);
const textFiles = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore|\.lock)$/i;
function getContentType(file) {
return textFiles.test(file) ? 'text/plain' : mime.lookup(file);
export default function getContentType(file) {
return textFiles.test(file) ? 'text/plain' : mime.getType(file);
}
module.exports = getContentType;

View File

@ -1,5 +1,3 @@
function getContentTypeHeader(type) {
export default function getContentTypeHeader(type) {
return type === 'application/javascript' ? type + '; charset=utf-8' : type;
}
module.exports = getContentTypeHeader;

View File

@ -1,7 +1,5 @@
const SRIToolbox = require('sri-toolbox');
import SRIToolbox from 'sri-toolbox';
function getIntegrity(data) {
export default function getIntegrity(data) {
return SRIToolbox.generate({ algorithms: ['sha384'] }, data);
}
module.exports = getIntegrity;

View File

@ -1,19 +1,21 @@
const cache = require('./cache');
const fetchNpmPackageInfo = require('./fetchNpmPackageInfo');
import LRUCache from 'lru-cache';
import fetchNpmPackageInfo from './fetchNpmPackageInfo';
const maxMegabytes = 40; // Cap the cache at 40 MB
const maxLength = maxMegabytes * 1024 * 1024;
const oneSecond = 1000;
const oneMinute = 60 * oneSecond;
const cache = new LRUCache({
max: maxLength,
maxAge: oneMinute,
length: Buffer.byteLength
});
const notFound = '';
function cleanPackageInfo(packageInfo) {
return {
versions: Object.keys(packageInfo.versions).reduce((memo, key) => {
memo[key] = packageInfo.versions[key];
return memo;
}, {}),
'dist-tags': packageInfo['dist-tags']
};
}
function getNpmPackageInfo(packageName) {
export default function getNpmPackageInfo(packageName) {
return new Promise((resolve, reject) => {
const key = `npmPackageInfo-${packageName}`;
const value = cache.get(key);
@ -21,25 +23,21 @@ function getNpmPackageInfo(packageName) {
if (value != null) {
resolve(value === notFound ? null : JSON.parse(value));
} else {
fetchNpmPackageInfo(packageName).then(value => {
if (value == null) {
fetchNpmPackageInfo(packageName).then(info => {
if (info == null) {
// Cache 404s for 5 minutes. This prevents us from making
// unnecessary requests to the registry for bad package names.
// In the worst case, a brand new package's info will be
// available within 5 minutes.
cache.setex(key, 300, notFound);
cache.set(key, notFound, oneMinute * 5);
resolve(null);
} else {
value = cleanPackageInfo(value);
// Cache valid package info for 1 minute. In the worst case,
// new versions won't be available for 1 minute.
cache.setex(key, 60, JSON.stringify(value));
resolve(value);
cache.set(key, JSON.stringify(info), oneMinute);
resolve(info);
}
}, reject);
}
});
}
module.exports = getNpmPackageInfo;

View File

@ -1,15 +0,0 @@
const db = require('./data');

// Atomically adds `by` (default 1) to field `key` of the redis hash
// `counter` and resolves with the new value.
function incrementCounter(counter, key, by = 1) {
  return new Promise((resolve, reject) => {
    db.hincrby(counter, key, by, (error, newValue) => {
      if (error) return reject(error);
      resolve(newValue);
    });
  });
}

module.exports = incrementCounter;

192
modules/utils/ingestLogs.js Normal file
View File

@ -0,0 +1,192 @@
import url from 'url';
import { startOfDay, addDays } from 'date-fns';
import data from './data';
import isValidPackageName from './isValidPackageName';
import parsePackageURL from './parsePackageURL';
import * as cloudflare from './cloudflare';
import * as stats from './stats';
/**
* Domains we want to analyze.
*/
const domainNames = [
'unpkg.com'
//"npmcdn.com" // We don't have log data on npmcdn.com yet :/
];
let cachedZones;
// Converts a Date to whole Unix seconds.
function getSeconds(date) {
  return Math.floor(date.valueOf() / 1000);
}
// Formats Unix seconds as an ISO-8601 UTC timestamp without the
// (always-zero) milliseconds part, e.g. "2019-01-01T00:00:00Z".
function stringifySeconds(seconds) {
  const iso = new Date(seconds * 1000).toISOString();
  return iso.replace(/\.0+Z$/, 'Z');
}
// Converts milliseconds to whole seconds.
function toSeconds(ms) {
  const seconds = ms / 1000;
  return Math.floor(seconds);
}
// Consumes a stream of Cloudflare log entries and aggregates them into
// in-memory counters. Resolves with:
//   counters     - { redisKey: { member: count } } pending zincrby amounts
//   expireat     - { redisKey: unixSeconds } expiration time per key
//   totalEntries - number of log entries consumed
function computeCounters(stream) {
  return new Promise((resolve, reject) => {
    const counters = {};
    const expireat = {};
    let totalEntries = 0;

    // Accumulate `by` into counters[key][member] and record when `key`
    // should expire (the last write for a key wins).
    function incr(key, member, by, expiry) {
      counters[key] = counters[key] || {};
      counters[key][member] = (counters[key][member] || 0) + by;
      expireat[key] = expiry;
    }

    stream
      .on('error', reject)
      .on('data', entry => {
        totalEntries += 1;

        // EdgeStartTimestamp is divided by 1e6 to get milliseconds —
        // presumably it arrives in nanoseconds; confirm against the
        // Cloudflare logs schema.
        const date = new Date(Math.round(entry.EdgeStartTimestamp / 1000000));
        // Expirations are anchored at the start of the following UTC day.
        const nextDay = startOfDay(addDays(date, 1));
        const sevenDaysLater = getSeconds(addDays(nextDay, 7));
        const thirtyDaysLater = getSeconds(addDays(nextDay, 30));
        const dayKey = stats.createDayKey(date);

        if (entry.EdgeResponseStatus === 200) {
          // Q: How many requests do we serve for a package per day?
          // Q: How many bytes do we serve for a package per day?
          const parsed = parsePackageURL(entry.ClientRequestURI);
          const packageName = parsed && parsed.packageName;

          if (packageName && isValidPackageName(packageName)) {
            incr(
              `stats-packageRequests-${dayKey}`,
              packageName,
              1,
              thirtyDaysLater
            );
            incr(
              `stats-packageBytes-${dayKey}`,
              packageName,
              entry.EdgeResponseBytes,
              thirtyDaysLater
            );
          }
        }

        // Q: How many requests per day do we receive via a protocol?
        const protocol = entry.ClientRequestProtocol;

        if (protocol) {
          incr(
            `stats-protocolRequests-${dayKey}`,
            protocol,
            1,
            thirtyDaysLater
          );
        }

        // Q: How many requests do we receive from a hostname per day?
        // Q: How many bytes do we serve to a hostname per day?
        const referer = entry.ClientRequestReferer;
        const hostname = referer && url.parse(referer).hostname;

        if (hostname) {
          incr(`stats-hostnameRequests-${dayKey}`, hostname, 1, sevenDaysLater);
          incr(
            `stats-hostnameBytes-${dayKey}`,
            hostname,
            entry.EdgeResponseBytes,
            sevenDaysLater
          );
        }
      })
      .on('end', () => {
        resolve({ counters, expireat, totalEntries });
      });
  });
}
// Aggregates a log stream via computeCounters, then flushes the pending
// increments/expirations to redis. Resolves with the number of entries
// processed. Note: the zincrby/expireat calls are fire-and-forget — their
// completion is not awaited.
function processLogs(stream) {
  return computeCounters(stream).then(({ counters, expireat, totalEntries }) => {
    for (const [key, members] of Object.entries(counters)) {
      for (const [member, amount] of Object.entries(members)) {
        data.zincrby(key, amount, member);
      }

      if (expireat[key]) {
        data.expireat(key, expireat[key]);
      }
    }

    return totalEntries;
  });
}
// Fetches and ingests the logs for a single zone over [startDate, endDate],
// logging how long the fetch and the processing phases took.
function ingestLogsForZone(zone, startDate, endDate) {
  const start = stringifySeconds(toSeconds(startDate));
  const end = stringifySeconds(toSeconds(endDate));

  const fields = [
    'EdgeStartTimestamp',
    'EdgeResponseStatus',
    'EdgeResponseBytes',
    'ClientRequestProtocol',
    'ClientRequestURI',
    'ClientRequestReferer'
  ];

  const fetchStart = Date.now();

  return cloudflare.getLogs(zone.id, start, end, fields).then(stream => {
    console.log(
      'Fetched logs for %s from %s to %s (%dms)',
      zone.name,
      start,
      end,
      Date.now() - fetchStart
    );

    const processStart = Date.now();

    return processLogs(stream).then(totalEntries => {
      console.log(
        'Processed %d log entries for %s (%dms)',
        totalEntries,
        zone.name,
        Date.now() - processStart
      );
    });
  });
}
// Resolves the Cloudflare zone objects for all of the given domain names,
// flattened into a single array.
function getZones(domainNames) {
  return Promise.all(
    domainNames.map(name => cloudflare.getZones(name))
  ).then(results => {
    let zones = results[0];
    for (let i = 1; i < results.length; i += 1) {
      zones = zones.concat(results[i]);
    }
    return zones;
  });
}
/**
 * Ingests logs for every configured domain over [startDate, endDate].
 * Zone lookups are cached in the module-level `cachedZones` after the
 * first call.
 */
export default function ingestLogs(startDate, endDate) {
  const zonesPromise = cachedZones
    ? Promise.resolve(cachedZones)
    : getZones(domainNames).then(zones => (cachedZones = zones));

  return zonesPromise.then(zones =>
    Promise.all(zones.map(zone => ingestLogsForZone(zone, startDate, endDate)))
  );
}

View File

@ -1,7 +1,5 @@
const validateNpmPackageName = require('validate-npm-package-name');
import validateNpmPackageName from 'validate-npm-package-name';
function isValidPackageName(packageName) {
export default function isValidPackageName(packageName) {
return validateNpmPackageName(packageName).errors == null;
}
module.exports = isValidPackageName;

View File

@ -1,24 +0,0 @@
const log = console.log.bind(console);
function noop() {}
let debug, info, warn;
if (process.env.LOG_LEVEL === 'none') {
debug = info = warn = noop;
} else if (process.env.LOG_LEVEL === 'debug') {
debug = info = warn = log;
} else if (process.env.LOG_LEVEL === 'warn') {
debug = info = noop;
warn = log;
} else {
// default LOG_LEVEL = "info"
debug = noop;
info = warn = log;
}
module.exports = {
debug,
info,
warn
};

View File

@ -1,14 +1,15 @@
const url = require('url');
import url from 'url';
const packageURLFormat = /^\/((?:@[^/@]+\/)?[^/@]+)(?:@([^/]+))?(\/.*)?$/;
function parsePackageURL(originalURL) {
export default function parsePackageURL(originalURL) {
let { pathname, search, query } = url.parse(originalURL, true);
try {
pathname = decodeURIComponent(pathname);
} catch (error) {
return null;
}
const match = packageURLFormat.exec(pathname);
// Disallow invalid URL formats.
@ -30,5 +31,3 @@ function parsePackageURL(originalURL) {
filename // /file.js
};
}
module.exports = parsePackageURL;

View File

@ -1,7 +1,7 @@
const https = require('https');
import https from 'https';
const agent = new https.Agent({
keepAlive: true
});
module.exports = agent;
export default agent;

View File

@ -1,11 +0,0 @@
const React = require('react');
const ReactDOMServer = require('react-dom/server');

const doctype = '<!DOCTYPE html>';

// Statically renders the `page` component with `props` into a complete
// HTML document string (doctype included).
function renderPage(page, props) {
  const markup = ReactDOMServer.renderToStaticMarkup(
    React.createElement(page, props)
  );
  return doctype + markup;
}

module.exports = renderPage;

View File

@ -1,8 +1,10 @@
const babel = require('babel-core');
import babel from '@babel/core';
const unpkgRewrite = require('../plugins/unpkgRewrite');
import unpkgRewrite from '../plugins/unpkgRewrite';
function rewriteBareModuleIdentifiers(code, packageConfig) {
const origin = process.env.ORIGIN || 'https://unpkg.com';
export default function rewriteBareModuleIdentifiers(code, packageConfig) {
const dependencies = Object.assign(
{},
packageConfig.peerDependencies,
@ -14,10 +16,8 @@ function rewriteBareModuleIdentifiers(code, packageConfig) {
// because we haven't installed dependencies so
// we can't load plugins; see #84
babelrc: false,
plugins: [unpkgRewrite(dependencies)]
plugins: [unpkgRewrite(origin, dependencies)]
};
return babel.transform(code, options).code;
}
module.exports = rewriteBareModuleIdentifiers;

8
modules/utils/secret.js Normal file
View File

@ -0,0 +1,8 @@
import invariant from 'invariant';

const secretKey = process.env.SECRET_KEY;

invariant(secretKey, 'Missing $SECRET_KEY environment variable');

// NOTE(review): process.env values are always plain strings, so
// `secretKey.private` and `secretKey.public` evaluate to undefined here
// unless SECRET_KEY is parsed into an object somewhere upstream — confirm
// the expected format of $SECRET_KEY and add parsing if needed.
export const privateKey = secretKey.private;
export const publicKey = secretKey.public;

145
modules/utils/stats.js Normal file
View File

@ -0,0 +1,145 @@
// import data from './data';
import * as cloudflare from './cloudflare';
// UTC-based key builders for the stats counters. Note that the month
// component comes from getUTCMonth() and is therefore 0-indexed
// (January = 0), matching the keys written elsewhere.
export function createDayKey(date) {
  const parts = [date.getUTCFullYear(), date.getUTCMonth(), date.getUTCDate()];
  return parts.join('-');
}

export function createHourKey(date) {
  return [createDayKey(date), date.getUTCHours()].join('-');
}

export function createMinuteKey(date) {
  return [createHourKey(date), date.getUTCMinutes()].join('-');
}
// function createScoresMap(array) {
// const map = {};
// for (let i = 0; i < array.length; i += 2) {
// map[array[i]] = parseInt(array[i + 1], 10);
// }
// return map;
// }
// function getScoresMap(key, n = 100) {
// return new Promise((resolve, reject) => {
// data.zrevrange(key, 0, n, 'withscores', (error, value) => {
// if (error) {
// reject(error);
// } else {
// resolve(createScoresMap(value));
// }
// });
// });
// }
// function getPackageRequests(date, n = 100) {
// return getScoresMap(`stats-packageRequests-${createDayKey(date)}`, n);
// }
// function getPackageBandwidth(date, n = 100) {
// return getScoresMap(`stats-packageBytes-${createDayKey(date)}`, n);
// }
// function getProtocolRequests(date) {
// return getScoresMap(`stats-protocolRequests-${createDayKey(date)}`);
// }
// function addDailyMetricsToTimeseries(timeseries) {
// const since = new Date(timeseries.since);
// return Promise.all([
// getPackageRequests(since),
// getPackageBandwidth(since),
// getProtocolRequests(since)
// ]).then(results => {
// timeseries.requests.package = results[0];
// timeseries.bandwidth.package = results[1];
// timeseries.requests.protocol = results[2];
// return timeseries;
// });
// }
// function sumMaps(maps) {
// return maps.reduce((memo, map) => {
// Object.keys(map).forEach(key => {
// memo[key] = (memo[key] || 0) + map[key];
// });
// return memo;
// }, {});
// }
// function addDailyMetrics(result) {
// return Promise.all(result.timeseries.map(addDailyMetricsToTimeseries)).then(
// () => {
// result.totals.requests.package = sumMaps(
// result.timeseries.map(timeseries => {
// return timeseries.requests.package;
// })
// );
// result.totals.bandwidth.package = sumMaps(
// result.timeseries.map(timeseries => timeseries.bandwidth.package)
// );
// result.totals.requests.protocol = sumMaps(
// result.timeseries.map(timeseries => timeseries.requests.protocol)
// );
// return result;
// }
// );
// }
// Projects a Cloudflare analytics object down to the fields we expose
// publicly (renaming http_status -> status and dropping everything else).
function extractPublicInfo(data) {
  const { requests, bandwidth, threats, uniques } = data;

  return {
    since: data.since,
    until: data.until,
    requests: {
      all: requests.all,
      cached: requests.cached,
      country: requests.country,
      status: requests.http_status
    },
    bandwidth: {
      all: bandwidth.all,
      cached: bandwidth.cached,
      country: bandwidth.country
    },
    threats: {
      all: threats.all,
      country: threats.country
    },
    uniques: {
      all: uniques.all
    }
  };
}
const DomainNames = ['unpkg.com', 'npmcdn.com'];
// Fetches the merged analytics dashboard for all of our domains over
// [since, until] and reduces it to the public timeseries/totals shape.
function fetchStats(since, until) {
  return cloudflare
    .getZones(DomainNames)
    .then(zones => cloudflare.getZoneAnalyticsDashboard(zones, since, until))
    .then(dashboard => ({
      timeseries: dashboard.timeseries.map(extractPublicInfo),
      totals: extractPublicInfo(dashboard.totals)
    }));
}
// const oneMinute = 1000 * 60;
// const oneHour = oneMinute * 60;
// const oneDay = oneHour * 24;
/**
 * Public entry point for stats over [since, until].
 * Daily redis-backed metrics are currently disabled:
 *   return until - since > oneDay ? promise.then(addDailyMetrics) : promise;
 */
export function getStats(since, until) {
  return fetchStats(since, until);
}