Experimental port to Firebase hosting

Michael Jackson
2019-01-05 16:50:05 -08:00
parent e4d6df255e
commit 31e7d3865a
300 changed files with 129300 additions and 5817 deletions

@@ -0,0 +1,39 @@
import * as auth from '../auth';
describe('Auth API', () => {
beforeEach(done => {
auth.removeAllRevokedTokens().then(() => done(), done);
});
it('creates tokens with the right scopes', done => {
const scopes = {
blacklist: {
add: true,
remove: true
}
};
auth.createToken(scopes).then(token => {
auth.verifyToken(token).then(payload => {
expect(payload.jti).toEqual(expect.any(String));
expect(payload.iss).toEqual(expect.any(String));
expect(payload.iat).toEqual(expect.any(Number));
expect(payload.scopes).toMatchObject(scopes);
done();
});
});
});
it('refuses to verify revoked tokens', done => {
const scopes = {};
auth.createToken(scopes).then(token => {
auth.revokeToken(token).then(() => {
auth.verifyToken(token).then(payload => {
expect(payload).toBe(null);
done();
});
});
});
});
});

@@ -0,0 +1,24 @@
import * as blacklist from '../blacklist';
describe('Blacklist API', () => {
beforeEach(done => {
blacklist.removeAllPackages().then(() => done(), done);
});
it('adds and removes packages to/from the blacklist', done => {
const packageName = 'bad-package';
blacklist.addPackage(packageName).then(() => {
blacklist.getPackages().then(packageNames => {
expect(packageNames).toEqual([packageName]);
blacklist.removePackage(packageName).then(() => {
blacklist.getPackages().then(packageNames => {
expect(packageNames).toEqual([]);
done();
});
});
});
});
});
});

@@ -1,4 +1,4 @@
-const createSearch = require('../createSearch');
+import createSearch from '../createSearch';
describe('createSearch', () => {
it('omits the trailing = for empty string values', () => {

@@ -1,4 +1,4 @@
-const getContentType = require('../getContentType');
+import getContentType from '../getContentType';
it('gets a content type of text/plain for LICENSE|README|CHANGES|AUTHORS|Makefile', () => {
expect(getContentType('AUTHORS')).toBe('text/plain');

@@ -1,4 +1,4 @@
-const parsePackageURL = require('../parsePackageURL');
+import parsePackageURL from '../parsePackageURL';
describe('parsePackageURL', () => {
it('parses plain packages', () => {

@@ -1,5 +1,3 @@
-function addLeadingSlash(name) {
+export default function addLeadingSlash(name) {
return name.charAt(0) === '/' ? name : '/' + name;
}
-module.exports = addLeadingSlash;

modules/utils/auth.js

@@ -0,0 +1,91 @@
import crypto from 'crypto';
import jwt from 'jsonwebtoken';
import data from './data';
import secretKey from '../secretKey';
function getCurrentSeconds() {
return Math.floor(Date.now() / 1000);
}
function createTokenId() {
return crypto.randomBytes(16).toString('hex');
}
export function createToken(scopes = {}) {
return new Promise((resolve, reject) => {
const payload = {
jti: createTokenId(),
iss: 'https://unpkg.com',
iat: getCurrentSeconds(),
scopes
};
jwt.sign(
payload,
secretKey.private,
{ algorithm: 'RS256' },
(error, token) => {
if (error) {
reject(error);
} else {
resolve(token);
}
}
);
});
}
const revokedTokensSet = 'revoked-tokens';
export function verifyToken(token) {
return new Promise((resolve, reject) => {
const options = { algorithms: ['RS256'] };
jwt.verify(token, secretKey.public, options, (error, payload) => {
if (error) {
reject(error);
} else {
if (payload.jti) {
data.sismember(revokedTokensSet, payload.jti, (error, value) => {
if (error) {
reject(error);
} else {
resolve(value === 0 ? payload : null);
}
});
} else {
resolve(null);
}
}
});
});
}
export function revokeToken(token) {
return verifyToken(token).then(payload => {
if (payload) {
return new Promise((resolve, reject) => {
data.sadd(revokedTokensSet, payload.jti, error => {
if (error) {
reject(error);
} else {
resolve();
}
});
});
}
});
}
export function removeAllRevokedTokens() {
return new Promise((resolve, reject) => {
data.del(revokedTokensSet, error => {
if (error) {
reject(error);
} else {
resolve();
}
});
});
}

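For context, a minimal usage sketch of the auth module above, assuming an async caller, a reachable Redis instance behind data.js, and a configured secretKey module; the import path and the scope shape are illustrative only.

import * as auth from './modules/utils/auth';

async function demo() {
  // Issue a token that is allowed to add/remove blacklist entries.
  const token = await auth.createToken({ blacklist: { add: true, remove: true } });

  // verifyToken resolves the signed payload while the token is valid...
  const payload = await auth.verifyToken(token);
  console.log(payload.jti, payload.scopes);

  // ...and resolves null once the token's jti lands in the revoked-tokens set.
  await auth.revokeToken(token);
  console.log(await auth.verifyToken(token)); // null
}
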
@@ -0,0 +1,63 @@
import data from './data';
const blacklistSet = 'blacklisted-packages';
export function addPackage(packageName) {
return new Promise((resolve, reject) => {
data.sadd(blacklistSet, packageName, (error, value) => {
if (error) {
reject(error);
} else {
resolve(value === 1);
}
});
});
}
export function removePackage(packageName) {
return new Promise((resolve, reject) => {
data.srem(blacklistSet, packageName, (error, value) => {
if (error) {
reject(error);
} else {
resolve(value === 1);
}
});
});
}
export function removeAllPackages() {
return new Promise((resolve, reject) => {
data.del(blacklistSet, error => {
if (error) {
reject(error);
} else {
resolve();
}
});
});
}
export function getPackages() {
return new Promise((resolve, reject) => {
data.smembers(blacklistSet, (error, value) => {
if (error) {
reject(error);
} else {
resolve(value);
}
});
});
}
export function includesPackage(packageName) {
return new Promise((resolve, reject) => {
data.sismember(blacklistSet, packageName, (error, value) => {
if (error) {
reject(error);
} else {
resolve(value === 1);
}
});
});
}

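In the same spirit, a hedged sketch of the blacklist module above; the package name is arbitrary, and the resolved booleans mirror the underlying Redis SADD/SREM/SISMEMBER replies.

import * as blacklist from './modules/utils/blacklist';

async function demo() {
  await blacklist.addPackage('bad-package'); // resolves true if newly added
  console.log(await blacklist.includesPackage('bad-package')); // true
  console.log(await blacklist.getPackages()); // ['bad-package']
  await blacklist.removePackage('bad-package'); // resolves true if it was present
}
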
@@ -1,4 +1,4 @@
-function bufferStream(stream) {
+export default function bufferStream(stream) {
return new Promise((resolve, reject) => {
const chunks = [];
@@ -8,5 +8,3 @@ function bufferStream(stream) {
.on('end', () => resolve(Buffer.concat(chunks)));
});
}
-module.exports = bufferStream;

@@ -1,23 +0,0 @@
const LRUCache = require('lru-cache');
const maxMegabytes = 40; // Cap the cache at 40 MB
const maxLength = maxMegabytes * 1024 * 1024;
const maxSeconds = 60;
const maxAge = maxSeconds * 1000;
const cache = new LRUCache({
max: maxLength,
maxAge: maxAge,
length: Buffer.byteLength
});
function get(key) {
return cache.get(key);
}
function setex(key, ttlSeconds, value) {
return cache.set(key, value, ttlSeconds * 1000);
}
module.exports = { get, setex };

@@ -0,0 +1,92 @@
import 'isomorphic-fetch';
import invariant from 'invariant';
import gunzip from 'gunzip-maybe';
import ndjson from 'ndjson';
const cloudflareURL = 'https://api.cloudflare.com/client/v4';
const cloudflareEmail = process.env.CLOUDFLARE_EMAIL;
const cloudflareKey = process.env.CLOUDFLARE_KEY;
invariant(
cloudflareEmail,
'Missing the $CLOUDFLARE_EMAIL environment variable'
);
invariant(cloudflareKey, 'Missing the $CLOUDFLARE_KEY environment variable');
export function get(path, headers) {
return fetch(`${cloudflareURL}${path}`, {
headers: Object.assign({}, headers, {
'X-Auth-Email': cloudflareEmail,
'X-Auth-Key': cloudflareKey
})
});
}
export function getJSON(path, headers) {
return get(path, headers)
.then(res => {
return res.json();
})
.then(data => {
if (!data.success) {
console.error(`cloudflare.getJSON failed at ${path}`);
console.error(data);
throw new Error('Failed to getJSON from Cloudflare');
}
return data.result;
});
}
export function getZones(domains) {
return Promise.all(
(Array.isArray(domains) ? domains : [domains]).map(domain =>
getJSON(`/zones?name=${domain}`)
)
).then(results => results.reduce((memo, zones) => memo.concat(zones)));
}
function reduceResults(target, values) {
Object.keys(values).forEach(key => {
const value = values[key];
if (typeof value === 'object' && value) {
target[key] = reduceResults(target[key] || {}, value);
} else if (typeof value === 'number') {
target[key] = (target[key] || 0) + values[key];
}
});
return target;
}
export function getZoneAnalyticsDashboard(zones, since, until) {
return Promise.all(
(Array.isArray(zones) ? zones : [zones]).map(zone => {
return getJSON(
`/zones/${
zone.id
}/analytics/dashboard?since=${since.toISOString()}&until=${until.toISOString()}`
);
})
).then(results => results.reduce(reduceResults));
}
export function getJSONStream(path, headers) {
const gzipHeaders = Object.assign({}, headers, {
'Accept-Encoding': 'gzip'
});
return get(path, gzipHeaders)
.then(res => res.body.pipe(gunzip()))
.then(stream => stream.pipe(ndjson.parse()));
}
export function getLogs(zoneId, startTime, endTime, fieldsArray) {
const fields = fieldsArray.join(',');
return getJSONStream(
`/zones/${zoneId}/logs/received?start=${startTime}&end=${endTime}&fields=${fields}`
);
}

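A sketch of how the Cloudflare helpers above might be combined, assuming $CLOUDFLARE_EMAIL and $CLOUDFLARE_KEY are set (the invariant calls require them) and an arbitrary one-day window.

import * as cloudflare from './modules/utils/cloudflare';

const until = new Date();
const since = new Date(until.getTime() - 24 * 60 * 60 * 1000);

cloudflare
  .getZones(['unpkg.com', 'npmcdn.com'])
  .then(zones => cloudflare.getZoneAnalyticsDashboard(zones, since, until))
  .then(dashboard => {
    // Totals are summed across both zones by reduceResults.
    console.log(dashboard.totals.requests.all, dashboard.totals.bandwidth.all);
  });
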
@@ -1,4 +1,9 @@
-function createPackageURL(packageName, version, pathname, search) {
+export default function createPackageURL(
+packageName,
+version,
+pathname,
+search
+) {
let url = `/${packageName}`;
if (version != null) url += `@${version}`;
@@ -7,5 +12,3 @@ function createPackageURL(packageName, version, pathname, search) {
return url;
}
-module.exports = createPackageURL;

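From the visible lines, the helper builds URLs like the ones below. The pathname and search handling sits in the elided middle of this hunk, so the second result is an assumption rather than something shown in the diff.

createPackageURL('react', '16.7.0');
// '/react@16.7.0'

createPackageURL('react', '16.7.0', '/umd/react.production.min.js', '?module');
// presumably '/react@16.7.0/umd/react.production.min.js?module'
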
@@ -1,4 +1,4 @@
-function createSearch(query) {
+export default function createSearch(query) {
const keys = Object.keys(query).sort();
const params = keys.reduce(
(memo, key) =>
@@ -12,5 +12,3 @@ function createSearch(query) {
return params.length ? `?${params.join('&')}` : '';
}
-module.exports = createSearch;

@@ -1,4 +1,4 @@
-const redis = require('redis');
+import redis from 'redis';
redis.debug_mode = process.env.DEBUG_REDIS != null;
@@ -6,4 +6,4 @@ const client = redis.createClient(
process.env.DATA_URL || process.env.OPENREDIS_URL || 'redis://localhost:6379'
);
-module.exports = client;
+export default client;

@@ -1,17 +1,16 @@
-const url = require('url');
-const https = require('https');
-const gunzip = require('gunzip-maybe');
-const tar = require('tar-stream');
+import url from 'url';
+import https from 'https';
+import gunzip from 'gunzip-maybe';
+import tar from 'tar-stream';
-const bufferStream = require('./bufferStream');
-const agent = require('./registryAgent');
-const logging = require('./logging');
+import bufferStream from './bufferStream';
+import agent from './registryAgent';
-function fetchNpmPackage(packageConfig) {
+export default function fetchNpmPackage(packageConfig) {
return new Promise((resolve, reject) => {
const tarballURL = packageConfig.dist.tarball;
-logging.debug(
+console.log(
'Fetching package for %s from %s',
packageConfig.name,
tarballURL
@@ -45,5 +44,3 @@ function fetchNpmPackage(packageConfig) {
.on('error', reject);
});
}
-module.exports = fetchNpmPackage;

@@ -1,25 +1,25 @@
-const url = require('url');
-const https = require('https');
+import url from 'url';
+import https from 'https';
-const serverConfig = require('../serverConfig');
-const bufferStream = require('./bufferStream');
-const agent = require('./registryAgent');
-const logging = require('./logging');
+import { npmRegistryURL } from '../config';
+import bufferStream from './bufferStream';
+import agent from './registryAgent';
function parseJSON(res) {
return bufferStream(res).then(JSON.parse);
}
-function fetchNpmPackageInfo(packageName) {
+export default function fetchNpmPackageInfo(packageName) {
return new Promise((resolve, reject) => {
const encodedPackageName =
packageName.charAt(0) === '@'
? `@${encodeURIComponent(packageName.substring(1))}`
: encodeURIComponent(packageName);
-const infoURL = `${serverConfig.registryURL}/${encodedPackageName}`;
+const infoURL = `${npmRegistryURL}/${encodedPackageName}`;
-logging.debug('Fetching package info for %s from %s', packageName, infoURL);
+console.log('Fetching package info for %s from %s', packageName, infoURL);
const { hostname, pathname } = url.parse(infoURL);
const options = {
@@ -53,5 +53,3 @@ function fetchNpmPackageInfo(packageName) {
.on('error', reject);
});
}
-module.exports = fetchNpmPackageInfo;

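One detail in fetchNpmPackageInfo worth calling out: scoped names keep their leading @ but percent-encode the inner slash, which is the form the npm registry expects. Tracing the same expression with a sample name:

const packageName = '@babel/core';
const encodedPackageName =
  packageName.charAt(0) === '@'
    ? `@${encodeURIComponent(packageName.substring(1))}`
    : encodeURIComponent(packageName);
// encodedPackageName === '@babel%2Fcore'
// so infoURL becomes `${npmRegistryURL}/@babel%2Fcore`
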
@@ -1,22 +1,23 @@
-const mime = require('mime');
+import mime from 'mime';
-mime.define({
-'text/plain': [
-'authors',
-'changes',
-'license',
-'makefile',
-'patents',
-'readme',
-'ts',
-'flow'
-]
-});
+mime.define(
+{
+'text/plain': [
+'authors',
+'changes',
+'license',
+'makefile',
+'patents',
+'readme',
+'ts',
+'flow'
+]
+},
+/* force */ true
+);
const textFiles = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore|\.lock)$/i;
-function getContentType(file) {
-return textFiles.test(file) ? 'text/plain' : mime.lookup(file);
+export default function getContentType(file) {
+return textFiles.test(file) ? 'text/plain' : mime.getType(file);
}
-module.exports = getContentType;

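A few expected results for the updated helper, assuming mime@2 behavior for getType (an extensionless name like README is looked up by its whole lowercased basename, which is why the forced text/plain list above takes effect):

getContentType('README'); // 'text/plain' (from the mime.define list)
getContentType('.babelrc'); // 'text/plain' (matches the textFiles regex)
getContentType('index.js'); // 'application/javascript'
getContentType('logo.svg'); // 'image/svg+xml'
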
@@ -1,5 +1,3 @@
-function getContentTypeHeader(type) {
+export default function getContentTypeHeader(type) {
return type === 'application/javascript' ? type + '; charset=utf-8' : type;
}
-module.exports = getContentTypeHeader;

@@ -1,7 +1,5 @@
-const SRIToolbox = require('sri-toolbox');
+import SRIToolbox from 'sri-toolbox';
-function getIntegrity(data) {
+export default function getIntegrity(data) {
return SRIToolbox.generate({ algorithms: ['sha384'] }, data);
}
-module.exports = getIntegrity;

@@ -1,19 +1,21 @@
-const cache = require('./cache');
-const fetchNpmPackageInfo = require('./fetchNpmPackageInfo');
+import LRUCache from 'lru-cache';
+import fetchNpmPackageInfo from './fetchNpmPackageInfo';
+const maxMegabytes = 40; // Cap the cache at 40 MB
+const maxLength = maxMegabytes * 1024 * 1024;
+const oneSecond = 1000;
+const oneMinute = 60 * oneSecond;
+const cache = new LRUCache({
+max: maxLength,
+maxAge: oneMinute,
+length: Buffer.byteLength
+});
const notFound = '';
-function cleanPackageInfo(packageInfo) {
-return {
-versions: Object.keys(packageInfo.versions).reduce((memo, key) => {
-memo[key] = packageInfo.versions[key];
-return memo;
-}, {}),
-'dist-tags': packageInfo['dist-tags']
-};
-}
-function getNpmPackageInfo(packageName) {
+export default function getNpmPackageInfo(packageName) {
return new Promise((resolve, reject) => {
const key = `npmPackageInfo-${packageName}`;
const value = cache.get(key);
@@ -21,25 +23,21 @@ function getNpmPackageInfo(packageName) {
if (value != null) {
resolve(value === notFound ? null : JSON.parse(value));
} else {
-fetchNpmPackageInfo(packageName).then(value => {
-if (value == null) {
+fetchNpmPackageInfo(packageName).then(info => {
+if (info == null) {
// Cache 404s for 5 minutes. This prevents us from making
// unnecessary requests to the registry for bad package names.
// In the worst case, a brand new package's info will be
// available within 5 minutes.
-cache.setex(key, 300, notFound);
+cache.set(key, notFound, oneMinute * 5);
resolve(null);
} else {
-value = cleanPackageInfo(value);
// Cache valid package info for 1 minute. In the worst case,
// new versions won't be available for 1 minute.
-cache.setex(key, 60, JSON.stringify(value));
-resolve(value);
+cache.set(key, JSON.stringify(info), oneMinute);
+resolve(info);
}
}, reject);
}
});
}
-module.exports = getNpmPackageInfo;

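A hedged usage sketch of the new in-process cache path; the versions and dist-tags fields come from the npm registry payload, not from this commit, and the import path is illustrative.

import getNpmPackageInfo from './modules/utils/getNpmPackageInfo';

getNpmPackageInfo('react').then(info => {
  if (info == null) {
    console.log('not found (cached as a 404 for 5 minutes)');
  } else {
    console.log(info['dist-tags'].latest, Object.keys(info.versions).length);
  }
});
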
@@ -1,6 +1,6 @@
-const db = require('./data');
+import db from './data';
-function incrementCounter(counter, key, by = 1) {
+export default function incrementCounter(counter, key, by = 1) {
return new Promise((resolve, reject) => {
db.hincrby(counter, key, by, (error, value) => {
if (error) {
@@ -11,5 +11,3 @@ function incrementCounter(counter, key, by = 1) {
});
});
}
-module.exports = incrementCounter;

@@ -1,7 +1,5 @@
-const validateNpmPackageName = require('validate-npm-package-name');
+import validateNpmPackageName from 'validate-npm-package-name';
-function isValidPackageName(packageName) {
+export default function isValidPackageName(packageName) {
return validateNpmPackageName(packageName).errors == null;
}
-module.exports = isValidPackageName;

@@ -1,24 +0,0 @@
const log = console.log.bind(console);
function noop() {}
let debug, info, warn;
if (process.env.LOG_LEVEL === 'none') {
debug = info = warn = noop;
} else if (process.env.LOG_LEVEL === 'debug') {
debug = info = warn = log;
} else if (process.env.LOG_LEVEL === 'warn') {
debug = info = noop;
warn = log;
} else {
// default LOG_LEVEL = "info"
debug = noop;
info = warn = log;
}
module.exports = {
debug,
info,
warn
};

@@ -1,4 +1,4 @@
-const url = require('url');
+import url from 'url';
const packageURLFormat = /^\/((?:@[^/@]+\/)?[^/@]+)(?:@([^/]+))?(\/.*)?$/;
@@ -14,7 +14,7 @@ function decodeParam(param) {
return '';
}
-function parsePackageURL(originalURL) {
+export default function parsePackageURL(originalURL) {
const { pathname, search, query } = url.parse(originalURL, true);
const match = packageURLFormat.exec(pathname);
@@ -37,5 +37,3 @@ function parsePackageURL(originalURL) {
filename // /file.js
};
}
-module.exports = parsePackageURL;

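To make the packageURLFormat regex above concrete, here is what its three capture groups yield for two sample URLs (the version numbers are arbitrary; the shape of the returned object is mostly elided in this hunk, so only the raw match is shown):

const packageURLFormat = /^\/((?:@[^/@]+\/)?[^/@]+)(?:@([^/]+))?(\/.*)?$/;

packageURLFormat.exec('/react@16.7.0/umd/react.production.min.js');
// groups: 'react', '16.7.0', '/umd/react.production.min.js'

packageURLFormat.exec('/@babel/core@7.2.2/lib/index.js');
// groups: '@babel/core', '7.2.2', '/lib/index.js'
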
@@ -1,7 +1,7 @@
-const https = require('https');
+import https from 'https';
const agent = new https.Agent({
keepAlive: true
});
-module.exports = agent;
+export default agent;

@@ -1,11 +1,11 @@
-const React = require('react');
-const ReactDOMServer = require('react-dom/server');
+import React from 'react';
+import ReactDOMServer from 'react-dom/server';
const doctype = '<!DOCTYPE html>';
-function renderPage(page, props) {
-const element = React.createElement(page, props);
-return doctype + ReactDOMServer.renderToStaticMarkup(element);
+export default function renderPage(page, props) {
+return (
+doctype +
+ReactDOMServer.renderToStaticMarkup(React.createElement(page, props))
+);
}
-module.exports = renderPage;

@@ -1,8 +1,8 @@
-const babel = require('babel-core');
+import babel from '@babel/core';
-const unpkgRewrite = require('../plugins/unpkgRewrite');
+import unpkgRewrite from '../plugins/unpkgRewrite';
-function rewriteBareModuleIdentifiers(code, packageConfig) {
+export default function rewriteBareModuleIdentifiers(code, packageConfig) {
const dependencies = Object.assign(
{},
packageConfig.peerDependencies,
@@ -19,5 +19,3 @@ function rewriteBareModuleIdentifiers(code, packageConfig) {
return babel.transform(code, options).code;
}
-module.exports = rewriteBareModuleIdentifiers;

modules/utils/stats.js

@@ -0,0 +1,162 @@
// import data from './data';
import * as cloudflare from './cloudflare';
// import * as blacklist from './blacklist';
// function prunePackages(packagesMap) {
// return Promise.all(
// Object.keys(packagesMap).map(packageName =>
// blacklist.includesPackage(packageName).then(blacklisted => {
// if (blacklisted) {
// delete packagesMap[packageName];
// }
// })
// )
// ).then(() => packagesMap);
// }
// export function createDayKey(date) {
// return `${date.getUTCFullYear()}-${date.getUTCMonth()}-${date.getUTCDate()}`;
// }
// export function createHourKey(date) {
// return `${createDayKey(date)}-${date.getUTCHours()}`;
// }
// export function createMinuteKey(date) {
// return `${createHourKey(date)}-${date.getUTCMinutes()}`;
// }
// function createScoresMap(array) {
// const map = {};
// for (let i = 0; i < array.length; i += 2) {
// map[array[i]] = parseInt(array[i + 1], 10);
// }
// return map;
// }
// function getScoresMap(key, n = 100) {
// return new Promise((resolve, reject) => {
// data.zrevrange(key, 0, n, 'withscores', (error, value) => {
// if (error) {
// reject(error);
// } else {
// resolve(createScoresMap(value));
// }
// });
// });
// }
// function getPackageRequests(date, n = 100) {
// return getScoresMap(`stats-packageRequests-${createDayKey(date)}`, n).then(
// prunePackages
// );
// }
// function getPackageBandwidth(date, n = 100) {
// return getScoresMap(`stats-packageBytes-${createDayKey(date)}`, n).then(
// prunePackages
// );
// }
// function getProtocolRequests(date) {
// return getScoresMap(`stats-protocolRequests-${createDayKey(date)}`);
// }
// function addDailyMetricsToTimeseries(timeseries) {
// const since = new Date(timeseries.since);
// return Promise.all([
// getPackageRequests(since),
// getPackageBandwidth(since),
// getProtocolRequests(since)
// ]).then(results => {
// timeseries.requests.package = results[0];
// timeseries.bandwidth.package = results[1];
// timeseries.requests.protocol = results[2];
// return timeseries;
// });
// }
// function sumMaps(maps) {
// return maps.reduce((memo, map) => {
// Object.keys(map).forEach(key => {
// memo[key] = (memo[key] || 0) + map[key];
// });
// return memo;
// }, {});
// }
// function addDailyMetrics(result) {
// return Promise.all(result.timeseries.map(addDailyMetricsToTimeseries)).then(
// () => {
// result.totals.requests.package = sumMaps(
// result.timeseries.map(timeseries => {
// return timeseries.requests.package;
// })
// );
// result.totals.bandwidth.package = sumMaps(
// result.timeseries.map(timeseries => timeseries.bandwidth.package)
// );
// result.totals.requests.protocol = sumMaps(
// result.timeseries.map(timeseries => timeseries.requests.protocol)
// );
// return result;
// }
// );
// }
function extractPublicInfo(data) {
return {
since: data.since,
until: data.until,
requests: {
all: data.requests.all,
cached: data.requests.cached,
country: data.requests.country,
status: data.requests.http_status
},
bandwidth: {
all: data.bandwidth.all,
cached: data.bandwidth.cached,
country: data.bandwidth.country
},
threats: {
all: data.threats.all,
country: data.threats.country
},
uniques: {
all: data.uniques.all
}
};
}
const DomainNames = ['unpkg.com', 'npmcdn.com'];
function fetchStats(since, until) {
return cloudflare.getZones(DomainNames).then(zones => {
return cloudflare
.getZoneAnalyticsDashboard(zones, since, until)
.then(dashboard => {
return {
timeseries: dashboard.timeseries.map(extractPublicInfo),
totals: extractPublicInfo(dashboard.totals)
};
});
});
}
// const oneMinute = 1000 * 60;
// const oneHour = oneMinute * 60;
// const oneDay = oneHour * 24;
export function getStats(since, until) {
const promise = fetchStats(since, until);
// return until - since > oneDay ? promise.then(addDailyMetrics) : promise;
return promise;
}
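
Finally, a small sketch of how the remaining live path in stats.js might be called; since and until are Dates (getZoneAnalyticsDashboard calls toISOString() on them), the 7-day window is arbitrary, and the Cloudflare credentials must be configured as above.

import { getStats } from './modules/utils/stats';

const until = new Date();
const since = new Date(until.getTime() - 7 * 24 * 60 * 60 * 1000);

getStats(since, until).then(({ timeseries, totals }) => {
  console.log(`${totals.requests.all} requests, ${totals.bandwidth.all} bytes over ${timeseries.length} buckets`);
});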