Prettify everything

MICHAEL JACKSON 2018-02-17 18:00:56 -08:00
parent d6f2bc089a
commit 2e1f09e913
58 changed files with 1061 additions and 932 deletions
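All of the changes below are mechanical formatting output. As a rough, hypothetical sketch (not this commit's actual tooling or file paths), a pass like this can be reproduced with Prettier's Node API; the `babylon` parser and the synchronous `format` call assume a Prettier 1.x-era release:

```js
// Hypothetical illustration of the kind of pass that produced this diff.
const prettier = require("prettier");

// Pre-commit style: no semicolons, one long JSX line (taken from the first file below).
const source = [
  'import React from "react"',
  'import contentHTML from "./About.md"',
  'const About = () => <div className="wrapper" dangerouslySetInnerHTML={{ __html: contentHTML }} />',
  "export default About"
].join("\n");

// "babylon" was the default JavaScript parser in Prettier 1.x (an assumption
// about the version in use in early 2018).
const formatted = prettier.format(source, { parser: "babylon" });

// Prints the post-commit style: semicolons added and the long JSX wrapped in
// parentheses, matching the first file in this diff.
console.log(formatted);
```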

View File

@@ -1,6 +1,8 @@
import React from "react" import React from "react";
import contentHTML from "./About.md" import contentHTML from "./About.md";
const About = () => <div className="wrapper" dangerouslySetInnerHTML={{ __html: contentHTML }} /> const About = () => (
<div className="wrapper" dangerouslySetInnerHTML={{ __html: contentHTML }} />
);
export default About export default About;

View File

@@ -1,11 +1,11 @@
import React from "react" import React from "react";
import { HashRouter } from "react-router-dom" import { HashRouter } from "react-router-dom";
import Layout from "./Layout" import Layout from "./Layout";
const App = () => ( const App = () => (
<HashRouter> <HashRouter>
<Layout /> <Layout />
</HashRouter> </HashRouter>
) );
export default App export default App;

View File

@@ -1,6 +1,8 @@
import React from "react" import React from "react";
import contentHTML from "./Home.md" import contentHTML from "./Home.md";
const Home = () => <div className="wrapper" dangerouslySetInnerHTML={{ __html: contentHTML }} /> const Home = () => (
<div className="wrapper" dangerouslySetInnerHTML={{ __html: contentHTML }} />
);
export default Home export default Home;

View File

@@ -1,78 +1,81 @@
import React from "react" import React from "react";
import PropTypes from "prop-types" import PropTypes from "prop-types";
import { Motion, spring } from "react-motion" import { Motion, spring } from "react-motion";
import { Switch, Route, Link, withRouter } from "react-router-dom" import { Switch, Route, Link, withRouter } from "react-router-dom";
import WindowSize from "./WindowSize" import WindowSize from "./WindowSize";
import About from "./About" import About from "./About";
import Stats from "./Stats" import Stats from "./Stats";
import Home from "./Home" import Home from "./Home";
class Layout extends React.Component { class Layout extends React.Component {
static propTypes = { static propTypes = {
location: PropTypes.object, location: PropTypes.object,
children: PropTypes.node children: PropTypes.node
} };
state = { state = {
underlineLeft: 0, underlineLeft: 0,
underlineWidth: 0, underlineWidth: 0,
useSpring: false, useSpring: false,
stats: null stats: null
} };
adjustUnderline = (useSpring = false) => { adjustUnderline = (useSpring = false) => {
let itemIndex let itemIndex;
switch (this.props.location.pathname) { switch (this.props.location.pathname) {
case "/stats": case "/stats":
itemIndex = 1 itemIndex = 1;
break break;
case "/about": case "/about":
itemIndex = 2 itemIndex = 2;
break break;
case "/": case "/":
default: default:
itemIndex = 0 itemIndex = 0;
} }
const itemNodes = this.listNode.querySelectorAll("li") const itemNodes = this.listNode.querySelectorAll("li");
const currentNode = itemNodes[itemIndex] const currentNode = itemNodes[itemIndex];
this.setState({ this.setState({
underlineLeft: currentNode.offsetLeft, underlineLeft: currentNode.offsetLeft,
underlineWidth: currentNode.offsetWidth, underlineWidth: currentNode.offsetWidth,
useSpring useSpring
}) });
} };
componentDidMount() { componentDidMount() {
this.adjustUnderline() this.adjustUnderline();
fetch("/_stats?period=last-month") fetch("/_stats?period=last-month")
.then(res => res.json()) .then(res => res.json())
.then(stats => this.setState({ stats })) .then(stats => this.setState({ stats }));
if (window.localStorage) { if (window.localStorage) {
const savedStats = window.localStorage.savedStats const savedStats = window.localStorage.savedStats;
if (savedStats) this.setState({ stats: JSON.parse(savedStats) }) if (savedStats) this.setState({ stats: JSON.parse(savedStats) });
window.onbeforeunload = () => { window.onbeforeunload = () => {
localStorage.savedStats = JSON.stringify(this.state.stats) localStorage.savedStats = JSON.stringify(this.state.stats);
} };
} }
} }
componentDidUpdate(prevProps) { componentDidUpdate(prevProps) {
if (prevProps.location.pathname !== this.props.location.pathname) this.adjustUnderline(true) if (prevProps.location.pathname !== this.props.location.pathname)
this.adjustUnderline(true);
} }
render() { render() {
const { underlineLeft, underlineWidth, useSpring } = this.state const { underlineLeft, underlineWidth, useSpring } = this.state;
const style = { const style = {
left: useSpring ? spring(underlineLeft, { stiffness: 220 }) : underlineLeft, left: useSpring
? spring(underlineLeft, { stiffness: 220 })
: underlineLeft,
width: useSpring ? spring(underlineWidth) : underlineWidth width: useSpring ? spring(underlineWidth) : underlineWidth
} };
return ( return (
<div> <div>
@@ -81,7 +84,10 @@ class Layout extends React.Component {
<header> <header>
<h1 className="layout-title">unpkg</h1> <h1 className="layout-title">unpkg</h1>
<nav className="layout-nav"> <nav className="layout-nav">
<ol className="layout-nav-list" ref={node => (this.listNode = node)}> <ol
className="layout-nav-list"
ref={node => (this.listNode = node)}
>
<li> <li>
<Link to="/">Home</Link> <Link to="/">Home</Link>
</li> </li>
@@ -111,13 +117,16 @@ class Layout extends React.Component {
</div> </div>
<Switch> <Switch>
<Route path="/stats" render={() => <Stats data={this.state.stats} />} /> <Route
path="/stats"
render={() => <Stats data={this.state.stats} />}
/>
<Route path="/about" component={About} /> <Route path="/about" component={About} />
<Route path="/" component={Home} /> <Route path="/" component={Home} />
</Switch> </Switch>
</div> </div>
) );
} }
} }
export default withRouter(Layout) export default withRouter(Layout);

View File

@@ -1,51 +1,55 @@
import React from "react" import React from "react";
import PropTypes from "prop-types" import PropTypes from "prop-types";
import formatBytes from "pretty-bytes" import formatBytes from "pretty-bytes";
import formatDate from "date-fns/format" import formatDate from "date-fns/format";
import parseDate from "date-fns/parse" import parseDate from "date-fns/parse";
import formatNumber from "./utils/formatNumber" import formatNumber from "./utils/formatNumber";
import formatPercent from "./utils/formatPercent" import formatPercent from "./utils/formatPercent";
import { continents, countries } from "countries-list" import { continents, countries } from "countries-list";
const getCountriesByContinent = continent => const getCountriesByContinent = continent =>
Object.keys(countries).filter(country => countries[country].continent === continent) Object.keys(countries).filter(
country => countries[country].continent === continent
);
const sumKeyValues = (hash, keys) => keys.reduce((n, key) => n + (hash[key] || 0), 0) const sumKeyValues = (hash, keys) =>
keys.reduce((n, key) => n + (hash[key] || 0), 0);
const sumValues = hash => Object.keys(hash).reduce((memo, key) => memo + hash[key], 0) const sumValues = hash =>
Object.keys(hash).reduce((memo, key) => memo + hash[key], 0);
class Stats extends React.Component { class Stats extends React.Component {
static propTypes = { static propTypes = {
data: PropTypes.object data: PropTypes.object
} };
state = { state = {
minPackageRequests: 1000000, minPackageRequests: 1000000,
minCountryRequests: 1000000 minCountryRequests: 1000000
} };
render() { render() {
const { data } = this.props const { data } = this.props;
if (data == null) return null if (data == null) return null;
const totals = data.totals const totals = data.totals;
// Summary data // Summary data
const since = parseDate(totals.since) const since = parseDate(totals.since);
const until = parseDate(totals.until) const until = parseDate(totals.until);
// Packages // Packages
const packageRows = [] const packageRows = [];
Object.keys(totals.requests.package) Object.keys(totals.requests.package)
.sort((a, b) => { .sort((a, b) => {
return totals.requests.package[b] - totals.requests.package[a] return totals.requests.package[b] - totals.requests.package[a];
}) })
.forEach(packageName => { .forEach(packageName => {
const requests = totals.requests.package[packageName] const requests = totals.requests.package[packageName];
const bandwidth = totals.bandwidth.package[packageName] const bandwidth = totals.bandwidth.package[packageName];
if (requests >= this.state.minPackageRequests) { if (requests >= this.state.minPackageRequests) {
packageRows.push( packageRows.push(
@@ -59,44 +63,51 @@ class Stats extends React.Component {
</a> </a>
</td> </td>
<td> <td>
{formatNumber(requests)} ({formatPercent(requests / totals.requests.all)}%) {formatNumber(requests)} ({formatPercent(
requests / totals.requests.all
)}%)
</td> </td>
{bandwidth ? ( {bandwidth ? (
<td> <td>
{formatBytes(bandwidth)} ({formatPercent(bandwidth / totals.bandwidth.all)}%) {formatBytes(bandwidth)} ({formatPercent(
bandwidth / totals.bandwidth.all
)}%)
</td> </td>
) : ( ) : (
<td>-</td> <td>-</td>
)} )}
</tr> </tr>
) );
} }
}) });
// Regions // Regions
const regionRows = [] const regionRows = [];
const continentsData = Object.keys(continents).reduce((memo, continent) => { const continentsData = Object.keys(continents).reduce((memo, continent) => {
const localCountries = getCountriesByContinent(continent) const localCountries = getCountriesByContinent(continent);
memo[continent] = { memo[continent] = {
countries: localCountries, countries: localCountries,
requests: sumKeyValues(totals.requests.country, localCountries), requests: sumKeyValues(totals.requests.country, localCountries),
bandwidth: sumKeyValues(totals.bandwidth.country, localCountries) bandwidth: sumKeyValues(totals.bandwidth.country, localCountries)
} };
return memo return memo;
}, {}) }, {});
const topContinents = Object.keys(continentsData).sort((a, b) => { const topContinents = Object.keys(continentsData).sort((a, b) => {
return continentsData[b].requests - continentsData[a].requests return continentsData[b].requests - continentsData[a].requests;
}) });
topContinents.forEach(continent => { topContinents.forEach(continent => {
const continentName = continents[continent] const continentName = continents[continent];
const continentData = continentsData[continent] const continentData = continentsData[continent];
if (continentData.requests > this.state.minCountryRequests && continentData.bandwidth !== 0) { if (
continentData.requests > this.state.minCountryRequests &&
continentData.bandwidth !== 0
) {
regionRows.push( regionRows.push(
<tr key={continent} className="continent-row"> <tr key={continent} className="continent-row">
<td>{continentName}</td> <td>{continentName}</td>
@@ -111,15 +122,15 @@ class Stats extends React.Component {
)}%) )}%)
</td> </td>
</tr> </tr>
) );
const topCountries = continentData.countries.sort((a, b) => { const topCountries = continentData.countries.sort((a, b) => {
return totals.requests.country[b] - totals.requests.country[a] return totals.requests.country[b] - totals.requests.country[a];
}) });
topCountries.forEach(country => { topCountries.forEach(country => {
const countryRequests = totals.requests.country[country] const countryRequests = totals.requests.country[country];
const countryBandwidth = totals.bandwidth.country[country] const countryBandwidth = totals.bandwidth.country[country];
if (countryRequests > this.state.minCountryRequests) { if (countryRequests > this.state.minCountryRequests) {
regionRows.push( regionRows.push(
@@ -136,19 +147,19 @@ class Stats extends React.Component {
)}%) )}%)
</td> </td>
</tr> </tr>
) );
} }
}) });
} }
}) });
// Protocols // Protocols
const protocolRows = Object.keys(totals.requests.protocol) const protocolRows = Object.keys(totals.requests.protocol)
.sort((a, b) => { .sort((a, b) => {
return totals.requests.protocol[b] - totals.requests.protocol[a] return totals.requests.protocol[b] - totals.requests.protocol[a];
}) })
.map(protocol => { .map(protocol => {
const requests = totals.requests.protocol[protocol] const requests = totals.requests.protocol[protocol];
return ( return (
<tr key={protocol}> <tr key={protocol}>
@@ -159,19 +170,22 @@ class Stats extends React.Component {
)}%) )}%)
</td> </td>
</tr> </tr>
) );
}) });
return ( return (
<div className="wrapper"> <div className="wrapper">
<p> <p>
From <strong>{formatDate(since, "MMM D")}</strong> to{" "} From <strong>{formatDate(since, "MMM D")}</strong> to{" "}
<strong>{formatDate(until, "MMM D")}</strong> unpkg served{" "} <strong>{formatDate(until, "MMM D")}</strong> unpkg served{" "}
<strong>{formatNumber(totals.requests.all)}</strong> requests and a total of{" "} <strong>{formatNumber(totals.requests.all)}</strong> requests and a
<strong>{formatBytes(totals.bandwidth.all)}</strong> of data to{" "} total of <strong>{formatBytes(totals.bandwidth.all)}</strong> of data
<strong>{formatNumber(totals.uniques.all)}</strong> unique visitors,{" "} to <strong>{formatNumber(totals.uniques.all)}</strong> unique
<strong>{formatPercent(totals.requests.cached / totals.requests.all, 0)}%</strong> of visitors,{" "}
which were served from the cache. <strong>
{formatPercent(totals.requests.cached / totals.requests.all, 0)}%
</strong>{" "}
of which were served from the cache.
</p> </p>
<h3>Packages</h3> <h3>Packages</h3>
@@ -241,7 +255,12 @@ class Stats extends React.Component {
requests. requests.
</p> </p>
<table cellSpacing="0" cellPadding="0" style={{ width: "100%" }} className="regions-table"> <table
cellSpacing="0"
cellPadding="0"
style={{ width: "100%" }}
className="regions-table"
>
<thead> <thead>
<tr> <tr>
<th>Region</th> <th>Region</th>
@@ -270,8 +289,8 @@ class Stats extends React.Component {
<tbody>{protocolRows}</tbody> <tbody>{protocolRows}</tbody>
</table> </table>
</div> </div>
) );
} }
} }
export default Stats export default Stats;

View File

@@ -1,34 +1,34 @@
import React from "react" import React from "react";
import PropTypes from "prop-types" import PropTypes from "prop-types";
import addEvent from "./utils/addEvent" import addEvent from "./utils/addEvent";
import removeEvent from "./utils/removeEvent" import removeEvent from "./utils/removeEvent";
const ResizeEvent = "resize" const ResizeEvent = "resize";
class WindowSize extends React.Component { class WindowSize extends React.Component {
static propTypes = { static propTypes = {
onChange: PropTypes.func onChange: PropTypes.func
} };
handleWindowResize = () => { handleWindowResize = () => {
if (this.props.onChange) if (this.props.onChange)
this.props.onChange({ this.props.onChange({
width: window.innerWidth, width: window.innerWidth,
height: window.innerHeight height: window.innerHeight
}) });
} };
componentDidMount() { componentDidMount() {
addEvent(window, ResizeEvent, this.handleWindowResize) addEvent(window, ResizeEvent, this.handleWindowResize);
} }
componentWillUnmount() { componentWillUnmount() {
removeEvent(window, ResizeEvent, this.handleWindowResize) removeEvent(window, ResizeEvent, this.handleWindowResize);
} }
render() { render() {
return null return null;
} }
} }
export default WindowSize export default WindowSize;

View File

@@ -1,12 +1,7 @@
body { body {
font-size: 16px; font-size: 16px;
font-family: -apple-system, font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica,
BlinkMacSystemFont, Arial, sans-serif;
"Segoe UI",
Roboto,
Helvetica,
Arial,
sans-serif;
line-height: 1.5; line-height: 1.5;
padding: 5px 20px; padding: 5px 20px;
} }
@@ -50,7 +45,8 @@ th {
text-align: left; text-align: left;
background-color: #eee; background-color: #eee;
} }
th, td { th,
td {
padding: 5px; padding: 5px;
} }
th { th {

View File

@@ -1,6 +1,6 @@
import React from "react" import React from "react";
import ReactDOM from "react-dom" import ReactDOM from "react-dom";
import App from "./App" import App from "./App";
import "./main.css" import "./main.css";
ReactDOM.render(<App />, document.getElementById("app")) ReactDOM.render(<App />, document.getElementById("app"));

View File

@@ -1,9 +1,9 @@
const addEvent = (node, type, handler) => { const addEvent = (node, type, handler) => {
if (node.addEventListener) { if (node.addEventListener) {
node.addEventListener(type, handler, false) node.addEventListener(type, handler, false);
} else if (node.attachEvent) { } else if (node.attachEvent) {
node.attachEvent("on" + type, handler) node.attachEvent("on" + type, handler);
} }
} };
export default addEvent export default addEvent;

View File

@@ -1,10 +1,10 @@
const formatNumber = n => { const formatNumber = n => {
const digits = String(n).split("") const digits = String(n).split("");
const groups = [] const groups = [];
while (digits.length) groups.unshift(digits.splice(-3).join("")) while (digits.length) groups.unshift(digits.splice(-3).join(""));
return groups.join(",") return groups.join(",");
} };
export default formatNumber export default formatNumber;

View File

@@ -1,3 +1,4 @@
const formatPercent = (n, fixed = 1) => String((n.toPrecision(2) * 100).toFixed(fixed)) const formatPercent = (n, fixed = 1) =>
String((n.toPrecision(2) * 100).toFixed(fixed));
export default formatPercent export default formatPercent;

View File

@@ -1,3 +1,3 @@
const parseNumber = s => parseInt(s.replace(/,/g, ""), 10) || 0 const parseNumber = s => parseInt(s.replace(/,/g, ""), 10) || 0;
export default parseNumber export default parseNumber;

View File

@@ -1,9 +1,9 @@
const removeEvent = (node, type, handler) => { const removeEvent = (node, type, handler) => {
if (node.removeEventListener) { if (node.removeEventListener) {
node.removeEventListener(type, handler, false) node.removeEventListener(type, handler, false);
} else if (node.detachEvent) { } else if (node.detachEvent) {
node.detachEvent("on" + type, handler) node.detachEvent("on" + type, handler);
} }
} };
export default removeEvent export default removeEvent;

View File

@@ -4,8 +4,8 @@ Some API methods require an authentication token. This token is a [JSON web toke
Once you obtain an API token (see below) you can pass it to the server in one of two ways: Once you obtain an API token (see below) you can pass it to the server in one of two ways:
- For GET/HEAD requests, use the `?token` query parameter * For GET/HEAD requests, use the `?token` query parameter
- For all other requests, use the `{token}` parameter as part of the JSON in the request body * For all other requests, use the `{token}` parameter as part of the JSON in the request body
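As an illustration of both options, a minimal sketch using the endpoints documented in this file and exercised in the server tests below (the `https://unpkg.com` base URL, the environment-variable token, and the `bad-package` name are assumptions for the example; the `?token` query parameter and the JSON `token`/`packageName` body fields come from this documentation):

```js
require("isomorphic-fetch");

// A token previously obtained from POST /_auth (hypothetical env var).
const token = process.env.UNPKG_API_TOKEN;

// GET/HEAD requests: pass the token in the `?token` query parameter.
fetch(`https://unpkg.com/_auth?token=${token}`)
  .then(res => res.json())
  .then(auth => console.log(auth));

// All other requests: pass the token as part of the JSON request body.
fetch("https://unpkg.com/_blacklist", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ token, packageName: "bad-package" })
})
  .then(res => res.json())
  .then(result => console.log(result));
```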
### POST /\_auth ### POST /\_auth
@@ -40,7 +40,7 @@ Required scope: none
Query parameters: Query parameters:
- `token` - The auth token to verify and decode * `token` - The auth token to verify and decode
Example: Example:
@@ -102,8 +102,8 @@ Required scope: `blacklist.add`
Body parameters: Body parameters:
- `token` - The auth token * `token` - The auth token
- `packageName` - The package to add to the blacklist * `packageName` - The package to add to the blacklist
Example: Example:
@@ -122,7 +122,7 @@ Required scope: `blacklist.remove`
Body parameters: Body parameters:
- `token` - The auth token * `token` - The auth token
Example: Example:

View File

@@ -1,23 +1,23 @@
const AuthAPI = require("../server/AuthAPI") const AuthAPI = require("../server/AuthAPI");
const scopes = { const scopes = {
blacklist: { blacklist: {
read: true read: true
} }
} };
AuthAPI.createToken(scopes).then( AuthAPI.createToken(scopes).then(
token => { token => {
// Verify it, just to be sure. // Verify it, just to be sure.
AuthAPI.verifyToken(token).then(payload => { AuthAPI.verifyToken(token).then(payload => {
console.log(token, "\n") console.log(token, "\n");
console.log(JSON.stringify(payload, null, 2), "\n") console.log(JSON.stringify(payload, null, 2), "\n");
console.log(AuthAPI.getPublicKey()) console.log(AuthAPI.getPublicKey());
process.exit() process.exit();
}) });
}, },
error => { error => {
console.error(error) console.error(error);
process.exit(1) process.exit(1);
} }
) );

View File

@@ -1,15 +1,21 @@
require("isomorphic-fetch") require("isomorphic-fetch");
const invariant = require("invariant") const invariant = require("invariant");
const CloudflareEmail = process.env.CLOUDFLARE_EMAIL const CloudflareEmail = process.env.CLOUDFLARE_EMAIL;
const CloudflareKey = process.env.CLOUDFLARE_KEY const CloudflareKey = process.env.CLOUDFLARE_KEY;
const RayID = process.argv[2] const RayID = process.argv[2];
invariant(CloudflareEmail, "Missing the $CLOUDFLARE_EMAIL environment variable") invariant(
CloudflareEmail,
"Missing the $CLOUDFLARE_EMAIL environment variable"
);
invariant(CloudflareKey, "Missing the $CLOUDFLARE_KEY environment variable") invariant(CloudflareKey, "Missing the $CLOUDFLARE_KEY environment variable");
invariant(RayID, "Missing the RAY_ID argument; use `heroku run node show-log.js RAY_ID`") invariant(
RayID,
"Missing the RAY_ID argument; use `heroku run node show-log.js RAY_ID`"
);
function getZones(domain) { function getZones(domain) {
return fetch(`https://api.cloudflare.com/client/v4/zones?name=${domain}`, { return fetch(`https://api.cloudflare.com/client/v4/zones?name=${domain}`, {
@@ -20,21 +26,24 @@ function getZones(domain) {
} }
}) })
.then(res => res.json()) .then(res => res.json())
.then(data => data.result) .then(data => data.result);
} }
function getLog(zoneId, rayId) { function getLog(zoneId, rayId) {
return fetch(`https://api.cloudflare.com/client/v4/zones/${zoneId}/logs/requests/${rayId}`, { return fetch(
method: "GET", `https://api.cloudflare.com/client/v4/zones/${zoneId}/logs/requests/${rayId}`,
headers: { {
"X-Auth-Email": CloudflareEmail, method: "GET",
"X-Auth-Key": CloudflareKey headers: {
"X-Auth-Email": CloudflareEmail,
"X-Auth-Key": CloudflareKey
}
} }
}).then(res => (res.status === 404 ? "NOT FOUND" : res.json())) ).then(res => (res.status === 404 ? "NOT FOUND" : res.json()));
} }
getZones("unpkg.com").then(zones => { getZones("unpkg.com").then(zones => {
getLog(zones[0].id, RayID).then(entry => { getLog(zones[0].id, RayID).then(entry => {
console.log(entry) console.log(entry);
}) });
}) });

View File

@@ -1,48 +1,58 @@
const subDays = require("date-fns/sub_days") const subDays = require("date-fns/sub_days");
const prettyBytes = require("pretty-bytes") const prettyBytes = require("pretty-bytes");
const table = require("text-table") const table = require("text-table");
const StatsAPI = require("../server/StatsAPI") const StatsAPI = require("../server/StatsAPI");
const now = new Date() const now = new Date();
function createRange(start, end) { function createRange(start, end) {
const range = [] const range = [];
while (start < end) range.push(start++) while (start < end) range.push(start++);
return range return range;
} }
function createPastDays(n) { function createPastDays(n) {
return createRange(1, n + 1) return createRange(1, n + 1)
.map(days => subDays(now, days)) .map(days => subDays(now, days))
.reverse() .reverse();
} }
const pastSevenDays = createPastDays(7) const pastSevenDays = createPastDays(7);
const pastThirtyDays = createPastDays(30) const pastThirtyDays = createPastDays(30);
Promise.all([ Promise.all([
StatsAPI.sumKeys(pastSevenDays.map(date => `stats-requests-${StatsAPI.createDayKey(date)}`)), StatsAPI.sumKeys(
StatsAPI.sumKeys(pastSevenDays.map(date => `stats-bandwidth-${StatsAPI.createDayKey(date)}`)), pastSevenDays.map(date => `stats-requests-${StatsAPI.createDayKey(date)}`)
StatsAPI.sumKeys(pastThirtyDays.map(date => `stats-requests-${StatsAPI.createDayKey(date)}`)), ),
StatsAPI.sumKeys(pastThirtyDays.map(date => `stats-bandwidth-${StatsAPI.createDayKey(date)}`)) StatsAPI.sumKeys(
pastSevenDays.map(date => `stats-bandwidth-${StatsAPI.createDayKey(date)}`)
),
StatsAPI.sumKeys(
pastThirtyDays.map(date => `stats-requests-${StatsAPI.createDayKey(date)}`)
),
StatsAPI.sumKeys(
pastThirtyDays.map(date => `stats-bandwidth-${StatsAPI.createDayKey(date)}`)
)
]).then(results => { ]).then(results => {
console.log("\n## Summary") console.log("\n## Summary");
console.log("Requests this week: %s", results[0].toLocaleString()) console.log("Requests this week: %s", results[0].toLocaleString());
console.log("Bandwidth this week: %s", prettyBytes(results[1])) console.log("Bandwidth this week: %s", prettyBytes(results[1]));
console.log("Requests this month: %s", results[2].toLocaleString()) console.log("Requests this month: %s", results[2].toLocaleString());
console.log("Bandwidth this month: %s", prettyBytes(results[3])) console.log("Bandwidth this month: %s", prettyBytes(results[3]));
StatsAPI.sumTopScores( StatsAPI.sumTopScores(
pastSevenDays.map(date => `stats-packageRequests-${StatsAPI.createDayKey(date)}`) pastSevenDays.map(
date => `stats-packageRequests-${StatsAPI.createDayKey(date)}`
)
).then(topPackages => { ).then(topPackages => {
console.log("\n## Top Packages This Week") console.log("\n## Top Packages This Week");
topPackages.forEach(result => { topPackages.forEach(result => {
result[1] = result[1].toLocaleString() result[1] = result[1].toLocaleString();
}) });
console.log(table(topPackages)) console.log(table(topPackages));
process.exit() process.exit();
}) });
}) });

View File

@@ -1,35 +1,35 @@
const fs = require("fs") const fs = require("fs");
const path = require("path") const path = require("path");
const crypto = require("crypto") const crypto = require("crypto");
const jwt = require("jsonwebtoken") const jwt = require("jsonwebtoken");
const invariant = require("invariant") const invariant = require("invariant");
const forge = require("node-forge") const forge = require("node-forge");
const db = require("./RedisClient") const db = require("./RedisClient");
let keys let keys;
if (process.env.NODE_ENV === "production") { if (process.env.NODE_ENV === "production") {
keys = { keys = {
public: fs.readFileSync(path.resolve(__dirname, "../public.key"), "utf8"), public: fs.readFileSync(path.resolve(__dirname, "../public.key"), "utf8"),
private: process.env.PRIVATE_KEY private: process.env.PRIVATE_KEY
} };
invariant(keys.private, "Missing $PRIVATE_KEY environment variable") invariant(keys.private, "Missing $PRIVATE_KEY environment variable");
} else { } else {
// Generate a random keypair for dev/testing. // Generate a random keypair for dev/testing.
// See https://gist.github.com/sebadoom/2b70969e70db5da9a203bebd9cff099f // See https://gist.github.com/sebadoom/2b70969e70db5da9a203bebd9cff099f
const keypair = forge.rsa.generateKeyPair({ bits: 2048 }) const keypair = forge.rsa.generateKeyPair({ bits: 2048 });
keys = { keys = {
public: forge.pki.publicKeyToPem(keypair.publicKey, 72), public: forge.pki.publicKeyToPem(keypair.publicKey, 72),
private: forge.pki.privateKeyToPem(keypair.privateKey, 72) private: forge.pki.privateKeyToPem(keypair.privateKey, 72)
} };
} }
function getCurrentSeconds() { function getCurrentSeconds() {
return Math.floor(Date.now() / 1000) return Math.floor(Date.now() / 1000);
} }
function createTokenId() { function createTokenId() {
return crypto.randomBytes(16).toString("hex") return crypto.randomBytes(16).toString("hex");
} }
function createToken(scopes = {}) { function createToken(scopes = {}) {
@@ -39,42 +39,42 @@ function createToken(scopes = {}) {
iss: "https://unpkg.com", iss: "https://unpkg.com",
iat: getCurrentSeconds(), iat: getCurrentSeconds(),
scopes scopes
} };
jwt.sign(payload, keys.private, { algorithm: "RS256" }, (error, token) => { jwt.sign(payload, keys.private, { algorithm: "RS256" }, (error, token) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(token) resolve(token);
} }
}) });
}) });
} }
const RevokedTokensSet = "revoked-tokens" const RevokedTokensSet = "revoked-tokens";
function verifyToken(token) { function verifyToken(token) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const options = { algorithms: ["RS256"] } const options = { algorithms: ["RS256"] };
jwt.verify(token, keys.public, options, (error, payload) => { jwt.verify(token, keys.public, options, (error, payload) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
if (payload.jti) { if (payload.jti) {
db.sismember(RevokedTokensSet, payload.jti, (error, value) => { db.sismember(RevokedTokensSet, payload.jti, (error, value) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(value === 0 ? payload : null) resolve(value === 0 ? payload : null);
} }
}) });
} else { } else {
resolve(null) resolve(null);
} }
} }
}) });
}) });
} }
function revokeToken(token) { function revokeToken(token) {
@@ -83,30 +83,30 @@ function revokeToken(token) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
db.sadd(RevokedTokensSet, payload.jti, error => { db.sadd(RevokedTokensSet, payload.jti, error => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve() resolve();
} }
}) });
}) });
} }
}) });
} }
function removeAllRevokedTokens() { function removeAllRevokedTokens() {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
db.del(RevokedTokensSet, error => { db.del(RevokedTokensSet, error => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve() resolve();
} }
}) });
}) });
} }
function getPublicKey() { function getPublicKey() {
return keys.public return keys.public;
} }
module.exports = { module.exports = {
@@ -115,4 +115,4 @@ module.exports = {
revokeToken, revokeToken,
removeAllRevokedTokens, removeAllRevokedTokens,
getPublicKey getPublicKey
} };

View File

@@ -1,65 +1,65 @@
const db = require("./RedisClient") const db = require("./RedisClient");
const BlacklistSet = "blacklisted-packages" const BlacklistSet = "blacklisted-packages";
function addPackage(packageName) { function addPackage(packageName) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
db.sadd(BlacklistSet, packageName, (error, value) => { db.sadd(BlacklistSet, packageName, (error, value) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(value === 1) resolve(value === 1);
} }
}) });
}) });
} }
function removePackage(packageName) { function removePackage(packageName) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
db.srem(BlacklistSet, packageName, (error, value) => { db.srem(BlacklistSet, packageName, (error, value) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(value === 1) resolve(value === 1);
} }
}) });
}) });
} }
function removeAllPackages() { function removeAllPackages() {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
db.del(BlacklistSet, error => { db.del(BlacklistSet, error => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve() resolve();
} }
}) });
}) });
} }
function getPackages() { function getPackages() {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
db.smembers(BlacklistSet, (error, value) => { db.smembers(BlacklistSet, (error, value) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(value) resolve(value);
} }
}) });
}) });
} }
function includesPackage(packageName) { function includesPackage(packageName) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
db.sismember(BlacklistSet, packageName, (error, value) => { db.sismember(BlacklistSet, packageName, (error, value) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(value === 1) resolve(value === 1);
} }
}) });
}) });
} }
module.exports = { module.exports = {
@@ -68,4 +68,4 @@ module.exports = {
removeAllPackages, removeAllPackages,
getPackages, getPackages,
includesPackage includesPackage
} };

View File

@@ -1,15 +1,18 @@
require("isomorphic-fetch") require("isomorphic-fetch");
const invariant = require("invariant") const invariant = require("invariant");
const gunzip = require("gunzip-maybe") const gunzip = require("gunzip-maybe");
const ndjson = require("ndjson") const ndjson = require("ndjson");
const CloudflareAPIURL = "https://api.cloudflare.com" const CloudflareAPIURL = "https://api.cloudflare.com";
const CloudflareEmail = process.env.CLOUDFLARE_EMAIL const CloudflareEmail = process.env.CLOUDFLARE_EMAIL;
const CloudflareKey = process.env.CLOUDFLARE_KEY const CloudflareKey = process.env.CLOUDFLARE_KEY;
invariant(CloudflareEmail, "Missing the $CLOUDFLARE_EMAIL environment variable") invariant(
CloudflareEmail,
"Missing the $CLOUDFLARE_EMAIL environment variable"
);
invariant(CloudflareKey, "Missing the $CLOUDFLARE_KEY environment variable") invariant(CloudflareKey, "Missing the $CLOUDFLARE_KEY environment variable");
function get(path, headers) { function get(path, headers) {
return fetch(`${CloudflareAPIURL}/client/v4${path}`, { return fetch(`${CloudflareAPIURL}/client/v4${path}`, {
@@ -17,49 +20,49 @@ function get(path, headers) {
"X-Auth-Email": CloudflareEmail, "X-Auth-Email": CloudflareEmail,
"X-Auth-Key": CloudflareKey "X-Auth-Key": CloudflareKey
}) })
}) });
} }
function getJSON(path, headers) { function getJSON(path, headers) {
return get(path, headers) return get(path, headers)
.then(res => { .then(res => {
return res.json() return res.json();
}) })
.then(data => { .then(data => {
if (!data.success) { if (!data.success) {
console.error(`CloudflareAPI.getJSON failed at ${path}`) console.error(`CloudflareAPI.getJSON failed at ${path}`);
console.error(data) console.error(data);
throw new Error("Failed to getJSON from Cloudflare") throw new Error("Failed to getJSON from Cloudflare");
} }
return data.result return data.result;
}) });
} }
function getZones(domains) { function getZones(domains) {
return Promise.all( return Promise.all(
(Array.isArray(domains) ? domains : [domains]).map(domain => { (Array.isArray(domains) ? domains : [domains]).map(domain => {
return getJSON(`/zones?name=${domain}`) return getJSON(`/zones?name=${domain}`);
}) })
).then(results => { ).then(results => {
return results.reduce((memo, zones) => { return results.reduce((memo, zones) => {
return memo.concat(zones) return memo.concat(zones);
}) });
}) });
} }
function reduceResults(target, values) { function reduceResults(target, values) {
Object.keys(values).forEach(key => { Object.keys(values).forEach(key => {
const value = values[key] const value = values[key];
if (typeof value === "object" && value) { if (typeof value === "object" && value) {
target[key] = reduceResults(target[key] || {}, value) target[key] = reduceResults(target[key] || {}, value);
} else if (typeof value === "number") { } else if (typeof value === "number") {
target[key] = (target[key] || 0) + values[key] target[key] = (target[key] || 0) + values[key];
} }
}) });
return target return target;
} }
function getZoneAnalyticsDashboard(zones, since, until) { function getZoneAnalyticsDashboard(zones, since, until) {
@@ -69,29 +72,31 @@ function getZoneAnalyticsDashboard(zones, since, until) {
`/zones/${ `/zones/${
zone.id zone.id
}/analytics/dashboard?since=${since.toISOString()}&until=${until.toISOString()}` }/analytics/dashboard?since=${since.toISOString()}&until=${until.toISOString()}`
) );
}) })
).then(results => { ).then(results => {
return results.reduce(reduceResults) return results.reduce(reduceResults);
}) });
} }
function getJSONStream(path, headers) { function getJSONStream(path, headers) {
const acceptGzipHeaders = Object.assign({}, headers, { const acceptGzipHeaders = Object.assign({}, headers, {
"Accept-Encoding": "gzip" "Accept-Encoding": "gzip"
}) });
return get(path, acceptGzipHeaders) return get(path, acceptGzipHeaders)
.then(res => { .then(res => {
return res.body.pipe(gunzip()) return res.body.pipe(gunzip());
}) })
.then(stream => { .then(stream => {
return stream.pipe(ndjson.parse()) return stream.pipe(ndjson.parse());
}) });
} }
function getLogs(zoneId, startTime, endTime) { function getLogs(zoneId, startTime, endTime) {
return getJSONStream(`/zones/${zoneId}/logs/requests?start=${startTime}&end=${endTime}`) return getJSONStream(
`/zones/${zoneId}/logs/requests?start=${startTime}&end=${endTime}`
);
} }
module.exports = { module.exports = {
@@ -101,4 +106,4 @@ module.exports = {
getZoneAnalyticsDashboard, getZoneAnalyticsDashboard,
getJSONStream, getJSONStream,
getLogs getLogs
} };

View File

@@ -1,9 +1,12 @@
const redis = require("redis") const redis = require("redis");
redis.debug_mode = process.env.DEBUG_REDIS != null redis.debug_mode = process.env.DEBUG_REDIS != null;
const RedisURL = process.env.OPENREDIS_URL || process.env.REDIS_URL || "redis://localhost:6379" const RedisURL =
process.env.OPENREDIS_URL ||
process.env.REDIS_URL ||
"redis://localhost:6379";
const client = redis.createClient(RedisURL) const client = redis.createClient(RedisURL);
module.exports = client module.exports = client;

View File

@@ -1,108 +1,114 @@
const db = require("./RedisClient") const db = require("./RedisClient");
const CloudflareAPI = require("./CloudflareAPI") const CloudflareAPI = require("./CloudflareAPI");
const BlacklistAPI = require("./BlacklistAPI") const BlacklistAPI = require("./BlacklistAPI");
function prunePackages(packagesMap) { function prunePackages(packagesMap) {
return Promise.all( return Promise.all(
Object.keys(packagesMap).map(packageName => Object.keys(packagesMap).map(packageName =>
BlacklistAPI.includesPackage(packageName).then(blacklisted => { BlacklistAPI.includesPackage(packageName).then(blacklisted => {
if (blacklisted) { if (blacklisted) {
delete packagesMap[packageName] delete packagesMap[packageName];
} }
}) })
) )
).then(() => packagesMap) ).then(() => packagesMap);
} }
function createDayKey(date) { function createDayKey(date) {
return `${date.getUTCFullYear()}-${date.getUTCMonth()}-${date.getUTCDate()}` return `${date.getUTCFullYear()}-${date.getUTCMonth()}-${date.getUTCDate()}`;
} }
function createHourKey(date) { function createHourKey(date) {
return `${createDayKey(date)}-${date.getUTCHours()}` return `${createDayKey(date)}-${date.getUTCHours()}`;
} }
function createMinuteKey(date) { function createMinuteKey(date) {
return `${createHourKey(date)}-${date.getUTCMinutes()}` return `${createHourKey(date)}-${date.getUTCMinutes()}`;
} }
function createScoresMap(array) { function createScoresMap(array) {
const map = {} const map = {};
for (let i = 0; i < array.length; i += 2) { for (let i = 0; i < array.length; i += 2) {
map[array[i]] = parseInt(array[i + 1], 10) map[array[i]] = parseInt(array[i + 1], 10);
} }
return map return map;
} }
function getScoresMap(key, n = 100) { function getScoresMap(key, n = 100) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
db.zrevrange(key, 0, n, "withscores", (error, value) => { db.zrevrange(key, 0, n, "withscores", (error, value) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(createScoresMap(value)) resolve(createScoresMap(value));
} }
}) });
}) });
} }
function getPackageRequests(date, n = 100) { function getPackageRequests(date, n = 100) {
return getScoresMap(`stats-packageRequests-${createDayKey(date)}`, n).then(prunePackages) return getScoresMap(`stats-packageRequests-${createDayKey(date)}`, n).then(
prunePackages
);
} }
function getPackageBandwidth(date, n = 100) { function getPackageBandwidth(date, n = 100) {
return getScoresMap(`stats-packageBytes-${createDayKey(date)}`, n).then(prunePackages) return getScoresMap(`stats-packageBytes-${createDayKey(date)}`, n).then(
prunePackages
);
} }
function getProtocolRequests(date) { function getProtocolRequests(date) {
return getScoresMap(`stats-protocolRequests-${createDayKey(date)}`) return getScoresMap(`stats-protocolRequests-${createDayKey(date)}`);
} }
function addDailyMetricsToTimeseries(timeseries) { function addDailyMetricsToTimeseries(timeseries) {
const since = new Date(timeseries.since) const since = new Date(timeseries.since);
return Promise.all([ return Promise.all([
getPackageRequests(since), getPackageRequests(since),
getPackageBandwidth(since), getPackageBandwidth(since),
getProtocolRequests(since) getProtocolRequests(since)
]).then(results => { ]).then(results => {
timeseries.requests.package = results[0] timeseries.requests.package = results[0];
timeseries.bandwidth.package = results[1] timeseries.bandwidth.package = results[1];
timeseries.requests.protocol = results[2] timeseries.requests.protocol = results[2];
return timeseries return timeseries;
}) });
} }
function sumMaps(maps) { function sumMaps(maps) {
return maps.reduce((memo, map) => { return maps.reduce((memo, map) => {
Object.keys(map).forEach(key => { Object.keys(map).forEach(key => {
memo[key] = (memo[key] || 0) + map[key] memo[key] = (memo[key] || 0) + map[key];
}) });
return memo return memo;
}, {}) }, {});
} }
function addDailyMetrics(result) { function addDailyMetrics(result) {
return Promise.all(result.timeseries.map(addDailyMetricsToTimeseries)).then(() => { return Promise.all(result.timeseries.map(addDailyMetricsToTimeseries)).then(
result.totals.requests.package = sumMaps( () => {
result.timeseries.map(timeseries => { result.totals.requests.package = sumMaps(
return timeseries.requests.package result.timeseries.map(timeseries => {
}) return timeseries.requests.package;
) })
);
result.totals.bandwidth.package = sumMaps( result.totals.bandwidth.package = sumMaps(
result.timeseries.map(timeseries => timeseries.bandwidth.package) result.timeseries.map(timeseries => timeseries.bandwidth.package)
) );
result.totals.requests.protocol = sumMaps( result.totals.requests.protocol = sumMaps(
result.timeseries.map(timeseries => timeseries.requests.protocol) result.timeseries.map(timeseries => timeseries.requests.protocol)
) );
return result return result;
}) }
);
} }
function extractPublicInfo(data) { function extractPublicInfo(data) {
@@ -131,29 +137,31 @@ function extractPublicInfo(data) {
uniques: { uniques: {
all: data.uniques.all all: data.uniques.all
} }
} };
} }
const DomainNames = ["unpkg.com", "npmcdn.com"] const DomainNames = ["unpkg.com", "npmcdn.com"];
function fetchStats(since, until) { function fetchStats(since, until) {
return CloudflareAPI.getZones(DomainNames).then(zones => { return CloudflareAPI.getZones(DomainNames).then(zones => {
return CloudflareAPI.getZoneAnalyticsDashboard(zones, since, until).then(dashboard => { return CloudflareAPI.getZoneAnalyticsDashboard(zones, since, until).then(
return { dashboard => {
timeseries: dashboard.timeseries.map(extractPublicInfo), return {
totals: extractPublicInfo(dashboard.totals) timeseries: dashboard.timeseries.map(extractPublicInfo),
totals: extractPublicInfo(dashboard.totals)
};
} }
}) );
}) });
} }
const oneMinute = 1000 * 60 const oneMinute = 1000 * 60;
const oneHour = oneMinute * 60 const oneHour = oneMinute * 60;
const oneDay = oneHour * 24 const oneDay = oneHour * 24;
function getStats(since, until) { function getStats(since, until) {
const promise = fetchStats(since, until) const promise = fetchStats(since, until);
return until - since > oneDay ? promise.then(addDailyMetrics) : promise return until - since > oneDay ? promise.then(addDailyMetrics) : promise;
} }
module.exports = { module.exports = {
@@ -161,4 +169,4 @@ module.exports = {
createHourKey, createHourKey,
createMinuteKey, createMinuteKey,
getStats getStats
} };

View File

@@ -1,9 +1,9 @@
const AuthAPI = require("../AuthAPI") const AuthAPI = require("../AuthAPI");
describe("Auth API", () => { describe("Auth API", () => {
beforeEach(done => { beforeEach(done => {
AuthAPI.removeAllRevokedTokens().then(() => done(), done) AuthAPI.removeAllRevokedTokens().then(() => done(), done);
}) });
it("creates tokens with the right scopes", done => { it("creates tokens with the right scopes", done => {
const scopes = { const scopes = {
@@ -11,29 +11,29 @@ describe("Auth API", () => {
add: true, add: true,
remove: true remove: true
} }
} };
AuthAPI.createToken(scopes).then(token => { AuthAPI.createToken(scopes).then(token => {
AuthAPI.verifyToken(token).then(payload => { AuthAPI.verifyToken(token).then(payload => {
expect(payload.jti).toEqual(expect.any(String)) expect(payload.jti).toEqual(expect.any(String));
expect(payload.iss).toEqual(expect.any(String)) expect(payload.iss).toEqual(expect.any(String));
expect(payload.iat).toEqual(expect.any(Number)) expect(payload.iat).toEqual(expect.any(Number));
expect(payload.scopes).toMatchObject(scopes) expect(payload.scopes).toMatchObject(scopes);
done() done();
}) });
}) });
}) });
it("refuses to verify revoked tokens", done => { it("refuses to verify revoked tokens", done => {
const scopes = {} const scopes = {};
AuthAPI.createToken(scopes).then(token => { AuthAPI.createToken(scopes).then(token => {
AuthAPI.revokeToken(token).then(() => { AuthAPI.revokeToken(token).then(() => {
AuthAPI.verifyToken(token).then(payload => { AuthAPI.verifyToken(token).then(payload => {
expect(payload).toBe(null) expect(payload).toBe(null);
done() done();
}) });
}) });
}) });
}) });
}) });

View File

@@ -1,24 +1,24 @@
const BlacklistAPI = require("../BlacklistAPI") const BlacklistAPI = require("../BlacklistAPI");
describe("Blacklist API", () => { describe("Blacklist API", () => {
beforeEach(done => { beforeEach(done => {
BlacklistAPI.removeAllPackages().then(() => done(), done) BlacklistAPI.removeAllPackages().then(() => done(), done);
}) });
it("adds and removes packages to/from the blacklist", done => { it("adds and removes packages to/from the blacklist", done => {
const packageName = "bad-package" const packageName = "bad-package";
BlacklistAPI.addPackage(packageName).then(() => { BlacklistAPI.addPackage(packageName).then(() => {
BlacklistAPI.getPackages().then(packageNames => { BlacklistAPI.getPackages().then(packageNames => {
expect(packageNames).toEqual([packageName]) expect(packageNames).toEqual([packageName]);
BlacklistAPI.removePackage(packageName).then(() => { BlacklistAPI.removePackage(packageName).then(() => {
BlacklistAPI.getPackages().then(packageNames => { BlacklistAPI.getPackages().then(packageNames => {
expect(packageNames).toEqual([]) expect(packageNames).toEqual([]);
done() done();
}) });
}) });
}) });
}) });
}) });
}) });

View File

@@ -1,66 +1,66 @@
const request = require("supertest") const request = require("supertest");
const createServer = require("../createServer") const createServer = require("../createServer");
const clearBlacklist = require("./utils/clearBlacklist") const clearBlacklist = require("./utils/clearBlacklist");
const withBlacklist = require("./utils/withBlacklist") const withBlacklist = require("./utils/withBlacklist");
const withRevokedToken = require("./utils/withRevokedToken") const withRevokedToken = require("./utils/withRevokedToken");
const withToken = require("./utils/withToken") const withToken = require("./utils/withToken");
describe("The server", () => { describe("The server", () => {
let server let server;
beforeEach(() => { beforeEach(() => {
server = createServer() server = createServer();
}) });
it("rejects invalid package names", done => { it("rejects invalid package names", done => {
request(server) request(server)
.get("/_invalid/index.js") .get("/_invalid/index.js")
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(403) expect(res.statusCode).toBe(403);
done() done();
}) });
}) });
it("redirects invalid query params", done => { it("redirects invalid query params", done => {
request(server) request(server)
.get("/react?main=index&invalid") .get("/react?main=index&invalid")
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(302) expect(res.statusCode).toBe(302);
expect(res.headers.location).toBe("/react?main=index") expect(res.headers.location).toBe("/react?main=index");
done() done();
}) });
}) });
it("redirects /_meta to ?meta", done => { it("redirects /_meta to ?meta", done => {
request(server) request(server)
.get("/_meta/react?main=index") .get("/_meta/react?main=index")
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(302) expect(res.statusCode).toBe(302);
expect(res.headers.location).toBe("/react?main=index&meta") expect(res.headers.location).toBe("/react?main=index&meta");
done() done();
}) });
}) });
it("does not serve blacklisted packages", done => { it("does not serve blacklisted packages", done => {
withBlacklist(["bad-package"], () => { withBlacklist(["bad-package"], () => {
request(server) request(server)
.get("/bad-package/index.js") .get("/bad-package/index.js")
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(403) expect(res.statusCode).toBe(403);
done() done();
}) });
}) });
}) });
describe("POST /_auth", () => { describe("POST /_auth", () => {
it("creates a new auth token", done => { it("creates a new auth token", done => {
request(server) request(server)
.post("/_auth") .post("/_auth")
.end((err, res) => { .end((err, res) => {
expect(res.body).toHaveProperty("token") expect(res.body).toHaveProperty("token");
done() done();
}) });
}) });
}) });
describe("GET /_auth", () => { describe("GET /_auth", () => {
describe("with no auth", () => { describe("with no auth", () => {
@@ -68,12 +68,12 @@ describe("The server", () => {
request(server) request(server)
.get("/_auth") .get("/_auth")
.end((err, res) => { .end((err, res) => {
expect(res.body).toHaveProperty("auth") expect(res.body).toHaveProperty("auth");
expect(res.body.auth).toBe(null) expect(res.body.auth).toBe(null);
done() done();
}) });
}) });
}) });
describe("with a revoked auth token", () => { describe("with a revoked auth token", () => {
it("echoes back null", done => { it("echoes back null", done => {
@@ -81,13 +81,13 @@ describe("The server", () => {
request(server) request(server)
.get("/_auth?token=" + token) .get("/_auth?token=" + token)
.end((err, res) => { .end((err, res) => {
expect(res.body).toHaveProperty("auth") expect(res.body).toHaveProperty("auth");
expect(res.body.auth).toBe(null) expect(res.body.auth).toBe(null);
done() done();
}) });
}) });
}) });
}) });
describe("with a valid auth token", () => { describe("with a valid auth token", () => {
it("echoes back the auth payload", done => { it("echoes back the auth payload", done => {
@@ -95,39 +95,39 @@ describe("The server", () => {
request(server) request(server)
.get("/_auth?token=" + token) .get("/_auth?token=" + token)
.end((err, res) => { .end((err, res) => {
expect(res.body).toHaveProperty("auth") expect(res.body).toHaveProperty("auth");
expect(typeof res.body.auth).toBe("object") expect(typeof res.body.auth).toBe("object");
done() done();
}) });
}) });
}) });
}) });
}) });
describe("GET /_publicKey", () => { describe("GET /_publicKey", () => {
it("echoes the public key", done => { it("echoes the public key", done => {
request(server) request(server)
.get("/_publicKey") .get("/_publicKey")
.end((err, res) => { .end((err, res) => {
expect(res.text).toMatch(/PUBLIC KEY/) expect(res.text).toMatch(/PUBLIC KEY/);
done() done();
}) });
}) });
}) });
describe("POST /_blacklist", () => { describe("POST /_blacklist", () => {
afterEach(clearBlacklist) afterEach(clearBlacklist);
describe("with no auth", () => { describe("with no auth", () => {
it("is forbidden", done => { it("is forbidden", done => {
request(server) request(server)
.post("/_blacklist") .post("/_blacklist")
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(403) expect(res.statusCode).toBe(403);
done() done();
}) });
}) });
}) });
describe('with the "blacklist.add" scope', () => { describe('with the "blacklist.add" scope', () => {
it("can add to the blacklist", done => { it("can add to the blacklist", done => {
@@ -136,15 +136,17 @@ describe("The server", () => {
.post("/_blacklist") .post("/_blacklist")
.send({ token, packageName: "bad-package" }) .send({ token, packageName: "bad-package" })
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(200) expect(res.statusCode).toBe(200);
expect(res.headers["content-location"]).toEqual("/_blacklist/bad-package") expect(res.headers["content-location"]).toEqual(
expect(res.body.ok).toBe(true) "/_blacklist/bad-package"
done() );
}) expect(res.body.ok).toBe(true);
}) done();
}) });
}) });
}) });
});
});
describe("GET /_blacklist", () => { describe("GET /_blacklist", () => {
describe("with no auth", () => { describe("with no auth", () => {
@@ -152,11 +154,11 @@ describe("The server", () => {
request(server) request(server)
.get("/_blacklist") .get("/_blacklist")
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(403) expect(res.statusCode).toBe(403);
done() done();
}) });
}) });
}) });
describe('with the "blacklist.read" scope', () => { describe('with the "blacklist.read" scope', () => {
it("can read the blacklist", done => { it("can read the blacklist", done => {
@@ -164,13 +166,13 @@ describe("The server", () => {
request(server) request(server)
.get("/_blacklist?token=" + token) .get("/_blacklist?token=" + token)
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(200) expect(res.statusCode).toBe(200);
done() done();
}) });
}) });
}) });
}) });
}) });
describe("DELETE /_blacklist/:packageName", () => { describe("DELETE /_blacklist/:packageName", () => {
describe("with no auth", () => { describe("with no auth", () => {
@@ -178,11 +180,11 @@ describe("The server", () => {
request(server) request(server)
.delete("/_blacklist/bad-package") .delete("/_blacklist/bad-package")
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(403) expect(res.statusCode).toBe(403);
done() done();
}) });
}) });
}) });
describe('with the "blacklist.remove" scope', () => { describe('with the "blacklist.remove" scope', () => {
it("can remove a package from the blacklist", done => { it("can remove a package from the blacklist", done => {
@@ -191,12 +193,12 @@ describe("The server", () => {
.delete("/_blacklist/bad-package") .delete("/_blacklist/bad-package")
.send({ token }) .send({ token })
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(200) expect(res.statusCode).toBe(200);
expect(res.body.ok).toBe(true) expect(res.body.ok).toBe(true);
done() done();
}) });
}) });
}) });
it("can remove a scoped package from the blacklist", done => { it("can remove a scoped package from the blacklist", done => {
withToken({ blacklist: { remove: true } }, token => { withToken({ blacklist: { remove: true } }, token => {
@@ -204,12 +206,12 @@ describe("The server", () => {
.delete("/_blacklist/@scope/bad-package") .delete("/_blacklist/@scope/bad-package")
.send({ token }) .send({ token })
.end((err, res) => { .end((err, res) => {
expect(res.statusCode).toBe(200) expect(res.statusCode).toBe(200);
expect(res.body.ok).toBe(true) expect(res.body.ok).toBe(true);
done() done();
}) });
}) });
}) });
}) });
}) });
}) });

View File

@@ -1,7 +1,7 @@
const BlacklistAPI = require("../../BlacklistAPI") const BlacklistAPI = require("../../BlacklistAPI");
function clearBlacklist(done) { function clearBlacklist(done) {
BlacklistAPI.removeAllPackages().then(done, done) BlacklistAPI.removeAllPackages().then(done, done);
} }
module.exports = clearBlacklist module.exports = clearBlacklist;

View File

@@ -1,7 +1,7 @@
const BlacklistAPI = require("../../BlacklistAPI") const BlacklistAPI = require("../../BlacklistAPI");
function withBlacklist(blacklist, callback) { function withBlacklist(blacklist, callback) {
return Promise.all(blacklist.map(BlacklistAPI.addPackage)).then(callback) return Promise.all(blacklist.map(BlacklistAPI.addPackage)).then(callback);
} }
module.exports = withBlacklist module.exports = withBlacklist;

View File

@@ -1,12 +1,12 @@
const withToken = require("./withToken") const withToken = require("./withToken");
const AuthAPI = require("../../AuthAPI") const AuthAPI = require("../../AuthAPI");
function withRevokedToken(scopes, callback) { function withRevokedToken(scopes, callback) {
withToken(scopes, token => { withToken(scopes, token => {
AuthAPI.revokeToken(token).then(() => { AuthAPI.revokeToken(token).then(() => {
callback(token) callback(token);
}) });
}) });
} }
module.exports = withRevokedToken module.exports = withRevokedToken;

View File

@@ -1,7 +1,7 @@
const AuthAPI = require("../../AuthAPI") const AuthAPI = require("../../AuthAPI");
function withToken(scopes, callback) { function withToken(scopes, callback) {
AuthAPI.createToken(scopes).then(callback) AuthAPI.createToken(scopes).then(callback);
} }
module.exports = withToken module.exports = withToken;

View File

@@ -1,42 +1,48 @@
const validateNpmPackageName = require("validate-npm-package-name") const validateNpmPackageName = require("validate-npm-package-name");
const BlacklistAPI = require("../BlacklistAPI") const BlacklistAPI = require("../BlacklistAPI");
function addToBlacklist(req, res) { function addToBlacklist(req, res) {
const packageName = req.body.packageName const packageName = req.body.packageName;
if (!packageName) { if (!packageName) {
return res.status(403).send({ error: 'Missing "packageName" body parameter' }) return res
.status(403)
.send({ error: 'Missing "packageName" body parameter' });
} }
const nameErrors = validateNpmPackageName(packageName).errors const nameErrors = validateNpmPackageName(packageName).errors;
// Disallow invalid package names. // Disallow invalid package names.
if (nameErrors) { if (nameErrors) {
const reason = nameErrors.join(", ") const reason = nameErrors.join(", ");
return res.status(403).send({ return res.status(403).send({
error: `Invalid package name "${packageName}" (${reason})` error: `Invalid package name "${packageName}" (${reason})`
}) });
} }
BlacklistAPI.addPackage(packageName).then( BlacklistAPI.addPackage(packageName).then(
added => { added => {
if (added) { if (added) {
const userId = req.user.jti const userId = req.user.jti;
console.log(`Package "${packageName}" was added to the blacklist by ${userId}`) console.log(
`Package "${packageName}" was added to the blacklist by ${userId}`
);
} }
res.set({ "Content-Location": `/_blacklist/${packageName}` }).send({ res.set({ "Content-Location": `/_blacklist/${packageName}` }).send({
ok: true, ok: true,
message: `Package "${packageName}" was ${added ? "added to" : "already in"} the blacklist` message: `Package "${packageName}" was ${
}) added ? "added to" : "already in"
} the blacklist`
});
}, },
error => { error => {
console.error(error) console.error(error);
res.status(500).send({ res.status(500).send({
error: `Unable to add "${packageName}" to the blacklist` error: `Unable to add "${packageName}" to the blacklist`
}) });
} }
) );
} }
module.exports = addToBlacklist module.exports = addToBlacklist;

View File

@ -1,24 +1,24 @@
const AuthAPI = require("../AuthAPI") const AuthAPI = require("../AuthAPI");
const defaultScopes = { const defaultScopes = {
blacklist: { blacklist: {
read: true read: true
} }
} };
function createAuth(req, res) { function createAuth(req, res) {
AuthAPI.createToken(defaultScopes).then( AuthAPI.createToken(defaultScopes).then(
token => { token => {
res.send({ token }) res.send({ token });
}, },
error => { error => {
console.error(error) console.error(error);
res.status(500).send({ res.status(500).send({
error: "Unable to generate auth token" error: "Unable to generate auth token"
}) });
} }
) );
} }
module.exports = createAuth module.exports = createAuth;

View File

@ -1,28 +1,32 @@
const BlacklistAPI = require("../BlacklistAPI") const BlacklistAPI = require("../BlacklistAPI");
function removeFromBlacklist(req, res) { function removeFromBlacklist(req, res) {
const packageName = req.packageName const packageName = req.packageName;
BlacklistAPI.removePackage(packageName).then( BlacklistAPI.removePackage(packageName).then(
removed => { removed => {
if (removed) { if (removed) {
const userId = req.user.jti const userId = req.user.jti;
console.log(`Package "${packageName}" was removed from the blacklist by ${userId}`) console.log(
`Package "${packageName}" was removed from the blacklist by ${userId}`
);
} }
res.send({ res.send({
ok: true, ok: true,
message: `Package "${packageName}" was ${removed ? "removed from" : "not in"} the blacklist` message: `Package "${packageName}" was ${
}) removed ? "removed from" : "not in"
} the blacklist`
});
}, },
error => { error => {
console.error(error) console.error(error);
res.status(500).send({ res.status(500).send({
error: `Unable to remove "${packageName}" from the blacklist` error: `Unable to remove "${packageName}" from the blacklist`
}) });
} }
) );
} }
module.exports = removeFromBlacklist module.exports = removeFromBlacklist;

View File

@ -1,5 +1,5 @@
function showAuth(req, res) { function showAuth(req, res) {
res.send({ auth: req.user }) res.send({ auth: req.user });
} }
module.exports = showAuth module.exports = showAuth;

View File

@ -1,17 +1,17 @@
const BlacklistAPI = require("../BlacklistAPI") const BlacklistAPI = require("../BlacklistAPI");
function showBlacklist(req, res) { function showBlacklist(req, res) {
BlacklistAPI.getPackages().then( BlacklistAPI.getPackages().then(
blacklist => { blacklist => {
res.send({ blacklist }) res.send({ blacklist });
}, },
error => { error => {
console.error(error) console.error(error);
res.status(500).send({ res.status(500).send({
error: "Unable to fetch blacklist" error: "Unable to fetch blacklist"
}) });
} }
) );
} }
module.exports = showBlacklist module.exports = showBlacklist;

View File

@ -1,7 +1,7 @@
const AuthAPI = require("../AuthAPI") const AuthAPI = require("../AuthAPI");
function showPublicKey(req, res) { function showPublicKey(req, res) {
res.send({ publicKey: AuthAPI.getPublicKey() }) res.send({ publicKey: AuthAPI.getPublicKey() });
} }
module.exports = showPublicKey module.exports = showPublicKey;

View File

@ -1,42 +1,46 @@
const subDays = require("date-fns/sub_days") const subDays = require("date-fns/sub_days");
const startOfDay = require("date-fns/start_of_day") const startOfDay = require("date-fns/start_of_day");
const startOfSecond = require("date-fns/start_of_second") const startOfSecond = require("date-fns/start_of_second");
const StatsAPI = require("../StatsAPI") const StatsAPI = require("../StatsAPI");
function showStats(req, res) { function showStats(req, res) {
let since, until let since, until;
switch (req.query.period) { switch (req.query.period) {
case "last-day": case "last-day":
until = startOfDay(new Date()) until = startOfDay(new Date());
since = subDays(until, 1) since = subDays(until, 1);
break break;
case "last-week": case "last-week":
until = startOfDay(new Date()) until = startOfDay(new Date());
since = subDays(until, 7) since = subDays(until, 7);
break break;
case "last-month": case "last-month":
until = startOfDay(new Date()) until = startOfDay(new Date());
since = subDays(until, 30) since = subDays(until, 30);
break break;
default: default:
until = req.query.until ? new Date(req.query.until) : startOfSecond(new Date()) until = req.query.until
since = new Date(req.query.since) ? new Date(req.query.until)
: startOfSecond(new Date());
since = new Date(req.query.since);
} }
if (isNaN(since.getTime())) { if (isNaN(since.getTime())) {
return res.status(403).send({ error: "?since is not a valid date" }) return res.status(403).send({ error: "?since is not a valid date" });
} }
if (isNaN(until.getTime())) { if (isNaN(until.getTime())) {
return res.status(403).send({ error: "?until is not a valid date" }) return res.status(403).send({ error: "?until is not a valid date" });
} }
if (until <= since) { if (until <= since) {
return res.status(403).send({ error: "?until date must come after ?since date" }) return res
.status(403)
.send({ error: "?until date must come after ?since date" });
} }
if (until >= new Date()) { if (until >= new Date()) {
return res.status(403).send({ error: "?until must be a date in the past" }) return res.status(403).send({ error: "?until must be a date in the past" });
} }
StatsAPI.getStats(since, until).then( StatsAPI.getStats(since, until).then(
@ -46,13 +50,13 @@ function showStats(req, res) {
"Cache-Control": "public, max-age=60", "Cache-Control": "public, max-age=60",
"Cache-Tag": "stats" "Cache-Tag": "stats"
}) })
.send(stats) .send(stats);
}, },
error => { error => {
console.error(error) console.error(error);
res.status(500).send({ error: "Unable to fetch stats" }) res.status(500).send({ error: "Unable to fetch stats" });
} }
) );
} }
module.exports = showStats module.exports = showStats;
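A quick sketch of the query forms the handler above accepts; the "/_stats" mount point is an assumption, since the server's route wiring is not part of this diff:

// Illustrative client calls against the stats handler above ("/_stats" path assumed).
fetch("/_stats?period=last-day");                    // last 24 hours, aligned to start of day
fetch("/_stats?period=last-week");                   // last 7 days
fetch("/_stats?since=2018-01-01&until=2018-01-31");  // explicit range: ?until must be in the past and later than ?since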

View File

@ -1,11 +1,11 @@
const parseURL = require("url").parse const parseURL = require("url").parse;
const startOfDay = require("date-fns/start_of_day") const startOfDay = require("date-fns/start_of_day");
const addDays = require("date-fns/add_days") const addDays = require("date-fns/add_days");
const parsePackageURL = require("./utils/parsePackageURL") const parsePackageURL = require("./utils/parsePackageURL");
const CloudflareAPI = require("./CloudflareAPI") const CloudflareAPI = require("./CloudflareAPI");
const StatsAPI = require("./StatsAPI") const StatsAPI = require("./StatsAPI");
const db = require("./RedisClient") const db = require("./RedisClient");
/** /**
* Domains we want to analyze. * Domains we want to analyze.
@ -13,98 +13,119 @@ const db = require("./RedisClient")
const DomainNames = [ const DomainNames = [
"unpkg.com" "unpkg.com"
//'npmcdn.com' // We don't have log data on npmcdn.com yet :/ //'npmcdn.com' // We don't have log data on npmcdn.com yet :/
] ];
/** /**
* The window of time to download in a single fetch. * The window of time to download in a single fetch.
*/ */
const LogWindowSeconds = 30 const LogWindowSeconds = 30;
function getSeconds(date) { function getSeconds(date) {
return Math.floor(date.getTime() / 1000) return Math.floor(date.getTime() / 1000);
} }
function stringifySeconds(seconds) { function stringifySeconds(seconds) {
return new Date(seconds * 1000).toISOString() return new Date(seconds * 1000).toISOString();
} }
function toSeconds(millis) { function toSeconds(millis) {
return Math.floor(millis / 1000) return Math.floor(millis / 1000);
} }
const oneSecond = 1000 const oneSecond = 1000;
const oneMinute = oneSecond * 60 const oneMinute = oneSecond * 60;
const oneHour = oneMinute * 60 const oneHour = oneMinute * 60;
function computeCounters(stream) { function computeCounters(stream) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const counters = {} const counters = {};
const expireat = {} const expireat = {};
function incr(key, member, by, expiry) { function incr(key, member, by, expiry) {
counters[key] = counters[key] || {} counters[key] = counters[key] || {};
counters[key][member] = (counters[key][member] || 0) + by counters[key][member] = (counters[key][member] || 0) + by;
expireat[key] = expiry expireat[key] = expiry;
} }
stream stream
.on("error", reject) .on("error", reject)
.on("data", function(entry) { .on("data", function(entry) {
const date = new Date(Math.round(entry.timestamp / 1000000)) const date = new Date(Math.round(entry.timestamp / 1000000));
const nextDay = startOfDay(addDays(date, 1)) const nextDay = startOfDay(addDays(date, 1));
const sevenDaysLater = getSeconds(addDays(nextDay, 7)) const sevenDaysLater = getSeconds(addDays(nextDay, 7));
const thirtyDaysLater = getSeconds(addDays(nextDay, 30)) const thirtyDaysLater = getSeconds(addDays(nextDay, 30));
const dayKey = StatsAPI.createDayKey(date) const dayKey = StatsAPI.createDayKey(date);
const clientRequest = entry.clientRequest const clientRequest = entry.clientRequest;
const edgeResponse = entry.edgeResponse const edgeResponse = entry.edgeResponse;
if (edgeResponse.status === 200) { if (edgeResponse.status === 200) {
// Q: How many requests do we serve for a package per day? // Q: How many requests do we serve for a package per day?
// Q: How many bytes do we serve for a package per day? // Q: How many bytes do we serve for a package per day?
const url = parsePackageURL(parseURL(clientRequest.uri).pathname) const url = parsePackageURL(parseURL(clientRequest.uri).pathname);
const packageName = url && url.packageName const packageName = url && url.packageName;
if (packageName) { if (packageName) {
incr(`stats-packageRequests-${dayKey}`, packageName, 1, thirtyDaysLater) incr(
incr(`stats-packageBytes-${dayKey}`, packageName, edgeResponse.bytes, thirtyDaysLater) `stats-packageRequests-${dayKey}`,
packageName,
1,
thirtyDaysLater
);
incr(
`stats-packageBytes-${dayKey}`,
packageName,
edgeResponse.bytes,
thirtyDaysLater
);
} }
} }
// Q: How many requests per day do we receive via a protocol? // Q: How many requests per day do we receive via a protocol?
const protocol = clientRequest.httpProtocol const protocol = clientRequest.httpProtocol;
if (protocol) incr(`stats-protocolRequests-${dayKey}`, protocol, 1, thirtyDaysLater) if (protocol)
incr(
`stats-protocolRequests-${dayKey}`,
protocol,
1,
thirtyDaysLater
);
// Q: How many requests do we receive from a hostname per day? // Q: How many requests do we receive from a hostname per day?
// Q: How many bytes do we serve to a hostname per day? // Q: How many bytes do we serve to a hostname per day?
const referer = clientRequest.referer const referer = clientRequest.referer;
const hostname = referer && parseURL(referer).hostname const hostname = referer && parseURL(referer).hostname;
if (hostname) { if (hostname) {
incr(`stats-hostnameRequests-${dayKey}`, hostname, 1, sevenDaysLater) incr(`stats-hostnameRequests-${dayKey}`, hostname, 1, sevenDaysLater);
incr(`stats-hostnameBytes-${dayKey}`, hostname, edgeResponse.bytes, sevenDaysLater) incr(
`stats-hostnameBytes-${dayKey}`,
hostname,
edgeResponse.bytes,
sevenDaysLater
);
} }
}) })
.on("end", function() { .on("end", function() {
resolve({ counters, expireat }) resolve({ counters, expireat });
}) });
}) });
} }
function processLogs(stream) { function processLogs(stream) {
return computeCounters(stream).then(({ counters, expireat }) => { return computeCounters(stream).then(({ counters, expireat }) => {
Object.keys(counters).forEach(key => { Object.keys(counters).forEach(key => {
const values = counters[key] const values = counters[key];
Object.keys(values).forEach(member => { Object.keys(values).forEach(member => {
db.zincrby(key, values[member], member) db.zincrby(key, values[member], member);
}) });
if (expireat[key]) db.expireat(key, expireat[key]) if (expireat[key]) db.expireat(key, expireat[key]);
}) });
}) });
} }
function ingestLogs(zone, startSeconds, endSeconds) { function ingestLogs(zone, startSeconds, endSeconds) {
@ -114,62 +135,65 @@ function ingestLogs(zone, startSeconds, endSeconds) {
zone.name, zone.name,
stringifySeconds(startSeconds), stringifySeconds(startSeconds),
stringifySeconds(endSeconds) stringifySeconds(endSeconds)
) );
const startFetchTime = Date.now() const startFetchTime = Date.now();
resolve( resolve(
CloudflareAPI.getLogs(zone.id, startSeconds, endSeconds).then(stream => { CloudflareAPI.getLogs(zone.id, startSeconds, endSeconds).then(stream => {
const endFetchTime = Date.now() const endFetchTime = Date.now();
console.log( console.log(
"info: Fetched %ds worth of logs for %s in %dms", "info: Fetched %ds worth of logs for %s in %dms",
endSeconds - startSeconds, endSeconds - startSeconds,
zone.name, zone.name,
endFetchTime - startFetchTime endFetchTime - startFetchTime
) );
const startProcessTime = Date.now() const startProcessTime = Date.now();
return processLogs(stream).then(() => { return processLogs(stream).then(() => {
const endProcessTime = Date.now() const endProcessTime = Date.now();
console.log( console.log(
"info: Processed %ds worth of logs for %s in %dms", "info: Processed %ds worth of logs for %s in %dms",
endSeconds - startSeconds, endSeconds - startSeconds,
zone.name, zone.name,
endProcessTime - startProcessTime endProcessTime - startProcessTime
) );
}) });
}) })
) );
}) });
} }
function startZone(zone) { function startZone(zone) {
const startSecondsKey = `ingestLogsWorker-nextStartSeconds-${zone.name.replace(".", "-")}` const startSecondsKey = `ingestLogsWorker-nextStartSeconds-${zone.name.replace(
".",
"-"
)}`;
function takeATurn() { function takeATurn() {
db.get(startSecondsKey, function(error, value) { db.get(startSecondsKey, function(error, value) {
let startSeconds = value && parseInt(value, 10) let startSeconds = value && parseInt(value, 10);
const now = Date.now() const now = Date.now();
// Cloudflare keeps logs around for 72 hours. // Cloudflare keeps logs around for 72 hours.
// https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API // https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API
const minSeconds = toSeconds(now - oneHour * 72) const minSeconds = toSeconds(now - oneHour * 72);
if (startSeconds == null) { if (startSeconds == null) {
startSeconds = minSeconds startSeconds = minSeconds;
} else if (startSeconds < minSeconds) { } else if (startSeconds < minSeconds) {
console.warn( console.warn(
"warning: Dropped logs for %s from %s to %s!", "warning: Dropped logs for %s from %s to %s!",
zone.name, zone.name,
stringifySeconds(startSeconds), stringifySeconds(startSeconds),
stringifySeconds(minSeconds) stringifySeconds(minSeconds)
) );
startSeconds = minSeconds startSeconds = minSeconds;
} }
// The log for a request is typically available within thirty (30) minutes // The log for a request is typically available within thirty (30) minutes
@ -180,34 +204,34 @@ function startZone(zone) {
// set of logs. This will help ensure that any congestion in the log // set of logs. This will help ensure that any congestion in the log
// pipeline has passed and a full set of logs can be ingested. // pipeline has passed and a full set of logs can be ingested.
// https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API // https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API
const maxSeconds = toSeconds(now - oneMinute * 30) const maxSeconds = toSeconds(now - oneMinute * 30);
if (startSeconds < maxSeconds) { if (startSeconds < maxSeconds) {
const endSeconds = startSeconds + LogWindowSeconds const endSeconds = startSeconds + LogWindowSeconds;
ingestLogs(zone, startSeconds, endSeconds).then( ingestLogs(zone, startSeconds, endSeconds).then(
function() { function() {
db.set(startSecondsKey, endSeconds) db.set(startSecondsKey, endSeconds);
setTimeout(takeATurn) setTimeout(takeATurn);
}, },
function(error) { function(error) {
console.error(error.stack) console.error(error.stack);
process.exit(1) process.exit(1);
} }
) );
} else { } else {
setTimeout(takeATurn, (startSeconds - maxSeconds) * 1000) setTimeout(takeATurn, (startSeconds - maxSeconds) * 1000);
} }
}) });
} }
takeATurn() takeATurn();
} }
Promise.all(DomainNames.map(CloudflareAPI.getZones)).then(results => { Promise.all(DomainNames.map(CloudflareAPI.getZones)).then(results => {
const zones = results.reduce((memo, zones) => { const zones = results.reduce((memo, zones) => {
return memo.concat(zones) return memo.concat(zones);
}) });
zones.forEach(startZone) zones.forEach(startZone);
}) });

View File

@ -3,37 +3,38 @@
* permissions. Otherwise rejects the request. * permissions. Otherwise rejects the request.
*/ */
function requireAuth(scope) { function requireAuth(scope) {
let checkScopes let checkScopes;
if (scope.includes(".")) { if (scope.includes(".")) {
const parts = scope.split(".") const parts = scope.split(".");
checkScopes = scopes => parts.reduce((memo, part) => memo && memo[part], scopes) != null checkScopes = scopes =>
parts.reduce((memo, part) => memo && memo[part], scopes) != null;
} else { } else {
checkScopes = scopes => scopes[scope] != null checkScopes = scopes => scopes[scope] != null;
} }
return function(req, res, next) { return function(req, res, next) {
if (req.auth && req.auth.includes(scope)) { if (req.auth && req.auth.includes(scope)) {
return next() // Already auth'd return next(); // Already auth'd
} }
const user = req.user const user = req.user;
if (!user) { if (!user) {
return res.status(403).send({ error: "Missing auth token" }) return res.status(403).send({ error: "Missing auth token" });
} }
if (!user.scopes || !checkScopes(user.scopes)) { if (!user.scopes || !checkScopes(user.scopes)) {
return res.status(403).send({ error: "Insufficient scopes" }) return res.status(403).send({ error: "Insufficient scopes" });
} }
if (req.auth) { if (req.auth) {
req.auth.push(scope) req.auth.push(scope);
} else { } else {
req.auth = [scope] req.auth = [scope];
} }
next() next();
} };
} }
module.exports = requireAuth module.exports = requireAuth;
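A minimal wiring sketch for the middleware above, assuming an Express app; the require path, route, and handler are illustrative rather than the repo's actual server setup:

// Hypothetical wiring; the real server file may differ.
const express = require("express");
const requireAuth = require("./middleware/requireAuth"); // path assumed

const app = express();

// A token whose scopes include { blacklist: { remove: true } } satisfies the
// nested "blacklist.remove" check performed above.
app.delete("/_blacklist/:packageName", requireAuth("blacklist.remove"), (req, res) => {
  res.send({ ok: true }); // placeholder handler
});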

View File

@ -1,41 +1,41 @@
const AuthAPI = require("../AuthAPI") const AuthAPI = require("../AuthAPI");
const ReadMethods = { GET: true, HEAD: true } const ReadMethods = { GET: true, HEAD: true };
/** /**
* Sets req.user from the payload in the auth token in the request. * Sets req.user from the payload in the auth token in the request.
*/ */
function userToken(req, res, next) { function userToken(req, res, next) {
if (req.user) { if (req.user) {
return next() return next();
} }
const token = (ReadMethods[req.method] ? req.query : req.body).token const token = (ReadMethods[req.method] ? req.query : req.body).token;
if (!token) { if (!token) {
req.user = null req.user = null;
return next() return next();
} }
AuthAPI.verifyToken(token).then( AuthAPI.verifyToken(token).then(
payload => { payload => {
req.user = payload req.user = payload;
next() next();
}, },
error => { error => {
if (error.name === "JsonWebTokenError") { if (error.name === "JsonWebTokenError") {
res.status(403).send({ res.status(403).send({
error: `Bad auth token: ${error.message}` error: `Bad auth token: ${error.message}`
}) });
} else { } else {
console.error(error) console.error(error);
res.status(500).send({ res.status(500).send({
error: "Unable to verify auth" error: "Unable to verify auth"
}) });
} }
} }
) );
} }
module.exports = userToken module.exports = userToken;
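Because the middleware above reads the token from req.query on GET/HEAD requests and from req.body otherwise, client calls look roughly like this (the route and token value are illustrative):

// Illustrative client calls; "/_blacklist" mirrors the routes exercised by the tests above.
const token = "an-auth-token"; // placeholder value

fetch(`/_blacklist?token=${token}`); // GET/HEAD: token is read from the query string

fetch("/_blacklist", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ token, packageName: "bad-package" }) // other methods: token is read from the body
});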

View File

@ -1,29 +1,29 @@
const db = require("../../RedisClient") const db = require("../../RedisClient");
function createCache(keyPrefix) { function createCache(keyPrefix) {
function createKey(key) { function createKey(key) {
return keyPrefix + "-" + key return keyPrefix + "-" + key;
} }
function set(key, value, expiry, callback) { function set(key, value, expiry, callback) {
db.setex(createKey(key), expiry, JSON.stringify(value), callback) db.setex(createKey(key), expiry, JSON.stringify(value), callback);
} }
function get(key, callback) { function get(key, callback) {
db.get(createKey(key), function(error, value) { db.get(createKey(key), function(error, value) {
callback(error, value && JSON.parse(value)) callback(error, value && JSON.parse(value));
}) });
} }
function del(key, callback) { function del(key, callback) {
db.del(createKey(key), callback) db.del(createKey(key), callback);
} }
return { return {
set, set,
get, get,
del del
} };
} }
module.exports = createCache module.exports = createCache;
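A small usage sketch for the Redis-backed cache factory above, assuming the shared Redis client is reachable; the key prefix mirrors the one used later in this diff and the cached value is illustrative:

// Hypothetical usage of createCache.
const createCache = require("./utils/createCache"); // path assumed
const PackageInfoCache = createCache("packageInfo");

// Stores a JSON-serialized value under the Redis key "packageInfo-react" for 60 seconds.
PackageInfoCache.set("react", { latest: "16.2.0" }, 60, () => {
  PackageInfoCache.get("react", (error, value) => {
    console.log(error, value); // => null { latest: '16.2.0' }
  });
});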

View File

@ -1,24 +1,24 @@
function createMutex(doWork) { function createMutex(doWork) {
const mutex = {} const mutex = {};
return function(key, payload, callback) { return function(key, payload, callback) {
if (mutex[key]) { if (mutex[key]) {
mutex[key].push(callback) mutex[key].push(callback);
} else { } else {
mutex[key] = [ mutex[key] = [
function() { function() {
delete mutex[key] delete mutex[key];
}, },
callback callback
] ];
doWork(payload, function(error, value) { doWork(payload, function(error, value) {
mutex[key].forEach(callback => { mutex[key].forEach(callback => {
callback(error, value) callback(error, value);
}) });
}) });
} }
} };
} }
module.exports = createMutex module.exports = createMutex;
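A usage sketch for the mutex helper above, showing how concurrent callers that share a key also share a single unit of work; the work function is illustrative:

// Hypothetical example: the two calls below trigger only one "doing work" log.
const createMutex = require("./utils/createMutex"); // path assumed

const slowDouble = createMutex((payload, callback) => {
  console.log("doing work for", payload);
  setTimeout(() => callback(null, payload * 2), 100);
});

slowDouble("six", 6, (error, value) => console.log("first caller:", value));  // 12
slowDouble("six", 6, (error, value) => console.log("second caller:", value)); // 12, from the same run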

View File

@ -1,17 +1,17 @@
function createSearch(query) { function createSearch(query) {
const params = [] const params = [];
Object.keys(query).forEach(param => { Object.keys(query).forEach(param => {
if (query[param] === "") { if (query[param] === "") {
params.push(param) // Omit the trailing "=" from param= params.push(param); // Omit the trailing "=" from param=
} else { } else {
params.push(`${param}=${encodeURIComponent(query[param])}`) params.push(`${param}=${encodeURIComponent(query[param])}`);
} }
}) });
const search = params.join("&") const search = params.join("&");
return search ? `?${search}` : "" return search ? `?${search}` : "";
} }
module.exports = createSearch module.exports = createSearch;
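For reference, a few illustrative inputs and outputs for the helper above:

// Illustrative only; mirrors createSearch as written above.
const createSearch = require("./utils/createSearch"); // path assumed

createSearch({ main: "browser" }); // => "?main=browser"
createSearch({ module: "" });      // => "?module" (empty value omits the trailing "=")
createSearch({});                  // => ""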

View File

@ -1,15 +1,15 @@
require("isomorphic-fetch") require("isomorphic-fetch");
const fs = require("fs") const fs = require("fs");
const path = require("path") const path = require("path");
const tmpdir = require("os-tmpdir") const tmpdir = require("os-tmpdir");
const gunzip = require("gunzip-maybe") const gunzip = require("gunzip-maybe");
const mkdirp = require("mkdirp") const mkdirp = require("mkdirp");
const tar = require("tar-fs") const tar = require("tar-fs");
const createMutex = require("./createMutex") const createMutex = require("./createMutex");
function createTempPath(name, version) { function createTempPath(name, version) {
const normalName = name.replace(/\//g, "-") const normalName = name.replace(/\//g, "-");
return path.join(tmpdir(), `unpkg-${normalName}-${version}`) return path.join(tmpdir(), `unpkg-${normalName}-${version}`);
} }
function stripNamePrefix(headers) { function stripNamePrefix(headers) {
@ -17,12 +17,12 @@ function stripNamePrefix(headers) {
// so we shorten that to just "index.js" here. A few packages use a // so we shorten that to just "index.js" here. A few packages use a
// prefix other than "package/". e.g. the firebase package uses the // prefix other than "package/". e.g. the firebase package uses the
// "firebase_npm/" prefix. So we just strip the first dir name. // "firebase_npm/" prefix. So we just strip the first dir name.
headers.name = headers.name.replace(/^[^/]+\//, "") headers.name = headers.name.replace(/^[^/]+\//, "");
return headers return headers;
} }
function ignoreSymlinks(file, headers) { function ignoreSymlinks(file, headers) {
return headers.type === "link" return headers.type === "link";
} }
function extractResponse(response, outputDir) { function extractResponse(response, outputDir) {
@ -31,26 +31,26 @@ function extractResponse(response, outputDir) {
readable: true, // All dirs/files should be readable. readable: true, // All dirs/files should be readable.
map: stripNamePrefix, map: stripNamePrefix,
ignore: ignoreSymlinks ignore: ignoreSymlinks
}) });
response.body response.body
.pipe(gunzip()) .pipe(gunzip())
.pipe(extract) .pipe(extract)
.on("finish", resolve) .on("finish", resolve)
.on("error", reject) .on("error", reject);
}) });
} }
function fetchAndExtract(tarballURL, outputDir) { function fetchAndExtract(tarballURL, outputDir) {
console.log(`info: Fetching ${tarballURL} and extracting to ${outputDir}`) console.log(`info: Fetching ${tarballURL} and extracting to ${outputDir}`);
return fetch(tarballURL).then(response => { return fetch(tarballURL).then(response => {
return extractResponse(response, outputDir) return extractResponse(response, outputDir);
}) });
} }
const fetchMutex = createMutex((payload, callback) => { const fetchMutex = createMutex((payload, callback) => {
const { tarballURL, outputDir } = payload const { tarballURL, outputDir } = payload;
fs.access(outputDir, function(error) { fs.access(outputDir, function(error) {
if (error) { if (error) {
@ -59,30 +59,30 @@ const fetchMutex = createMutex((payload, callback) => {
// fetched a package for the first time. Carry on! // fetched a package for the first time. Carry on!
mkdirp(outputDir, function(error) { mkdirp(outputDir, function(error) {
if (error) { if (error) {
callback(error) callback(error);
} else { } else {
fetchAndExtract(tarballURL, outputDir).then(() => { fetchAndExtract(tarballURL, outputDir).then(() => {
callback() callback();
}, callback) }, callback);
} }
}) });
} else { } else {
callback(error) callback(error);
} }
} else { } else {
// Best case: we already have this package cached on disk! // Best case: we already have this package cached on disk!
callback() callback();
} }
}) });
}) });
function getPackage(packageConfig, callback) { function getPackage(packageConfig, callback) {
const tarballURL = packageConfig.dist.tarball const tarballURL = packageConfig.dist.tarball;
const outputDir = createTempPath(packageConfig.name, packageConfig.version) const outputDir = createTempPath(packageConfig.name, packageConfig.version);
fetchMutex(tarballURL, { tarballURL, outputDir }, function(error) { fetchMutex(tarballURL, { tarballURL, outputDir }, function(error) {
callback(error, outputDir) callback(error, outputDir);
}) });
} }
module.exports = getPackage module.exports = getPackage;
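A hedged sketch of how the exported function above might be called; packageConfig only needs the fields the module reads (name, version, dist.tarball), the values below are examples, and fetching requires network access:

// Hypothetical call site for getPackage.
const getPackage = require("./utils/getPackage"); // path assumed

const packageConfig = {
  name: "react",
  version: "16.2.0",
  dist: { tarball: "https://registry.npmjs.org/react/-/react-16.2.0.tgz" } // example URL
};

getPackage(packageConfig, (error, outputDir) => {
  // outputDir is a temp dir like <tmpdir>/unpkg-react-16.2.0 containing the extracted tarball.
  console.log(error || outputDir);
});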

View File

@ -1,33 +1,34 @@
require("isomorphic-fetch") require("isomorphic-fetch");
const createCache = require("./createCache") const createCache = require("./createCache");
const createMutex = require("./createMutex") const createMutex = require("./createMutex");
const RegistryURL = process.env.NPM_REGISTRY_URL || "https://registry.npmjs.org" const RegistryURL =
process.env.NPM_REGISTRY_URL || "https://registry.npmjs.org";
const PackageInfoCache = createCache("packageInfo") const PackageInfoCache = createCache("packageInfo");
function fetchPackageInfo(packageName) { function fetchPackageInfo(packageName) {
console.log(`info: Fetching package info for ${packageName}`) console.log(`info: Fetching package info for ${packageName}`);
let encodedPackageName let encodedPackageName;
if (packageName.charAt(0) === "@") { if (packageName.charAt(0) === "@") {
encodedPackageName = `@${encodeURIComponent(packageName.substring(1))}` encodedPackageName = `@${encodeURIComponent(packageName.substring(1))}`;
} else { } else {
encodedPackageName = encodeURIComponent(packageName) encodedPackageName = encodeURIComponent(packageName);
} }
const url = `${RegistryURL}/${encodedPackageName}` const url = `${RegistryURL}/${encodedPackageName}`;
return fetch(url, { return fetch(url, {
headers: { headers: {
Accept: "application/json" Accept: "application/json"
} }
}).then(res => { }).then(res => {
return res.status === 404 ? null : res.json() return res.status === 404 ? null : res.json();
}) });
} }
const PackageNotFound = "PackageNotFound" const PackageNotFound = "PackageNotFound";
// This mutex prevents multiple concurrent requests to // This mutex prevents multiple concurrent requests to
// the registry for the same package info. // the registry for the same package info.
@ -40,32 +41,32 @@ const fetchMutex = createMutex((packageName, callback) => {
// In the worst case, a brand new package's info will be // In the worst case, a brand new package's info will be
// available within 5 minutes. // available within 5 minutes.
PackageInfoCache.set(packageName, PackageNotFound, 300, function() { PackageInfoCache.set(packageName, PackageNotFound, 300, function() {
callback(null, value) callback(null, value);
}) });
} else { } else {
// Cache valid package info for 1 minute. // Cache valid package info for 1 minute.
PackageInfoCache.set(packageName, value, 60, function() { PackageInfoCache.set(packageName, value, 60, function() {
callback(null, value) callback(null, value);
}) });
} }
}, },
function(error) { function(error) {
// Do not cache errors. // Do not cache errors.
PackageInfoCache.del(packageName, function() { PackageInfoCache.del(packageName, function() {
callback(error) callback(error);
}) });
} }
) );
}) });
function getPackageInfo(packageName, callback) { function getPackageInfo(packageName, callback) {
PackageInfoCache.get(packageName, function(error, value) { PackageInfoCache.get(packageName, function(error, value) {
if (error || value != null) { if (error || value != null) {
callback(error, value === PackageNotFound ? null : value) callback(error, value === PackageNotFound ? null : value);
} else { } else {
fetchMutex(packageName, packageName, callback) fetchMutex(packageName, packageName, callback);
} }
}) });
} }
module.exports = getPackageInfo module.exports = getPackageInfo;
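A small usage sketch for the module above; the package name is illustrative, and results come either from the npm registry or from the 1-minute/5-minute caches described in the code:

// Hypothetical call site for getPackageInfo.
const getPackageInfo = require("./utils/getPackageInfo"); // path assumed

getPackageInfo("react", (error, packageInfo) => {
  if (error) {
    console.error(error);
  } else if (packageInfo == null) {
    console.log("No such package"); // misses are cached as PackageNotFound for 5 minutes
  } else {
    console.log(Object.keys(packageInfo)); // the registry document for the package
  }
});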

View File

@ -1,15 +1,15 @@
const db = require("../../RedisClient") const db = require("../../RedisClient");
function incrementCounter(counter, key, by) { function incrementCounter(counter, key, by) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
db.hincrby(counter, key, by, (error, value) => { db.hincrby(counter, key, by, (error, value) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(value) resolve(value);
} }
}) });
}) });
} }
module.exports = incrementCounter module.exports = incrementCounter;

View File

@ -1,25 +1,25 @@
const parsePackageURL = require("../utils/parsePackageURL") const parsePackageURL = require("../utils/parsePackageURL");
/** /**
* Adds various properties to the request object to do with the * Adds various properties to the request object to do with the
* package/file being requested. * package/file being requested.
*/ */
function validatePackageURL(req, res, next) { function validatePackageURL(req, res, next) {
const url = parsePackageURL(req.url) const url = parsePackageURL(req.url);
if (url == null) { if (url == null) {
return res.status(403).send({ error: `Invalid URL: ${req.url}` }) return res.status(403).send({ error: `Invalid URL: ${req.url}` });
} }
req.packageName = url.packageName req.packageName = url.packageName;
req.packageVersion = url.packageVersion req.packageVersion = url.packageVersion;
req.packageSpec = `${url.packageName}@${url.packageVersion}` req.packageSpec = `${url.packageName}@${url.packageVersion}`;
req.pathname = url.pathname req.pathname = url.pathname;
req.filename = url.filename req.filename = url.filename;
req.search = url.search req.search = url.search;
req.query = url.query req.query = url.query;
next() next();
} }
module.exports = validatePackageURL module.exports = validatePackageURL;

View File

@ -1,35 +1,35 @@
const getFileContentType = require("../getFileContentType") const getFileContentType = require("../getFileContentType");
it("gets a content type of text/plain for LICENSE|README|CHANGES|AUTHORS|Makefile", () => { it("gets a content type of text/plain for LICENSE|README|CHANGES|AUTHORS|Makefile", () => {
expect(getFileContentType("AUTHORS")).toBe("text/plain") expect(getFileContentType("AUTHORS")).toBe("text/plain");
expect(getFileContentType("CHANGES")).toBe("text/plain") expect(getFileContentType("CHANGES")).toBe("text/plain");
expect(getFileContentType("LICENSE")).toBe("text/plain") expect(getFileContentType("LICENSE")).toBe("text/plain");
expect(getFileContentType("Makefile")).toBe("text/plain") expect(getFileContentType("Makefile")).toBe("text/plain");
expect(getFileContentType("PATENTS")).toBe("text/plain") expect(getFileContentType("PATENTS")).toBe("text/plain");
expect(getFileContentType("README")).toBe("text/plain") expect(getFileContentType("README")).toBe("text/plain");
}) });
it("gets a content type of text/plain for .*rc files", () => { it("gets a content type of text/plain for .*rc files", () => {
expect(getFileContentType(".eslintrc")).toBe("text/plain") expect(getFileContentType(".eslintrc")).toBe("text/plain");
expect(getFileContentType(".babelrc")).toBe("text/plain") expect(getFileContentType(".babelrc")).toBe("text/plain");
expect(getFileContentType(".anythingrc")).toBe("text/plain") expect(getFileContentType(".anythingrc")).toBe("text/plain");
}) });
it("gets a content type of text/plain for .git* files", () => { it("gets a content type of text/plain for .git* files", () => {
expect(getFileContentType(".gitignore")).toBe("text/plain") expect(getFileContentType(".gitignore")).toBe("text/plain");
expect(getFileContentType(".gitanything")).toBe("text/plain") expect(getFileContentType(".gitanything")).toBe("text/plain");
}) });
it("gets a content type of text/plain for .*ignore files", () => { it("gets a content type of text/plain for .*ignore files", () => {
expect(getFileContentType(".eslintignore")).toBe("text/plain") expect(getFileContentType(".eslintignore")).toBe("text/plain");
expect(getFileContentType(".anythingignore")).toBe("text/plain") expect(getFileContentType(".anythingignore")).toBe("text/plain");
}) });
it("gets a content type of text/plain for .ts files", () => { it("gets a content type of text/plain for .ts files", () => {
expect(getFileContentType("app.ts")).toBe("text/plain") expect(getFileContentType("app.ts")).toBe("text/plain");
expect(getFileContentType("app.d.ts")).toBe("text/plain") expect(getFileContentType("app.d.ts")).toBe("text/plain");
}) });
it("gets a content type of text/plain for .flow files", () => { it("gets a content type of text/plain for .flow files", () => {
expect(getFileContentType("app.js.flow")).toBe("text/plain") expect(getFileContentType("app.js.flow")).toBe("text/plain");
}) });

View File

@ -1,4 +1,4 @@
const parsePackageURL = require("../parsePackageURL") const parsePackageURL = require("../parsePackageURL");
describe("parsePackageURL", () => { describe("parsePackageURL", () => {
it("parses plain packages", () => { it("parses plain packages", () => {
@ -9,8 +9,8 @@ describe("parsePackageURL", () => {
packageName: "history", packageName: "history",
packageVersion: "1.0.0", packageVersion: "1.0.0",
filename: "/umd/history.min.js" filename: "/umd/history.min.js"
}) });
}) });
it("parses plain packages with a hyphen in the name", () => { it("parses plain packages with a hyphen in the name", () => {
expect(parsePackageURL("/query-string@5.0.0/index.js")).toEqual({ expect(parsePackageURL("/query-string@5.0.0/index.js")).toEqual({
@ -20,8 +20,8 @@ describe("parsePackageURL", () => {
packageName: "query-string", packageName: "query-string",
packageVersion: "5.0.0", packageVersion: "5.0.0",
filename: "/index.js" filename: "/index.js"
}) });
}) });
it("parses plain packages with no version specified", () => { it("parses plain packages with no version specified", () => {
expect(parsePackageURL("/query-string/index.js")).toEqual({ expect(parsePackageURL("/query-string/index.js")).toEqual({
@ -31,8 +31,8 @@ describe("parsePackageURL", () => {
packageName: "query-string", packageName: "query-string",
packageVersion: "latest", packageVersion: "latest",
filename: "/index.js" filename: "/index.js"
}) });
}) });
it("parses plain packages with version spec", () => { it("parses plain packages with version spec", () => {
expect(parsePackageURL("/query-string@>=4.0.0/index.js")).toEqual({ expect(parsePackageURL("/query-string@>=4.0.0/index.js")).toEqual({
@ -42,8 +42,8 @@ describe("parsePackageURL", () => {
packageName: "query-string", packageName: "query-string",
packageVersion: ">=4.0.0", packageVersion: ">=4.0.0",
filename: "/index.js" filename: "/index.js"
}) });
}) });
it("parses scoped packages", () => { it("parses scoped packages", () => {
expect(parsePackageURL("/@angular/router@4.3.3/src/index.d.ts")).toEqual({ expect(parsePackageURL("/@angular/router@4.3.3/src/index.d.ts")).toEqual({
@ -53,8 +53,8 @@ describe("parsePackageURL", () => {
packageName: "@angular/router", packageName: "@angular/router",
packageVersion: "4.3.3", packageVersion: "4.3.3",
filename: "/src/index.d.ts" filename: "/src/index.d.ts"
}) });
}) });
it("parses package names with a period in them", () => { it("parses package names with a period in them", () => {
expect(parsePackageURL("/index.js")).toEqual({ expect(parsePackageURL("/index.js")).toEqual({
@ -64,8 +64,8 @@ describe("parsePackageURL", () => {
packageName: "index.js", packageName: "index.js",
packageVersion: "latest", packageVersion: "latest",
filename: "" filename: ""
}) });
}) });
it("parses valid query parameters", () => { it("parses valid query parameters", () => {
expect(parsePackageURL("/history?main=browser")).toEqual({ expect(parsePackageURL("/history?main=browser")).toEqual({
@ -75,11 +75,11 @@ describe("parsePackageURL", () => {
packageName: "history", packageName: "history",
packageVersion: "latest", packageVersion: "latest",
filename: "" filename: ""
}) });
}) });
it("returns null for invalid pathnames", () => { it("returns null for invalid pathnames", () => {
expect(parsePackageURL("history")).toBe(null) expect(parsePackageURL("history")).toBe(null);
expect(parsePackageURL("/.invalid")).toBe(null) expect(parsePackageURL("/.invalid")).toBe(null);
}) });
}) });

View File

@ -1,5 +1,5 @@
const babel = require("babel-core") const babel = require("babel-core");
const unpkgRewrite = require("../unpkgRewriteBabelPlugin") const unpkgRewrite = require("../unpkgRewriteBabelPlugin");
const testCases = [ const testCases = [
{ {
@ -8,7 +8,8 @@ const testCases = [
}, },
{ {
before: "import router from '@angular/router';", before: "import router from '@angular/router';",
after: "import router from 'https://unpkg.com/@angular/router@4.3.5?module';" after:
"import router from 'https://unpkg.com/@angular/router@4.3.5?module';"
}, },
{ {
before: "import map from 'lodash.map';", before: "import map from 'lodash.map';",
@ -54,23 +55,23 @@ const testCases = [
before: "export var message = 'hello';", before: "export var message = 'hello';",
after: "export var message = 'hello';" after: "export var message = 'hello';"
} }
] ];
const dependencies = { const dependencies = {
react: "15.6.1", react: "15.6.1",
"@angular/router": "4.3.5", "@angular/router": "4.3.5",
"lodash.map": "4.6.0", "lodash.map": "4.6.0",
pn: "1.0.0" pn: "1.0.0"
} };
describe("Rewriting imports/exports", () => { describe("Rewriting imports/exports", () => {
testCases.forEach(testCase => { testCases.forEach(testCase => {
it(`successfully rewrites "${testCase.before}"`, () => { it(`successfully rewrites "${testCase.before}"`, () => {
const result = babel.transform(testCase.before, { const result = babel.transform(testCase.before, {
plugins: [unpkgRewrite(dependencies)] plugins: [unpkgRewrite(dependencies)]
}) });
expect(result.code).toEqual(testCase.after) expect(result.code).toEqual(testCase.after);
}) });
}) });
}) });

View File

@ -1,9 +1,9 @@
function createPackageURL(packageName, version, pathname, search) { function createPackageURL(packageName, version, pathname, search) {
let url = `/${packageName}` let url = `/${packageName}`;
if (version != null) url += `@${version}` if (version != null) url += `@${version}`;
if (pathname) url += pathname if (pathname) url += pathname;
if (search) url += search if (search) url += search;
return url return url;
} }
module.exports = createPackageURL module.exports = createPackageURL;
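Illustrative outputs for the URL builder above:

// Mirrors createPackageURL as defined above; the arguments are examples.
const createPackageURL = require("./utils/createPackageURL"); // path assumed

createPackageURL("history", "4.7.2", "/umd/history.min.js", ""); // => "/history@4.7.2/umd/history.min.js"
createPackageURL("@angular/router", null, "/src/index.d.ts", ""); // => "/@angular/router/src/index.d.ts"
createPackageURL("query-string", "latest", "", "?main=browser");  // => "/query-string@latest?main=browser"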

View File

@ -1,13 +1,22 @@
const mime = require("mime") const mime = require("mime");
mime.define({ mime.define({
"text/plain": ["authors", "changes", "license", "makefile", "patents", "readme", "ts", "flow"] "text/plain": [
}) "authors",
"changes",
"license",
"makefile",
"patents",
"readme",
"ts",
"flow"
]
});
const TextFiles = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore)$/i const TextFiles = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore)$/i;
function getFileContentType(file) { function getFileContentType(file) {
return TextFiles.test(file) ? "text/plain" : mime.lookup(file) return TextFiles.test(file) ? "text/plain" : mime.lookup(file);
} }
module.exports = getFileContentType module.exports = getFileContentType;
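A few illustrative lookups for the helper above; the plain-text overrides match the getFileContentType tests earlier in this diff:

// Illustrative; behavior follows getFileContentType as defined above.
const getFileContentType = require("./utils/getFileContentType"); // path assumed

getFileContentType("app.d.ts");  // => "text/plain" (extension added via mime.define)
getFileContentType(".eslintrc"); // => "text/plain" (matches the TextFiles regex)
getFileContentType("index.js");  // => "application/javascript" (falls through to mime.lookup)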

View File

@ -1,15 +1,15 @@
const fs = require("fs") const fs = require("fs");
function getFileStats(file) { function getFileStats(file) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
fs.lstat(file, (error, stats) => { fs.lstat(file, (error, stats) => {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(stats) resolve(stats);
} }
}) });
}) });
} }
module.exports = getFileStats module.exports = getFileStats;

View File

@ -1,44 +1,51 @@
const fs = require("fs") const fs = require("fs");
const path = require("path") const path = require("path");
const SRIToolbox = require("sri-toolbox") const SRIToolbox = require("sri-toolbox");
const getFileContentType = require("./getFileContentType") const getFileContentType = require("./getFileContentType");
const getFileStats = require("./getFileStats") const getFileStats = require("./getFileStats");
const getFileType = require("./getFileType") const getFileType = require("./getFileType");
function getEntries(dir, file, maximumDepth) { function getEntries(dir, file, maximumDepth) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
fs.readdir(path.join(dir, file), function(error, files) { fs.readdir(path.join(dir, file), function(error, files) {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve( resolve(
Promise.all(files.map(f => getFileStats(path.join(dir, file, f)))).then(statsArray => { Promise.all(
files.map(f => getFileStats(path.join(dir, file, f)))
).then(statsArray => {
return Promise.all( return Promise.all(
statsArray.map((stats, index) => statsArray.map((stats, index) =>
getMetadataRecursive(dir, path.join(file, files[index]), stats, maximumDepth - 1) getMetadataRecursive(
dir,
path.join(file, files[index]),
stats,
maximumDepth - 1
)
) )
) );
}) })
) );
} }
}) });
}) });
} }
function formatTime(time) { function formatTime(time) {
return new Date(time).toISOString() return new Date(time).toISOString();
} }
function getIntegrity(file) { function getIntegrity(file) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
fs.readFile(file, function(error, data) { fs.readFile(file, function(error, data) {
if (error) { if (error) {
reject(error) reject(error);
} else { } else {
resolve(SRIToolbox.generate({ algorithms: ["sha384"] }, data)) resolve(SRIToolbox.generate({ algorithms: ["sha384"] }, data));
} }
}) });
}) });
} }
function getMetadataRecursive(dir, file, stats, maximumDepth) { function getMetadataRecursive(dir, file, stats, maximumDepth) {
@ -48,27 +55,31 @@ function getMetadataRecursive(dir, file, stats, maximumDepth) {
path: file, path: file,
size: stats.size, size: stats.size,
type: getFileType(stats) type: getFileType(stats)
} };
if (stats.isFile()) { if (stats.isFile()) {
return getIntegrity(path.join(dir, file)).then(integrity => { return getIntegrity(path.join(dir, file)).then(integrity => {
metadata.integrity = integrity metadata.integrity = integrity;
return metadata return metadata;
}) });
} }
if (!stats.isDirectory() || maximumDepth === 0) return Promise.resolve(metadata) if (!stats.isDirectory() || maximumDepth === 0)
return Promise.resolve(metadata);
return getEntries(dir, file, maximumDepth).then(files => { return getEntries(dir, file, maximumDepth).then(files => {
metadata.files = files metadata.files = files;
return metadata return metadata;
}) });
} }
function getMetadata(baseDir, path, stats, maximumDepth, callback) { function getMetadata(baseDir, path, stats, maximumDepth, callback) {
getMetadataRecursive(baseDir, path, stats, maximumDepth).then(function(metadata) { getMetadataRecursive(baseDir, path, stats, maximumDepth).then(function(
callback(null, metadata) metadata
}, callback) ) {
callback(null, metadata);
},
callback);
} }
module.exports = getMetadata module.exports = getMetadata;

View File

@ -1,35 +1,35 @@
const url = require("url") const url = require("url");
const validatePackageName = require("./validatePackageName") const validatePackageName = require("./validatePackageName");
const URLFormat = /^\/((?:@[^/@]+\/)?[^/@]+)(?:@([^/]+))?(\/.*)?$/ const URLFormat = /^\/((?:@[^/@]+\/)?[^/@]+)(?:@([^/]+))?(\/.*)?$/;
function decodeParam(param) { function decodeParam(param) {
if (param) { if (param) {
try { try {
return decodeURIComponent(param) return decodeURIComponent(param);
} catch (error) { } catch (error) {
// Ignore invalid params. // Ignore invalid params.
} }
} }
return "" return "";
} }
function parsePackageURL(packageURL) { function parsePackageURL(packageURL) {
const { pathname, search, query } = url.parse(packageURL, true) const { pathname, search, query } = url.parse(packageURL, true);
const match = URLFormat.exec(pathname) const match = URLFormat.exec(pathname);
// Disallow invalid URL formats. // Disallow invalid URL formats.
if (match == null) return null if (match == null) return null;
const packageName = match[1] const packageName = match[1];
// Disallow invalid npm package names. // Disallow invalid npm package names.
if (!validatePackageName(packageName)) return null if (!validatePackageName(packageName)) return null;
const packageVersion = decodeParam(match[2]) || "latest" const packageVersion = decodeParam(match[2]) || "latest";
const filename = decodeParam(match[3]) const filename = decodeParam(match[3]);
return { return {
// If the URL is /@scope/name@version/file.js?main=browser: // If the URL is /@scope/name@version/file.js?main=browser:
@ -39,7 +39,7 @@ function parsePackageURL(packageURL) {
packageName, // @scope/name packageName, // @scope/name
packageVersion, // version packageVersion, // version
filename // /file.js filename // /file.js
} };
} }
module.exports = parsePackageURL module.exports = parsePackageURL;
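One illustrative parse, consistent with the parsePackageURL tests earlier in this diff:

// Mirrors the behavior covered by the tests above.
const parsePackageURL = require("./utils/parsePackageURL"); // path assumed

parsePackageURL("/@angular/router@4.3.3/src/index.d.ts");
// => includes packageName: "@angular/router", packageVersion: "4.3.3",
//    filename: "/src/index.d.ts" (plus the pathname/search/query fields noted above)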

View File

@ -1,9 +1,9 @@
const fs = require("fs") const fs = require("fs");
const path = require("path") const path = require("path");
const csso = require("csso") const csso = require("csso");
function readCSS(...args) { function readCSS(...args) {
return csso.minify(fs.readFileSync(path.resolve(...args), "utf8")).css return csso.minify(fs.readFileSync(path.resolve(...args), "utf8")).css;
} }
module.exports = readCSS module.exports = readCSS;

View File

@ -1,11 +1,13 @@
const React = require("react") const React = require("react");
const ReactDOMServer = require("react-dom/server") const ReactDOMServer = require("react-dom/server");
const doctype = "<!DOCTYPE html>" const doctype = "<!DOCTYPE html>";
function renderPage(page, props) { function renderPage(page, props) {
const html = ReactDOMServer.renderToStaticMarkup(React.createElement(page, props)) const html = ReactDOMServer.renderToStaticMarkup(
return doctype + html React.createElement(page, props)
);
return doctype + html;
} }
module.exports = renderPage module.exports = renderPage;

View File

@ -1,7 +1,7 @@
const URL = require("whatwg-url") const URL = require("whatwg-url");
const warning = require("warning") const warning = require("warning");
const BareIdentifierFormat = /^((?:@[^\/]+\/)?[^\/]+)(\/.*)?$/ const BareIdentifierFormat = /^((?:@[^\/]+\/)?[^\/]+)(\/.*)?$/;
function unpkgRewriteBabelPlugin(dependencies = {}) { function unpkgRewriteBabelPlugin(dependencies = {}) {
return { return {
@ -9,36 +9,36 @@ function unpkgRewriteBabelPlugin(dependencies = {}) {
visitor: { visitor: {
"ImportDeclaration|ExportNamedDeclaration|ExportAllDeclaration"(path) { "ImportDeclaration|ExportNamedDeclaration|ExportAllDeclaration"(path) {
if (!path.node.source) return // probably a variable declaration if (!path.node.source) return; // probably a variable declaration
if ( if (
URL.parseURL(path.node.source.value) != null || URL.parseURL(path.node.source.value) != null ||
path.node.source.value.substr(0, 2) === "//" path.node.source.value.substr(0, 2) === "//"
) )
return // valid URL or URL w/o protocol, leave it alone return; // valid URL or URL w/o protocol, leave it alone
if ([".", "/"].indexOf(path.node.source.value.charAt(0)) >= 0) { if ([".", "/"].indexOf(path.node.source.value.charAt(0)) >= 0) {
// local path // local path
path.node.source.value = `${path.node.source.value}?module` path.node.source.value = `${path.node.source.value}?module`;
} else { } else {
// "bare" identifier // "bare" identifier
const match = BareIdentifierFormat.exec(path.node.source.value) const match = BareIdentifierFormat.exec(path.node.source.value);
const packageName = match[1] const packageName = match[1];
const file = match[2] || "" const file = match[2] || "";
warning( warning(
dependencies[packageName], dependencies[packageName],
'Missing version info for package "%s" in dependencies; falling back to "latest"', 'Missing version info for package "%s" in dependencies; falling back to "latest"',
packageName packageName
) );
const version = dependencies[packageName] || "latest" const version = dependencies[packageName] || "latest";
path.node.source.value = `https://unpkg.com/${packageName}@${version}${file}?module` path.node.source.value = `https://unpkg.com/${packageName}@${version}${file}?module`;
} }
} }
} }
} };
} }
module.exports = unpkgRewriteBabelPlugin module.exports = unpkgRewriteBabelPlugin;
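A usage sketch that mirrors the plugin's test file earlier in this diff; the dependency map passed to the plugin is illustrative:

// Hypothetical call, following the same babel.transform pattern as the tests above.
const babel = require("babel-core");
const unpkgRewrite = require("./unpkgRewriteBabelPlugin"); // path assumed

const result = babel.transform("import router from '@angular/router';", {
  plugins: [unpkgRewrite({ "@angular/router": "4.3.5" })]
});

console.log(result.code);
// => import router from 'https://unpkg.com/@angular/router@4.3.5?module';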

View File

@ -1,7 +1,7 @@
const validateNpmPackageName = require("validate-npm-package-name") const validateNpmPackageName = require("validate-npm-package-name");
function validatePackageName(packageName) { function validatePackageName(packageName) {
return validateNpmPackageName(packageName).errors == null return validateNpmPackageName(packageName).errors == null;
} }
module.exports = validatePackageName module.exports = validatePackageName;