Prettify everything

MICHAEL JACKSON 2018-02-17 18:00:56 -08:00
parent d6f2bc089a
commit 2e1f09e913
58 changed files with 1061 additions and 932 deletions

View File

@ -1,6 +1,8 @@
import React from "react"
import contentHTML from "./About.md"
import React from "react";
import contentHTML from "./About.md";
const About = () => <div className="wrapper" dangerouslySetInnerHTML={{ __html: contentHTML }} />
const About = () => (
<div className="wrapper" dangerouslySetInnerHTML={{ __html: contentHTML }} />
);
export default About
export default About;

View File

@ -1,11 +1,11 @@
import React from "react"
import { HashRouter } from "react-router-dom"
import Layout from "./Layout"
import React from "react";
import { HashRouter } from "react-router-dom";
import Layout from "./Layout";
const App = () => (
<HashRouter>
<Layout />
</HashRouter>
)
);
export default App
export default App;

View File

@ -1,6 +1,8 @@
import React from "react"
import contentHTML from "./Home.md"
import React from "react";
import contentHTML from "./Home.md";
const Home = () => <div className="wrapper" dangerouslySetInnerHTML={{ __html: contentHTML }} />
const Home = () => (
<div className="wrapper" dangerouslySetInnerHTML={{ __html: contentHTML }} />
);
export default Home
export default Home;

View File

@ -1,78 +1,81 @@
import React from "react"
import PropTypes from "prop-types"
import { Motion, spring } from "react-motion"
import { Switch, Route, Link, withRouter } from "react-router-dom"
import WindowSize from "./WindowSize"
import About from "./About"
import Stats from "./Stats"
import Home from "./Home"
import React from "react";
import PropTypes from "prop-types";
import { Motion, spring } from "react-motion";
import { Switch, Route, Link, withRouter } from "react-router-dom";
import WindowSize from "./WindowSize";
import About from "./About";
import Stats from "./Stats";
import Home from "./Home";
class Layout extends React.Component {
static propTypes = {
location: PropTypes.object,
children: PropTypes.node
}
};
state = {
underlineLeft: 0,
underlineWidth: 0,
useSpring: false,
stats: null
}
};
adjustUnderline = (useSpring = false) => {
let itemIndex
let itemIndex;
switch (this.props.location.pathname) {
case "/stats":
itemIndex = 1
break
itemIndex = 1;
break;
case "/about":
itemIndex = 2
break
itemIndex = 2;
break;
case "/":
default:
itemIndex = 0
itemIndex = 0;
}
const itemNodes = this.listNode.querySelectorAll("li")
const currentNode = itemNodes[itemIndex]
const itemNodes = this.listNode.querySelectorAll("li");
const currentNode = itemNodes[itemIndex];
this.setState({
underlineLeft: currentNode.offsetLeft,
underlineWidth: currentNode.offsetWidth,
useSpring
})
}
});
};
componentDidMount() {
this.adjustUnderline()
this.adjustUnderline();
fetch("/_stats?period=last-month")
.then(res => res.json())
.then(stats => this.setState({ stats }))
.then(stats => this.setState({ stats }));
if (window.localStorage) {
const savedStats = window.localStorage.savedStats
const savedStats = window.localStorage.savedStats;
if (savedStats) this.setState({ stats: JSON.parse(savedStats) })
if (savedStats) this.setState({ stats: JSON.parse(savedStats) });
window.onbeforeunload = () => {
localStorage.savedStats = JSON.stringify(this.state.stats)
}
localStorage.savedStats = JSON.stringify(this.state.stats);
};
}
}
componentDidUpdate(prevProps) {
if (prevProps.location.pathname !== this.props.location.pathname) this.adjustUnderline(true)
if (prevProps.location.pathname !== this.props.location.pathname)
this.adjustUnderline(true);
}
render() {
const { underlineLeft, underlineWidth, useSpring } = this.state
const { underlineLeft, underlineWidth, useSpring } = this.state;
const style = {
left: useSpring ? spring(underlineLeft, { stiffness: 220 }) : underlineLeft,
left: useSpring
? spring(underlineLeft, { stiffness: 220 })
: underlineLeft,
width: useSpring ? spring(underlineWidth) : underlineWidth
}
};
return (
<div>
@ -81,7 +84,10 @@ class Layout extends React.Component {
<header>
<h1 className="layout-title">unpkg</h1>
<nav className="layout-nav">
<ol className="layout-nav-list" ref={node => (this.listNode = node)}>
<ol
className="layout-nav-list"
ref={node => (this.listNode = node)}
>
<li>
<Link to="/">Home</Link>
</li>
@ -111,13 +117,16 @@ class Layout extends React.Component {
</div>
<Switch>
<Route path="/stats" render={() => <Stats data={this.state.stats} />} />
<Route
path="/stats"
render={() => <Stats data={this.state.stats} />}
/>
<Route path="/about" component={About} />
<Route path="/" component={Home} />
</Switch>
</div>
)
);
}
}
export default withRouter(Layout)
export default withRouter(Layout);

View File

@ -1,51 +1,55 @@
import React from "react"
import PropTypes from "prop-types"
import formatBytes from "pretty-bytes"
import formatDate from "date-fns/format"
import parseDate from "date-fns/parse"
import formatNumber from "./utils/formatNumber"
import formatPercent from "./utils/formatPercent"
import React from "react";
import PropTypes from "prop-types";
import formatBytes from "pretty-bytes";
import formatDate from "date-fns/format";
import parseDate from "date-fns/parse";
import formatNumber from "./utils/formatNumber";
import formatPercent from "./utils/formatPercent";
import { continents, countries } from "countries-list"
import { continents, countries } from "countries-list";
const getCountriesByContinent = continent =>
Object.keys(countries).filter(country => countries[country].continent === continent)
Object.keys(countries).filter(
country => countries[country].continent === continent
);
const sumKeyValues = (hash, keys) => keys.reduce((n, key) => n + (hash[key] || 0), 0)
const sumKeyValues = (hash, keys) =>
keys.reduce((n, key) => n + (hash[key] || 0), 0);
const sumValues = hash => Object.keys(hash).reduce((memo, key) => memo + hash[key], 0)
const sumValues = hash =>
Object.keys(hash).reduce((memo, key) => memo + hash[key], 0);
class Stats extends React.Component {
static propTypes = {
data: PropTypes.object
}
};
state = {
minPackageRequests: 1000000,
minCountryRequests: 1000000
}
};
render() {
const { data } = this.props
const { data } = this.props;
if (data == null) return null
if (data == null) return null;
const totals = data.totals
const totals = data.totals;
// Summary data
const since = parseDate(totals.since)
const until = parseDate(totals.until)
const since = parseDate(totals.since);
const until = parseDate(totals.until);
// Packages
const packageRows = []
const packageRows = [];
Object.keys(totals.requests.package)
.sort((a, b) => {
return totals.requests.package[b] - totals.requests.package[a]
return totals.requests.package[b] - totals.requests.package[a];
})
.forEach(packageName => {
const requests = totals.requests.package[packageName]
const bandwidth = totals.bandwidth.package[packageName]
const requests = totals.requests.package[packageName];
const bandwidth = totals.bandwidth.package[packageName];
if (requests >= this.state.minPackageRequests) {
packageRows.push(
@ -59,44 +63,51 @@ class Stats extends React.Component {
</a>
</td>
<td>
{formatNumber(requests)} ({formatPercent(requests / totals.requests.all)}%)
{formatNumber(requests)} ({formatPercent(
requests / totals.requests.all
)}%)
</td>
{bandwidth ? (
<td>
{formatBytes(bandwidth)} ({formatPercent(bandwidth / totals.bandwidth.all)}%)
{formatBytes(bandwidth)} ({formatPercent(
bandwidth / totals.bandwidth.all
)}%)
</td>
) : (
<td>-</td>
)}
</tr>
)
);
}
})
});
// Regions
const regionRows = []
const regionRows = [];
const continentsData = Object.keys(continents).reduce((memo, continent) => {
const localCountries = getCountriesByContinent(continent)
const localCountries = getCountriesByContinent(continent);
memo[continent] = {
countries: localCountries,
requests: sumKeyValues(totals.requests.country, localCountries),
bandwidth: sumKeyValues(totals.bandwidth.country, localCountries)
}
};
return memo
}, {})
return memo;
}, {});
const topContinents = Object.keys(continentsData).sort((a, b) => {
return continentsData[b].requests - continentsData[a].requests
})
return continentsData[b].requests - continentsData[a].requests;
});
topContinents.forEach(continent => {
const continentName = continents[continent]
const continentData = continentsData[continent]
const continentName = continents[continent];
const continentData = continentsData[continent];
if (continentData.requests > this.state.minCountryRequests && continentData.bandwidth !== 0) {
if (
continentData.requests > this.state.minCountryRequests &&
continentData.bandwidth !== 0
) {
regionRows.push(
<tr key={continent} className="continent-row">
<td>{continentName}</td>
@ -111,15 +122,15 @@ class Stats extends React.Component {
)}%)
</td>
</tr>
)
);
const topCountries = continentData.countries.sort((a, b) => {
return totals.requests.country[b] - totals.requests.country[a]
})
return totals.requests.country[b] - totals.requests.country[a];
});
topCountries.forEach(country => {
const countryRequests = totals.requests.country[country]
const countryBandwidth = totals.bandwidth.country[country]
const countryRequests = totals.requests.country[country];
const countryBandwidth = totals.bandwidth.country[country];
if (countryRequests > this.state.minCountryRequests) {
regionRows.push(
@ -136,19 +147,19 @@ class Stats extends React.Component {
)}%)
</td>
</tr>
)
);
}
})
});
}
})
});
// Protocols
const protocolRows = Object.keys(totals.requests.protocol)
.sort((a, b) => {
return totals.requests.protocol[b] - totals.requests.protocol[a]
return totals.requests.protocol[b] - totals.requests.protocol[a];
})
.map(protocol => {
const requests = totals.requests.protocol[protocol]
const requests = totals.requests.protocol[protocol];
return (
<tr key={protocol}>
@ -159,19 +170,22 @@ class Stats extends React.Component {
)}%)
</td>
</tr>
)
})
);
});
return (
<div className="wrapper">
<p>
From <strong>{formatDate(since, "MMM D")}</strong> to{" "}
<strong>{formatDate(until, "MMM D")}</strong> unpkg served{" "}
<strong>{formatNumber(totals.requests.all)}</strong> requests and a total of{" "}
<strong>{formatBytes(totals.bandwidth.all)}</strong> of data to{" "}
<strong>{formatNumber(totals.uniques.all)}</strong> unique visitors,{" "}
<strong>{formatPercent(totals.requests.cached / totals.requests.all, 0)}%</strong> of
which were served from the cache.
<strong>{formatNumber(totals.requests.all)}</strong> requests and a
total of <strong>{formatBytes(totals.bandwidth.all)}</strong> of data
to <strong>{formatNumber(totals.uniques.all)}</strong> unique
visitors,{" "}
<strong>
{formatPercent(totals.requests.cached / totals.requests.all, 0)}%
</strong>{" "}
of which were served from the cache.
</p>
<h3>Packages</h3>
@ -241,7 +255,12 @@ class Stats extends React.Component {
requests.
</p>
<table cellSpacing="0" cellPadding="0" style={{ width: "100%" }} className="regions-table">
<table
cellSpacing="0"
cellPadding="0"
style={{ width: "100%" }}
className="regions-table"
>
<thead>
<tr>
<th>Region</th>
@ -270,8 +289,8 @@ class Stats extends React.Component {
<tbody>{protocolRows}</tbody>
</table>
</div>
)
);
}
}
export default Stats
export default Stats;

View File

@ -1,34 +1,34 @@
import React from "react"
import PropTypes from "prop-types"
import addEvent from "./utils/addEvent"
import removeEvent from "./utils/removeEvent"
import React from "react";
import PropTypes from "prop-types";
import addEvent from "./utils/addEvent";
import removeEvent from "./utils/removeEvent";
const ResizeEvent = "resize"
const ResizeEvent = "resize";
class WindowSize extends React.Component {
static propTypes = {
onChange: PropTypes.func
}
};
handleWindowResize = () => {
if (this.props.onChange)
this.props.onChange({
width: window.innerWidth,
height: window.innerHeight
})
}
});
};
componentDidMount() {
addEvent(window, ResizeEvent, this.handleWindowResize)
addEvent(window, ResizeEvent, this.handleWindowResize);
}
componentWillUnmount() {
removeEvent(window, ResizeEvent, this.handleWindowResize)
removeEvent(window, ResizeEvent, this.handleWindowResize);
}
render() {
return null
return null;
}
}
export default WindowSize
export default WindowSize;

View File

@ -1,12 +1,7 @@
body {
font-size: 16px;
font-family: -apple-system,
BlinkMacSystemFont,
"Segoe UI",
Roboto,
Helvetica,
Arial,
sans-serif;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica,
Arial, sans-serif;
line-height: 1.5;
padding: 5px 20px;
}
@ -50,7 +45,8 @@ th {
text-align: left;
background-color: #eee;
}
th, td {
th,
td {
padding: 5px;
}
th {

View File

@ -1,6 +1,6 @@
import React from "react"
import ReactDOM from "react-dom"
import App from "./App"
import "./main.css"
import React from "react";
import ReactDOM from "react-dom";
import App from "./App";
import "./main.css";
ReactDOM.render(<App />, document.getElementById("app"))
ReactDOM.render(<App />, document.getElementById("app"));

View File

@ -1,9 +1,9 @@
const addEvent = (node, type, handler) => {
if (node.addEventListener) {
node.addEventListener(type, handler, false)
node.addEventListener(type, handler, false);
} else if (node.attachEvent) {
node.attachEvent("on" + type, handler)
node.attachEvent("on" + type, handler);
}
}
};
export default addEvent
export default addEvent;

View File

@ -1,10 +1,10 @@
const formatNumber = n => {
const digits = String(n).split("")
const groups = []
const digits = String(n).split("");
const groups = [];
while (digits.length) groups.unshift(digits.splice(-3).join(""))
while (digits.length) groups.unshift(digits.splice(-3).join(""));
return groups.join(",")
}
return groups.join(",");
};
export default formatNumber
export default formatNumber;

View File

@ -1,3 +1,4 @@
const formatPercent = (n, fixed = 1) => String((n.toPrecision(2) * 100).toFixed(fixed))
const formatPercent = (n, fixed = 1) =>
String((n.toPrecision(2) * 100).toFixed(fixed));
export default formatPercent
export default formatPercent;

View File

@ -1,3 +1,3 @@
const parseNumber = s => parseInt(s.replace(/,/g, ""), 10) || 0
const parseNumber = s => parseInt(s.replace(/,/g, ""), 10) || 0;
export default parseNumber
export default parseNumber;

View File

@ -1,9 +1,9 @@
const removeEvent = (node, type, handler) => {
if (node.removeEventListener) {
node.removeEventListener(type, handler, false)
node.removeEventListener(type, handler, false);
} else if (node.detachEvent) {
node.detachEvent("on" + type, handler)
node.detachEvent("on" + type, handler);
}
}
};
export default removeEvent
export default removeEvent;

View File

@ -4,8 +4,8 @@ Some API methods require an authentication token. This token is a [JSON web toke
Once you obtain an API token (see below) you can pass it to the server in one of two ways:
- For GET/HEAD requests, use the `?token` query parameter
- For all other requests, use the `{token}` parameter as part of the JSON in the request body
* For GET/HEAD requests, use the `?token` query parameter
* For all other requests, use the `{token}` parameter as part of the JSON in the request body
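As a minimal client-side sketch of both styles (the host, token value, and package name below are placeholders, and in Node this assumes a fetch polyfill such as isomorphic-fetch, which this repo already uses):

```js
// Minimal sketch — token value and package name are placeholders.
const token = "<your-api-token>";

// GET/HEAD requests: pass the token in the ?token query parameter.
fetch(`https://unpkg.com/_auth?token=${token}`)
  .then(res => res.json())
  .then(json => console.log(json.auth));

// All other requests: pass the token as part of the JSON request body.
fetch("https://unpkg.com/_blacklist", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ token, packageName: "some-bad-package" })
})
  .then(res => res.json())
  .then(json => console.log(json.message));
```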
### POST /\_auth
@ -40,7 +40,7 @@ Required scope: none
Query parameters:
- `token` - The auth token to verify and decode
* `token` - The auth token to verify and decode
Example:
@ -102,8 +102,8 @@ Required scope: `blacklist.add`
Body parameters:
- `token` - The auth token
- `packageName` - The package to add to the blacklist
* `token` - The auth token
* `packageName` - The package to add to the blacklist
Example:
@ -122,7 +122,7 @@ Required scope: `blacklist.remove`
Body parameters:
- `token` - The auth token
* `token` - The auth token
Example:

View File

@ -1,23 +1,23 @@
const AuthAPI = require("../server/AuthAPI")
const AuthAPI = require("../server/AuthAPI");
const scopes = {
blacklist: {
read: true
}
}
};
AuthAPI.createToken(scopes).then(
token => {
// Verify it, just to be sure.
AuthAPI.verifyToken(token).then(payload => {
console.log(token, "\n")
console.log(JSON.stringify(payload, null, 2), "\n")
console.log(AuthAPI.getPublicKey())
process.exit()
})
console.log(token, "\n");
console.log(JSON.stringify(payload, null, 2), "\n");
console.log(AuthAPI.getPublicKey());
process.exit();
});
},
error => {
console.error(error)
process.exit(1)
console.error(error);
process.exit(1);
}
)
);

View File

@ -1,15 +1,21 @@
require("isomorphic-fetch")
const invariant = require("invariant")
require("isomorphic-fetch");
const invariant = require("invariant");
const CloudflareEmail = process.env.CLOUDFLARE_EMAIL
const CloudflareKey = process.env.CLOUDFLARE_KEY
const RayID = process.argv[2]
const CloudflareEmail = process.env.CLOUDFLARE_EMAIL;
const CloudflareKey = process.env.CLOUDFLARE_KEY;
const RayID = process.argv[2];
invariant(CloudflareEmail, "Missing the $CLOUDFLARE_EMAIL environment variable")
invariant(
CloudflareEmail,
"Missing the $CLOUDFLARE_EMAIL environment variable"
);
invariant(CloudflareKey, "Missing the $CLOUDFLARE_KEY environment variable")
invariant(CloudflareKey, "Missing the $CLOUDFLARE_KEY environment variable");
invariant(RayID, "Missing the RAY_ID argument; use `heroku run node show-log.js RAY_ID`")
invariant(
RayID,
"Missing the RAY_ID argument; use `heroku run node show-log.js RAY_ID`"
);
function getZones(domain) {
return fetch(`https://api.cloudflare.com/client/v4/zones?name=${domain}`, {
@ -20,21 +26,24 @@ function getZones(domain) {
}
})
.then(res => res.json())
.then(data => data.result)
.then(data => data.result);
}
function getLog(zoneId, rayId) {
return fetch(`https://api.cloudflare.com/client/v4/zones/${zoneId}/logs/requests/${rayId}`, {
method: "GET",
headers: {
"X-Auth-Email": CloudflareEmail,
"X-Auth-Key": CloudflareKey
return fetch(
`https://api.cloudflare.com/client/v4/zones/${zoneId}/logs/requests/${rayId}`,
{
method: "GET",
headers: {
"X-Auth-Email": CloudflareEmail,
"X-Auth-Key": CloudflareKey
}
}
}).then(res => (res.status === 404 ? "NOT FOUND" : res.json()))
).then(res => (res.status === 404 ? "NOT FOUND" : res.json()));
}
getZones("unpkg.com").then(zones => {
getLog(zones[0].id, RayID).then(entry => {
console.log(entry)
})
})
console.log(entry);
});
});

View File

@ -1,48 +1,58 @@
const subDays = require("date-fns/sub_days")
const prettyBytes = require("pretty-bytes")
const table = require("text-table")
const subDays = require("date-fns/sub_days");
const prettyBytes = require("pretty-bytes");
const table = require("text-table");
const StatsAPI = require("../server/StatsAPI")
const now = new Date()
const StatsAPI = require("../server/StatsAPI");
const now = new Date();
function createRange(start, end) {
const range = []
while (start < end) range.push(start++)
return range
const range = [];
while (start < end) range.push(start++);
return range;
}
function createPastDays(n) {
return createRange(1, n + 1)
.map(days => subDays(now, days))
.reverse()
.reverse();
}
const pastSevenDays = createPastDays(7)
const pastThirtyDays = createPastDays(30)
const pastSevenDays = createPastDays(7);
const pastThirtyDays = createPastDays(30);
Promise.all([
StatsAPI.sumKeys(pastSevenDays.map(date => `stats-requests-${StatsAPI.createDayKey(date)}`)),
StatsAPI.sumKeys(pastSevenDays.map(date => `stats-bandwidth-${StatsAPI.createDayKey(date)}`)),
StatsAPI.sumKeys(pastThirtyDays.map(date => `stats-requests-${StatsAPI.createDayKey(date)}`)),
StatsAPI.sumKeys(pastThirtyDays.map(date => `stats-bandwidth-${StatsAPI.createDayKey(date)}`))
StatsAPI.sumKeys(
pastSevenDays.map(date => `stats-requests-${StatsAPI.createDayKey(date)}`)
),
StatsAPI.sumKeys(
pastSevenDays.map(date => `stats-bandwidth-${StatsAPI.createDayKey(date)}`)
),
StatsAPI.sumKeys(
pastThirtyDays.map(date => `stats-requests-${StatsAPI.createDayKey(date)}`)
),
StatsAPI.sumKeys(
pastThirtyDays.map(date => `stats-bandwidth-${StatsAPI.createDayKey(date)}`)
)
]).then(results => {
console.log("\n## Summary")
console.log("Requests this week: %s", results[0].toLocaleString())
console.log("Bandwidth this week: %s", prettyBytes(results[1]))
console.log("Requests this month: %s", results[2].toLocaleString())
console.log("Bandwidth this month: %s", prettyBytes(results[3]))
console.log("\n## Summary");
console.log("Requests this week: %s", results[0].toLocaleString());
console.log("Bandwidth this week: %s", prettyBytes(results[1]));
console.log("Requests this month: %s", results[2].toLocaleString());
console.log("Bandwidth this month: %s", prettyBytes(results[3]));
StatsAPI.sumTopScores(
pastSevenDays.map(date => `stats-packageRequests-${StatsAPI.createDayKey(date)}`)
pastSevenDays.map(
date => `stats-packageRequests-${StatsAPI.createDayKey(date)}`
)
).then(topPackages => {
console.log("\n## Top Packages This Week")
console.log("\n## Top Packages This Week");
topPackages.forEach(result => {
result[1] = result[1].toLocaleString()
})
result[1] = result[1].toLocaleString();
});
console.log(table(topPackages))
console.log(table(topPackages));
process.exit()
})
})
process.exit();
});
});

View File

@ -1,35 +1,35 @@
const fs = require("fs")
const path = require("path")
const crypto = require("crypto")
const jwt = require("jsonwebtoken")
const invariant = require("invariant")
const forge = require("node-forge")
const db = require("./RedisClient")
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
const jwt = require("jsonwebtoken");
const invariant = require("invariant");
const forge = require("node-forge");
const db = require("./RedisClient");
let keys
let keys;
if (process.env.NODE_ENV === "production") {
keys = {
public: fs.readFileSync(path.resolve(__dirname, "../public.key"), "utf8"),
private: process.env.PRIVATE_KEY
}
};
invariant(keys.private, "Missing $PRIVATE_KEY environment variable")
invariant(keys.private, "Missing $PRIVATE_KEY environment variable");
} else {
// Generate a random keypair for dev/testing.
// See https://gist.github.com/sebadoom/2b70969e70db5da9a203bebd9cff099f
const keypair = forge.rsa.generateKeyPair({ bits: 2048 })
const keypair = forge.rsa.generateKeyPair({ bits: 2048 });
keys = {
public: forge.pki.publicKeyToPem(keypair.publicKey, 72),
private: forge.pki.privateKeyToPem(keypair.privateKey, 72)
}
};
}
function getCurrentSeconds() {
return Math.floor(Date.now() / 1000)
return Math.floor(Date.now() / 1000);
}
function createTokenId() {
return crypto.randomBytes(16).toString("hex")
return crypto.randomBytes(16).toString("hex");
}
function createToken(scopes = {}) {
@ -39,42 +39,42 @@ function createToken(scopes = {}) {
iss: "https://unpkg.com",
iat: getCurrentSeconds(),
scopes
}
};
jwt.sign(payload, keys.private, { algorithm: "RS256" }, (error, token) => {
if (error) {
reject(error)
reject(error);
} else {
resolve(token)
resolve(token);
}
})
})
});
});
}
const RevokedTokensSet = "revoked-tokens"
const RevokedTokensSet = "revoked-tokens";
function verifyToken(token) {
return new Promise((resolve, reject) => {
const options = { algorithms: ["RS256"] }
const options = { algorithms: ["RS256"] };
jwt.verify(token, keys.public, options, (error, payload) => {
if (error) {
reject(error)
reject(error);
} else {
if (payload.jti) {
db.sismember(RevokedTokensSet, payload.jti, (error, value) => {
if (error) {
reject(error)
reject(error);
} else {
resolve(value === 0 ? payload : null)
resolve(value === 0 ? payload : null);
}
})
});
} else {
resolve(null)
resolve(null);
}
}
})
})
});
});
}
function revokeToken(token) {
@ -83,30 +83,30 @@ function revokeToken(token) {
return new Promise((resolve, reject) => {
db.sadd(RevokedTokensSet, payload.jti, error => {
if (error) {
reject(error)
reject(error);
} else {
resolve()
resolve();
}
})
})
});
});
}
})
});
}
function removeAllRevokedTokens() {
return new Promise((resolve, reject) => {
db.del(RevokedTokensSet, error => {
if (error) {
reject(error)
reject(error);
} else {
resolve()
resolve();
}
})
})
});
});
}
function getPublicKey() {
return keys.public
return keys.public;
}
module.exports = {
@ -115,4 +115,4 @@ module.exports = {
revokeToken,
removeAllRevokedTokens,
getPublicKey
}
};

View File

@ -1,65 +1,65 @@
const db = require("./RedisClient")
const db = require("./RedisClient");
const BlacklistSet = "blacklisted-packages"
const BlacklistSet = "blacklisted-packages";
function addPackage(packageName) {
return new Promise((resolve, reject) => {
db.sadd(BlacklistSet, packageName, (error, value) => {
if (error) {
reject(error)
reject(error);
} else {
resolve(value === 1)
resolve(value === 1);
}
})
})
});
});
}
function removePackage(packageName) {
return new Promise((resolve, reject) => {
db.srem(BlacklistSet, packageName, (error, value) => {
if (error) {
reject(error)
reject(error);
} else {
resolve(value === 1)
resolve(value === 1);
}
})
})
});
});
}
function removeAllPackages() {
return new Promise((resolve, reject) => {
db.del(BlacklistSet, error => {
if (error) {
reject(error)
reject(error);
} else {
resolve()
resolve();
}
})
})
});
});
}
function getPackages() {
return new Promise((resolve, reject) => {
db.smembers(BlacklistSet, (error, value) => {
if (error) {
reject(error)
reject(error);
} else {
resolve(value)
resolve(value);
}
})
})
});
});
}
function includesPackage(packageName) {
return new Promise((resolve, reject) => {
db.sismember(BlacklistSet, packageName, (error, value) => {
if (error) {
reject(error)
reject(error);
} else {
resolve(value === 1)
resolve(value === 1);
}
})
})
});
});
}
module.exports = {
@ -68,4 +68,4 @@ module.exports = {
removeAllPackages,
getPackages,
includesPackage
}
};

View File

@ -1,15 +1,18 @@
require("isomorphic-fetch")
const invariant = require("invariant")
const gunzip = require("gunzip-maybe")
const ndjson = require("ndjson")
require("isomorphic-fetch");
const invariant = require("invariant");
const gunzip = require("gunzip-maybe");
const ndjson = require("ndjson");
const CloudflareAPIURL = "https://api.cloudflare.com"
const CloudflareEmail = process.env.CLOUDFLARE_EMAIL
const CloudflareKey = process.env.CLOUDFLARE_KEY
const CloudflareAPIURL = "https://api.cloudflare.com";
const CloudflareEmail = process.env.CLOUDFLARE_EMAIL;
const CloudflareKey = process.env.CLOUDFLARE_KEY;
invariant(CloudflareEmail, "Missing the $CLOUDFLARE_EMAIL environment variable")
invariant(
CloudflareEmail,
"Missing the $CLOUDFLARE_EMAIL environment variable"
);
invariant(CloudflareKey, "Missing the $CLOUDFLARE_KEY environment variable")
invariant(CloudflareKey, "Missing the $CLOUDFLARE_KEY environment variable");
function get(path, headers) {
return fetch(`${CloudflareAPIURL}/client/v4${path}`, {
@ -17,49 +20,49 @@ function get(path, headers) {
"X-Auth-Email": CloudflareEmail,
"X-Auth-Key": CloudflareKey
})
})
});
}
function getJSON(path, headers) {
return get(path, headers)
.then(res => {
return res.json()
return res.json();
})
.then(data => {
if (!data.success) {
console.error(`CloudflareAPI.getJSON failed at ${path}`)
console.error(data)
throw new Error("Failed to getJSON from Cloudflare")
console.error(`CloudflareAPI.getJSON failed at ${path}`);
console.error(data);
throw new Error("Failed to getJSON from Cloudflare");
}
return data.result
})
return data.result;
});
}
function getZones(domains) {
return Promise.all(
(Array.isArray(domains) ? domains : [domains]).map(domain => {
return getJSON(`/zones?name=${domain}`)
return getJSON(`/zones?name=${domain}`);
})
).then(results => {
return results.reduce((memo, zones) => {
return memo.concat(zones)
})
})
return memo.concat(zones);
});
});
}
function reduceResults(target, values) {
Object.keys(values).forEach(key => {
const value = values[key]
const value = values[key];
if (typeof value === "object" && value) {
target[key] = reduceResults(target[key] || {}, value)
target[key] = reduceResults(target[key] || {}, value);
} else if (typeof value === "number") {
target[key] = (target[key] || 0) + values[key]
target[key] = (target[key] || 0) + values[key];
}
})
});
return target
return target;
}
function getZoneAnalyticsDashboard(zones, since, until) {
@ -69,29 +72,31 @@ function getZoneAnalyticsDashboard(zones, since, until) {
`/zones/${
zone.id
}/analytics/dashboard?since=${since.toISOString()}&until=${until.toISOString()}`
)
);
})
).then(results => {
return results.reduce(reduceResults)
})
return results.reduce(reduceResults);
});
}
function getJSONStream(path, headers) {
const acceptGzipHeaders = Object.assign({}, headers, {
"Accept-Encoding": "gzip"
})
});
return get(path, acceptGzipHeaders)
.then(res => {
return res.body.pipe(gunzip())
return res.body.pipe(gunzip());
})
.then(stream => {
return stream.pipe(ndjson.parse())
})
return stream.pipe(ndjson.parse());
});
}
function getLogs(zoneId, startTime, endTime) {
return getJSONStream(`/zones/${zoneId}/logs/requests?start=${startTime}&end=${endTime}`)
return getJSONStream(
`/zones/${zoneId}/logs/requests?start=${startTime}&end=${endTime}`
);
}
module.exports = {
@ -101,4 +106,4 @@ module.exports = {
getZoneAnalyticsDashboard,
getJSONStream,
getLogs
}
};

View File

@ -1,9 +1,12 @@
const redis = require("redis")
const redis = require("redis");
redis.debug_mode = process.env.DEBUG_REDIS != null
redis.debug_mode = process.env.DEBUG_REDIS != null;
const RedisURL = process.env.OPENREDIS_URL || process.env.REDIS_URL || "redis://localhost:6379"
const RedisURL =
process.env.OPENREDIS_URL ||
process.env.REDIS_URL ||
"redis://localhost:6379";
const client = redis.createClient(RedisURL)
const client = redis.createClient(RedisURL);
module.exports = client
module.exports = client;

View File

@ -1,108 +1,114 @@
const db = require("./RedisClient")
const CloudflareAPI = require("./CloudflareAPI")
const BlacklistAPI = require("./BlacklistAPI")
const db = require("./RedisClient");
const CloudflareAPI = require("./CloudflareAPI");
const BlacklistAPI = require("./BlacklistAPI");
function prunePackages(packagesMap) {
return Promise.all(
Object.keys(packagesMap).map(packageName =>
BlacklistAPI.includesPackage(packageName).then(blacklisted => {
if (blacklisted) {
delete packagesMap[packageName]
delete packagesMap[packageName];
}
})
)
).then(() => packagesMap)
).then(() => packagesMap);
}
function createDayKey(date) {
return `${date.getUTCFullYear()}-${date.getUTCMonth()}-${date.getUTCDate()}`
return `${date.getUTCFullYear()}-${date.getUTCMonth()}-${date.getUTCDate()}`;
}
function createHourKey(date) {
return `${createDayKey(date)}-${date.getUTCHours()}`
return `${createDayKey(date)}-${date.getUTCHours()}`;
}
function createMinuteKey(date) {
return `${createHourKey(date)}-${date.getUTCMinutes()}`
return `${createHourKey(date)}-${date.getUTCMinutes()}`;
}
function createScoresMap(array) {
const map = {}
const map = {};
for (let i = 0; i < array.length; i += 2) {
map[array[i]] = parseInt(array[i + 1], 10)
map[array[i]] = parseInt(array[i + 1], 10);
}
return map
return map;
}
function getScoresMap(key, n = 100) {
return new Promise((resolve, reject) => {
db.zrevrange(key, 0, n, "withscores", (error, value) => {
if (error) {
reject(error)
reject(error);
} else {
resolve(createScoresMap(value))
resolve(createScoresMap(value));
}
})
})
});
});
}
function getPackageRequests(date, n = 100) {
return getScoresMap(`stats-packageRequests-${createDayKey(date)}`, n).then(prunePackages)
return getScoresMap(`stats-packageRequests-${createDayKey(date)}`, n).then(
prunePackages
);
}
function getPackageBandwidth(date, n = 100) {
return getScoresMap(`stats-packageBytes-${createDayKey(date)}`, n).then(prunePackages)
return getScoresMap(`stats-packageBytes-${createDayKey(date)}`, n).then(
prunePackages
);
}
function getProtocolRequests(date) {
return getScoresMap(`stats-protocolRequests-${createDayKey(date)}`)
return getScoresMap(`stats-protocolRequests-${createDayKey(date)}`);
}
function addDailyMetricsToTimeseries(timeseries) {
const since = new Date(timeseries.since)
const since = new Date(timeseries.since);
return Promise.all([
getPackageRequests(since),
getPackageBandwidth(since),
getProtocolRequests(since)
]).then(results => {
timeseries.requests.package = results[0]
timeseries.bandwidth.package = results[1]
timeseries.requests.protocol = results[2]
return timeseries
})
timeseries.requests.package = results[0];
timeseries.bandwidth.package = results[1];
timeseries.requests.protocol = results[2];
return timeseries;
});
}
function sumMaps(maps) {
return maps.reduce((memo, map) => {
Object.keys(map).forEach(key => {
memo[key] = (memo[key] || 0) + map[key]
})
memo[key] = (memo[key] || 0) + map[key];
});
return memo
}, {})
return memo;
}, {});
}
function addDailyMetrics(result) {
return Promise.all(result.timeseries.map(addDailyMetricsToTimeseries)).then(() => {
result.totals.requests.package = sumMaps(
result.timeseries.map(timeseries => {
return timeseries.requests.package
})
)
return Promise.all(result.timeseries.map(addDailyMetricsToTimeseries)).then(
() => {
result.totals.requests.package = sumMaps(
result.timeseries.map(timeseries => {
return timeseries.requests.package;
})
);
result.totals.bandwidth.package = sumMaps(
result.timeseries.map(timeseries => timeseries.bandwidth.package)
)
result.totals.bandwidth.package = sumMaps(
result.timeseries.map(timeseries => timeseries.bandwidth.package)
);
result.totals.requests.protocol = sumMaps(
result.timeseries.map(timeseries => timeseries.requests.protocol)
)
result.totals.requests.protocol = sumMaps(
result.timeseries.map(timeseries => timeseries.requests.protocol)
);
return result
})
return result;
}
);
}
function extractPublicInfo(data) {
@ -131,29 +137,31 @@ function extractPublicInfo(data) {
uniques: {
all: data.uniques.all
}
}
};
}
const DomainNames = ["unpkg.com", "npmcdn.com"]
const DomainNames = ["unpkg.com", "npmcdn.com"];
function fetchStats(since, until) {
return CloudflareAPI.getZones(DomainNames).then(zones => {
return CloudflareAPI.getZoneAnalyticsDashboard(zones, since, until).then(dashboard => {
return {
timeseries: dashboard.timeseries.map(extractPublicInfo),
totals: extractPublicInfo(dashboard.totals)
return CloudflareAPI.getZoneAnalyticsDashboard(zones, since, until).then(
dashboard => {
return {
timeseries: dashboard.timeseries.map(extractPublicInfo),
totals: extractPublicInfo(dashboard.totals)
};
}
})
})
);
});
}
const oneMinute = 1000 * 60
const oneHour = oneMinute * 60
const oneDay = oneHour * 24
const oneMinute = 1000 * 60;
const oneHour = oneMinute * 60;
const oneDay = oneHour * 24;
function getStats(since, until) {
const promise = fetchStats(since, until)
return until - since > oneDay ? promise.then(addDailyMetrics) : promise
const promise = fetchStats(since, until);
return until - since > oneDay ? promise.then(addDailyMetrics) : promise;
}
module.exports = {
@ -161,4 +169,4 @@ module.exports = {
createHourKey,
createMinuteKey,
getStats
}
};

View File

@ -1,9 +1,9 @@
const AuthAPI = require("../AuthAPI")
const AuthAPI = require("../AuthAPI");
describe("Auth API", () => {
beforeEach(done => {
AuthAPI.removeAllRevokedTokens().then(() => done(), done)
})
AuthAPI.removeAllRevokedTokens().then(() => done(), done);
});
it("creates tokens with the right scopes", done => {
const scopes = {
@ -11,29 +11,29 @@ describe("Auth API", () => {
add: true,
remove: true
}
}
};
AuthAPI.createToken(scopes).then(token => {
AuthAPI.verifyToken(token).then(payload => {
expect(payload.jti).toEqual(expect.any(String))
expect(payload.iss).toEqual(expect.any(String))
expect(payload.iat).toEqual(expect.any(Number))
expect(payload.scopes).toMatchObject(scopes)
done()
})
})
})
expect(payload.jti).toEqual(expect.any(String));
expect(payload.iss).toEqual(expect.any(String));
expect(payload.iat).toEqual(expect.any(Number));
expect(payload.scopes).toMatchObject(scopes);
done();
});
});
});
it("refuses to verify revoked tokens", done => {
const scopes = {}
const scopes = {};
AuthAPI.createToken(scopes).then(token => {
AuthAPI.revokeToken(token).then(() => {
AuthAPI.verifyToken(token).then(payload => {
expect(payload).toBe(null)
done()
})
})
})
})
})
expect(payload).toBe(null);
done();
});
});
});
});
});

View File

@ -1,24 +1,24 @@
const BlacklistAPI = require("../BlacklistAPI")
const BlacklistAPI = require("../BlacklistAPI");
describe("Blacklist API", () => {
beforeEach(done => {
BlacklistAPI.removeAllPackages().then(() => done(), done)
})
BlacklistAPI.removeAllPackages().then(() => done(), done);
});
it("adds and removes packages to/from the blacklist", done => {
const packageName = "bad-package"
const packageName = "bad-package";
BlacklistAPI.addPackage(packageName).then(() => {
BlacklistAPI.getPackages().then(packageNames => {
expect(packageNames).toEqual([packageName])
expect(packageNames).toEqual([packageName]);
BlacklistAPI.removePackage(packageName).then(() => {
BlacklistAPI.getPackages().then(packageNames => {
expect(packageNames).toEqual([])
done()
})
})
})
})
})
})
expect(packageNames).toEqual([]);
done();
});
});
});
});
});
});

View File

@ -1,66 +1,66 @@
const request = require("supertest")
const createServer = require("../createServer")
const clearBlacklist = require("./utils/clearBlacklist")
const withBlacklist = require("./utils/withBlacklist")
const withRevokedToken = require("./utils/withRevokedToken")
const withToken = require("./utils/withToken")
const request = require("supertest");
const createServer = require("../createServer");
const clearBlacklist = require("./utils/clearBlacklist");
const withBlacklist = require("./utils/withBlacklist");
const withRevokedToken = require("./utils/withRevokedToken");
const withToken = require("./utils/withToken");
describe("The server", () => {
let server
let server;
beforeEach(() => {
server = createServer()
})
server = createServer();
});
it("rejects invalid package names", done => {
request(server)
.get("/_invalid/index.js")
.end((err, res) => {
expect(res.statusCode).toBe(403)
done()
})
})
expect(res.statusCode).toBe(403);
done();
});
});
it("redirects invalid query params", done => {
request(server)
.get("/react?main=index&invalid")
.end((err, res) => {
expect(res.statusCode).toBe(302)
expect(res.headers.location).toBe("/react?main=index")
done()
})
})
expect(res.statusCode).toBe(302);
expect(res.headers.location).toBe("/react?main=index");
done();
});
});
it("redirects /_meta to ?meta", done => {
request(server)
.get("/_meta/react?main=index")
.end((err, res) => {
expect(res.statusCode).toBe(302)
expect(res.headers.location).toBe("/react?main=index&meta")
done()
})
})
expect(res.statusCode).toBe(302);
expect(res.headers.location).toBe("/react?main=index&meta");
done();
});
});
it("does not serve blacklisted packages", done => {
withBlacklist(["bad-package"], () => {
request(server)
.get("/bad-package/index.js")
.end((err, res) => {
expect(res.statusCode).toBe(403)
done()
})
})
})
expect(res.statusCode).toBe(403);
done();
});
});
});
describe("POST /_auth", () => {
it("creates a new auth token", done => {
request(server)
.post("/_auth")
.end((err, res) => {
expect(res.body).toHaveProperty("token")
done()
})
})
})
expect(res.body).toHaveProperty("token");
done();
});
});
});
describe("GET /_auth", () => {
describe("with no auth", () => {
@ -68,12 +68,12 @@ describe("The server", () => {
request(server)
.get("/_auth")
.end((err, res) => {
expect(res.body).toHaveProperty("auth")
expect(res.body.auth).toBe(null)
done()
})
})
})
expect(res.body).toHaveProperty("auth");
expect(res.body.auth).toBe(null);
done();
});
});
});
describe("with a revoked auth token", () => {
it("echoes back null", done => {
@ -81,13 +81,13 @@ describe("The server", () => {
request(server)
.get("/_auth?token=" + token)
.end((err, res) => {
expect(res.body).toHaveProperty("auth")
expect(res.body.auth).toBe(null)
done()
})
})
})
})
expect(res.body).toHaveProperty("auth");
expect(res.body.auth).toBe(null);
done();
});
});
});
});
describe("with a valid auth token", () => {
it("echoes back the auth payload", done => {
@ -95,39 +95,39 @@ describe("The server", () => {
request(server)
.get("/_auth?token=" + token)
.end((err, res) => {
expect(res.body).toHaveProperty("auth")
expect(typeof res.body.auth).toBe("object")
done()
})
})
})
})
})
expect(res.body).toHaveProperty("auth");
expect(typeof res.body.auth).toBe("object");
done();
});
});
});
});
});
describe("GET /_publicKey", () => {
it("echoes the public key", done => {
request(server)
.get("/_publicKey")
.end((err, res) => {
expect(res.text).toMatch(/PUBLIC KEY/)
done()
})
})
})
expect(res.text).toMatch(/PUBLIC KEY/);
done();
});
});
});
describe("POST /_blacklist", () => {
afterEach(clearBlacklist)
afterEach(clearBlacklist);
describe("with no auth", () => {
it("is forbidden", done => {
request(server)
.post("/_blacklist")
.end((err, res) => {
expect(res.statusCode).toBe(403)
done()
})
})
})
expect(res.statusCode).toBe(403);
done();
});
});
});
describe('with the "blacklist.add" scope', () => {
it("can add to the blacklist", done => {
@ -136,15 +136,17 @@ describe("The server", () => {
.post("/_blacklist")
.send({ token, packageName: "bad-package" })
.end((err, res) => {
expect(res.statusCode).toBe(200)
expect(res.headers["content-location"]).toEqual("/_blacklist/bad-package")
expect(res.body.ok).toBe(true)
done()
})
})
})
})
})
expect(res.statusCode).toBe(200);
expect(res.headers["content-location"]).toEqual(
"/_blacklist/bad-package"
);
expect(res.body.ok).toBe(true);
done();
});
});
});
});
});
describe("GET /_blacklist", () => {
describe("with no auth", () => {
@ -152,11 +154,11 @@ describe("The server", () => {
request(server)
.get("/_blacklist")
.end((err, res) => {
expect(res.statusCode).toBe(403)
done()
})
})
})
expect(res.statusCode).toBe(403);
done();
});
});
});
describe('with the "blacklist.read" scope', () => {
it("can read the blacklist", done => {
@ -164,13 +166,13 @@ describe("The server", () => {
request(server)
.get("/_blacklist?token=" + token)
.end((err, res) => {
expect(res.statusCode).toBe(200)
done()
})
})
})
})
})
expect(res.statusCode).toBe(200);
done();
});
});
});
});
});
describe("DELETE /_blacklist/:packageName", () => {
describe("with no auth", () => {
@ -178,11 +180,11 @@ describe("The server", () => {
request(server)
.delete("/_blacklist/bad-package")
.end((err, res) => {
expect(res.statusCode).toBe(403)
done()
})
})
})
expect(res.statusCode).toBe(403);
done();
});
});
});
describe('with the "blacklist.remove" scope', () => {
it("can remove a package from the blacklist", done => {
@ -191,12 +193,12 @@ describe("The server", () => {
.delete("/_blacklist/bad-package")
.send({ token })
.end((err, res) => {
expect(res.statusCode).toBe(200)
expect(res.body.ok).toBe(true)
done()
})
})
})
expect(res.statusCode).toBe(200);
expect(res.body.ok).toBe(true);
done();
});
});
});
it("can remove a scoped package from the blacklist", done => {
withToken({ blacklist: { remove: true } }, token => {
@ -204,12 +206,12 @@ describe("The server", () => {
.delete("/_blacklist/@scope/bad-package")
.send({ token })
.end((err, res) => {
expect(res.statusCode).toBe(200)
expect(res.body.ok).toBe(true)
done()
})
})
})
})
})
})
expect(res.statusCode).toBe(200);
expect(res.body.ok).toBe(true);
done();
});
});
});
});
});
});

View File

@ -1,7 +1,7 @@
const BlacklistAPI = require("../../BlacklistAPI")
const BlacklistAPI = require("../../BlacklistAPI");
function clearBlacklist(done) {
BlacklistAPI.removeAllPackages().then(done, done)
BlacklistAPI.removeAllPackages().then(done, done);
}
module.exports = clearBlacklist
module.exports = clearBlacklist;

View File

@ -1,7 +1,7 @@
const BlacklistAPI = require("../../BlacklistAPI")
const BlacklistAPI = require("../../BlacklistAPI");
function withBlacklist(blacklist, callback) {
return Promise.all(blacklist.map(BlacklistAPI.addPackage)).then(callback)
return Promise.all(blacklist.map(BlacklistAPI.addPackage)).then(callback);
}
module.exports = withBlacklist
module.exports = withBlacklist;

View File

@ -1,12 +1,12 @@
const withToken = require("./withToken")
const AuthAPI = require("../../AuthAPI")
const withToken = require("./withToken");
const AuthAPI = require("../../AuthAPI");
function withRevokedToken(scopes, callback) {
withToken(scopes, token => {
AuthAPI.revokeToken(token).then(() => {
callback(token)
})
})
callback(token);
});
});
}
module.exports = withRevokedToken
module.exports = withRevokedToken;

View File

@ -1,7 +1,7 @@
const AuthAPI = require("../../AuthAPI")
const AuthAPI = require("../../AuthAPI");
function withToken(scopes, callback) {
AuthAPI.createToken(scopes).then(callback)
AuthAPI.createToken(scopes).then(callback);
}
module.exports = withToken
module.exports = withToken;

View File

@ -1,42 +1,48 @@
const validateNpmPackageName = require("validate-npm-package-name")
const BlacklistAPI = require("../BlacklistAPI")
const validateNpmPackageName = require("validate-npm-package-name");
const BlacklistAPI = require("../BlacklistAPI");
function addToBlacklist(req, res) {
const packageName = req.body.packageName
const packageName = req.body.packageName;
if (!packageName) {
return res.status(403).send({ error: 'Missing "packageName" body parameter' })
return res
.status(403)
.send({ error: 'Missing "packageName" body parameter' });
}
const nameErrors = validateNpmPackageName(packageName).errors
const nameErrors = validateNpmPackageName(packageName).errors;
// Disallow invalid package names.
if (nameErrors) {
const reason = nameErrors.join(", ")
const reason = nameErrors.join(", ");
return res.status(403).send({
error: `Invalid package name "${packageName}" (${reason})`
})
});
}
BlacklistAPI.addPackage(packageName).then(
added => {
if (added) {
const userId = req.user.jti
console.log(`Package "${packageName}" was added to the blacklist by ${userId}`)
const userId = req.user.jti;
console.log(
`Package "${packageName}" was added to the blacklist by ${userId}`
);
}
res.set({ "Content-Location": `/_blacklist/${packageName}` }).send({
ok: true,
message: `Package "${packageName}" was ${added ? "added to" : "already in"} the blacklist`
})
message: `Package "${packageName}" was ${
added ? "added to" : "already in"
} the blacklist`
});
},
error => {
console.error(error)
console.error(error);
res.status(500).send({
error: `Unable to add "${packageName}" to the blacklist`
})
});
}
)
);
}
module.exports = addToBlacklist
module.exports = addToBlacklist;

View File

@ -1,24 +1,24 @@
const AuthAPI = require("../AuthAPI")
const AuthAPI = require("../AuthAPI");
const defaultScopes = {
blacklist: {
read: true
}
}
};
function createAuth(req, res) {
AuthAPI.createToken(defaultScopes).then(
token => {
res.send({ token })
res.send({ token });
},
error => {
console.error(error)
console.error(error);
res.status(500).send({
error: "Unable to generate auth token"
})
});
}
)
);
}
module.exports = createAuth
module.exports = createAuth;

View File

@ -1,28 +1,32 @@
const BlacklistAPI = require("../BlacklistAPI")
const BlacklistAPI = require("../BlacklistAPI");
function removeFromBlacklist(req, res) {
const packageName = req.packageName
const packageName = req.packageName;
BlacklistAPI.removePackage(packageName).then(
removed => {
if (removed) {
const userId = req.user.jti
console.log(`Package "${packageName}" was removed from the blacklist by ${userId}`)
const userId = req.user.jti;
console.log(
`Package "${packageName}" was removed from the blacklist by ${userId}`
);
}
res.send({
ok: true,
message: `Package "${packageName}" was ${removed ? "removed from" : "not in"} the blacklist`
})
message: `Package "${packageName}" was ${
removed ? "removed from" : "not in"
} the blacklist`
});
},
error => {
console.error(error)
console.error(error);
res.status(500).send({
error: `Unable to remove "${packageName}" from the blacklist`
})
});
}
)
);
}
module.exports = removeFromBlacklist
module.exports = removeFromBlacklist;

View File

@ -1,5 +1,5 @@
function showAuth(req, res) {
res.send({ auth: req.user })
res.send({ auth: req.user });
}
module.exports = showAuth
module.exports = showAuth;

View File

@ -1,17 +1,17 @@
const BlacklistAPI = require("../BlacklistAPI")
const BlacklistAPI = require("../BlacklistAPI");
function showBlacklist(req, res) {
BlacklistAPI.getPackages().then(
blacklist => {
res.send({ blacklist })
res.send({ blacklist });
},
error => {
console.error(error)
console.error(error);
res.status(500).send({
error: "Unable to fetch blacklist"
})
});
}
)
);
}
module.exports = showBlacklist
module.exports = showBlacklist;

View File

@ -1,7 +1,7 @@
const AuthAPI = require("../AuthAPI")
const AuthAPI = require("../AuthAPI");
function showPublicKey(req, res) {
res.send({ publicKey: AuthAPI.getPublicKey() })
res.send({ publicKey: AuthAPI.getPublicKey() });
}
module.exports = showPublicKey
module.exports = showPublicKey;

View File

@ -1,42 +1,46 @@
const subDays = require("date-fns/sub_days")
const startOfDay = require("date-fns/start_of_day")
const startOfSecond = require("date-fns/start_of_second")
const StatsAPI = require("../StatsAPI")
const subDays = require("date-fns/sub_days");
const startOfDay = require("date-fns/start_of_day");
const startOfSecond = require("date-fns/start_of_second");
const StatsAPI = require("../StatsAPI");
function showStats(req, res) {
let since, until
let since, until;
switch (req.query.period) {
case "last-day":
until = startOfDay(new Date())
since = subDays(until, 1)
break
until = startOfDay(new Date());
since = subDays(until, 1);
break;
case "last-week":
until = startOfDay(new Date())
since = subDays(until, 7)
break
until = startOfDay(new Date());
since = subDays(until, 7);
break;
case "last-month":
until = startOfDay(new Date())
since = subDays(until, 30)
break
until = startOfDay(new Date());
since = subDays(until, 30);
break;
default:
until = req.query.until ? new Date(req.query.until) : startOfSecond(new Date())
since = new Date(req.query.since)
until = req.query.until
? new Date(req.query.until)
: startOfSecond(new Date());
since = new Date(req.query.since);
}
if (isNaN(since.getTime())) {
return res.status(403).send({ error: "?since is not a valid date" })
return res.status(403).send({ error: "?since is not a valid date" });
}
if (isNaN(until.getTime())) {
return res.status(403).send({ error: "?until is not a valid date" })
return res.status(403).send({ error: "?until is not a valid date" });
}
if (until <= since) {
return res.status(403).send({ error: "?until date must come after ?since date" })
return res
.status(403)
.send({ error: "?until date must come after ?since date" });
}
if (until >= new Date()) {
return res.status(403).send({ error: "?until must be a date in the past" })
return res.status(403).send({ error: "?until must be a date in the past" });
}
StatsAPI.getStats(since, until).then(
@ -46,13 +50,13 @@ function showStats(req, res) {
"Cache-Control": "public, max-age=60",
"Cache-Tag": "stats"
})
.send(stats)
.send(stats);
},
error => {
console.error(error)
res.status(500).send({ error: "Unable to fetch stats" })
console.error(error);
res.status(500).send({ error: "Unable to fetch stats" });
}
)
);
}
module.exports = showStats
module.exports = showStats;

View File

@ -1,11 +1,11 @@
const parseURL = require("url").parse
const startOfDay = require("date-fns/start_of_day")
const addDays = require("date-fns/add_days")
const parsePackageURL = require("./utils/parsePackageURL")
const CloudflareAPI = require("./CloudflareAPI")
const StatsAPI = require("./StatsAPI")
const parseURL = require("url").parse;
const startOfDay = require("date-fns/start_of_day");
const addDays = require("date-fns/add_days");
const parsePackageURL = require("./utils/parsePackageURL");
const CloudflareAPI = require("./CloudflareAPI");
const StatsAPI = require("./StatsAPI");
const db = require("./RedisClient")
const db = require("./RedisClient");
/**
* Domains we want to analyze.
@ -13,98 +13,119 @@ const db = require("./RedisClient")
const DomainNames = [
"unpkg.com"
//'npmcdn.com' // We don't have log data on npmcdn.com yet :/
]
];
/**
* The window of time to download in a single fetch.
*/
const LogWindowSeconds = 30
const LogWindowSeconds = 30;
function getSeconds(date) {
return Math.floor(date.getTime() / 1000)
return Math.floor(date.getTime() / 1000);
}
function stringifySeconds(seconds) {
return new Date(seconds * 1000).toISOString()
return new Date(seconds * 1000).toISOString();
}
function toSeconds(millis) {
return Math.floor(millis / 1000)
return Math.floor(millis / 1000);
}
const oneSecond = 1000
const oneMinute = oneSecond * 60
const oneHour = oneMinute * 60
const oneSecond = 1000;
const oneMinute = oneSecond * 60;
const oneHour = oneMinute * 60;
function computeCounters(stream) {
return new Promise((resolve, reject) => {
const counters = {}
const expireat = {}
const counters = {};
const expireat = {};
function incr(key, member, by, expiry) {
counters[key] = counters[key] || {}
counters[key][member] = (counters[key][member] || 0) + by
expireat[key] = expiry
counters[key] = counters[key] || {};
counters[key][member] = (counters[key][member] || 0) + by;
expireat[key] = expiry;
}
stream
.on("error", reject)
.on("data", function(entry) {
const date = new Date(Math.round(entry.timestamp / 1000000))
const date = new Date(Math.round(entry.timestamp / 1000000));
const nextDay = startOfDay(addDays(date, 1))
const sevenDaysLater = getSeconds(addDays(nextDay, 7))
const thirtyDaysLater = getSeconds(addDays(nextDay, 30))
const dayKey = StatsAPI.createDayKey(date)
const nextDay = startOfDay(addDays(date, 1));
const sevenDaysLater = getSeconds(addDays(nextDay, 7));
const thirtyDaysLater = getSeconds(addDays(nextDay, 30));
const dayKey = StatsAPI.createDayKey(date);
const clientRequest = entry.clientRequest
const edgeResponse = entry.edgeResponse
const clientRequest = entry.clientRequest;
const edgeResponse = entry.edgeResponse;
if (edgeResponse.status === 200) {
// Q: How many requests do we serve for a package per day?
// Q: How many bytes do we serve for a package per day?
const url = parsePackageURL(parseURL(clientRequest.uri).pathname)
const packageName = url && url.packageName
const url = parsePackageURL(parseURL(clientRequest.uri).pathname);
const packageName = url && url.packageName;
if (packageName) {
incr(`stats-packageRequests-${dayKey}`, packageName, 1, thirtyDaysLater)
incr(`stats-packageBytes-${dayKey}`, packageName, edgeResponse.bytes, thirtyDaysLater)
incr(
`stats-packageRequests-${dayKey}`,
packageName,
1,
thirtyDaysLater
);
incr(
`stats-packageBytes-${dayKey}`,
packageName,
edgeResponse.bytes,
thirtyDaysLater
);
}
}
// Q: How many requests per day do we receive via a protocol?
const protocol = clientRequest.httpProtocol
const protocol = clientRequest.httpProtocol;
if (protocol) incr(`stats-protocolRequests-${dayKey}`, protocol, 1, thirtyDaysLater)
if (protocol)
incr(
`stats-protocolRequests-${dayKey}`,
protocol,
1,
thirtyDaysLater
);
// Q: How many requests do we receive from a hostname per day?
// Q: How many bytes do we serve to a hostname per day?
const referer = clientRequest.referer
const hostname = referer && parseURL(referer).hostname
const referer = clientRequest.referer;
const hostname = referer && parseURL(referer).hostname;
if (hostname) {
incr(`stats-hostnameRequests-${dayKey}`, hostname, 1, sevenDaysLater)
incr(`stats-hostnameBytes-${dayKey}`, hostname, edgeResponse.bytes, sevenDaysLater)
incr(`stats-hostnameRequests-${dayKey}`, hostname, 1, sevenDaysLater);
incr(
`stats-hostnameBytes-${dayKey}`,
hostname,
edgeResponse.bytes,
sevenDaysLater
);
}
})
.on("end", function() {
resolve({ counters, expireat })
})
})
resolve({ counters, expireat });
});
});
}
function processLogs(stream) {
return computeCounters(stream).then(({ counters, expireat }) => {
Object.keys(counters).forEach(key => {
const values = counters[key]
const values = counters[key];
Object.keys(values).forEach(member => {
db.zincrby(key, values[member], member)
})
db.zincrby(key, values[member], member);
});
if (expireat[key]) db.expireat(key, expireat[key])
})
})
if (expireat[key]) db.expireat(key, expireat[key]);
});
});
}
function ingestLogs(zone, startSeconds, endSeconds) {
@ -114,62 +135,65 @@ function ingestLogs(zone, startSeconds, endSeconds) {
zone.name,
stringifySeconds(startSeconds),
stringifySeconds(endSeconds)
)
);
const startFetchTime = Date.now()
const startFetchTime = Date.now();
resolve(
CloudflareAPI.getLogs(zone.id, startSeconds, endSeconds).then(stream => {
const endFetchTime = Date.now()
const endFetchTime = Date.now();
console.log(
"info: Fetched %ds worth of logs for %s in %dms",
endSeconds - startSeconds,
zone.name,
endFetchTime - startFetchTime
)
);
const startProcessTime = Date.now()
const startProcessTime = Date.now();
return processLogs(stream).then(() => {
const endProcessTime = Date.now()
const endProcessTime = Date.now();
console.log(
"info: Processed %ds worth of logs for %s in %dms",
endSeconds - startSeconds,
zone.name,
endProcessTime - startProcessTime
)
})
);
});
})
)
})
);
});
}
function startZone(zone) {
const startSecondsKey = `ingestLogsWorker-nextStartSeconds-${zone.name.replace(".", "-")}`
const startSecondsKey = `ingestLogsWorker-nextStartSeconds-${zone.name.replace(
".",
"-"
)}`;
function takeATurn() {
db.get(startSecondsKey, function(error, value) {
let startSeconds = value && parseInt(value, 10)
let startSeconds = value && parseInt(value, 10);
const now = Date.now()
const now = Date.now();
// Cloudflare keeps logs around for 72 hours.
// https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API
const minSeconds = toSeconds(now - oneHour * 72)
const minSeconds = toSeconds(now - oneHour * 72);
if (startSeconds == null) {
startSeconds = minSeconds
startSeconds = minSeconds;
} else if (startSeconds < minSeconds) {
console.warn(
"warning: Dropped logs for %s from %s to %s!",
zone.name,
stringifySeconds(startSeconds),
stringifySeconds(minSeconds)
)
);
startSeconds = minSeconds
startSeconds = minSeconds;
}
// The log for a request is typically available within thirty (30) minutes
@@ -180,34 +204,34 @@ function startZone(zone) {
// set of logs. This will help ensure that any congestion in the log
// pipeline has passed and a full set of logs can be ingested.
// https://support.cloudflare.com/hc/en-us/articles/216672448-Enterprise-Log-Share-REST-API
const maxSeconds = toSeconds(now - oneMinute * 30)
const maxSeconds = toSeconds(now - oneMinute * 30);
if (startSeconds < maxSeconds) {
const endSeconds = startSeconds + LogWindowSeconds
const endSeconds = startSeconds + LogWindowSeconds;
ingestLogs(zone, startSeconds, endSeconds).then(
function() {
db.set(startSecondsKey, endSeconds)
setTimeout(takeATurn)
db.set(startSecondsKey, endSeconds);
setTimeout(takeATurn);
},
function(error) {
console.error(error.stack)
process.exit(1)
console.error(error.stack);
process.exit(1);
}
)
);
} else {
setTimeout(takeATurn, (startSeconds - maxSeconds) * 1000)
setTimeout(takeATurn, (startSeconds - maxSeconds) * 1000);
}
})
});
}
takeATurn()
takeATurn();
}
Promise.all(DomainNames.map(CloudflareAPI.getZones)).then(results => {
const zones = results.reduce((memo, zones) => {
return memo.concat(zones)
})
return memo.concat(zones);
});
zones.forEach(startZone)
})
zones.forEach(startZone);
});
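For orientation, here is a minimal sketch (not part of this commit) of how the per-day counters written by incr above could be read back. The key follows the stats-packageRequests-<dayKey> pattern used in computeCounters; the relative path to the Redis client is an assumption.

// Sketch only: list the ten most-requested packages for a given day key.
// Assumes the shared Redis client module used elsewhere in this repo.
const db = require("./RedisClient");

function topPackages(dayKey, callback) {
  // processLogs stores counts as sorted-set scores via ZINCRBY, so
  // ZREVRANGE WITHSCORES returns members and their counts, highest first.
  db.zrevrange(`stats-packageRequests-${dayKey}`, 0, 9, "WITHSCORES", callback);
}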

View File

@@ -3,37 +3,38 @@
* permissions. Otherwise rejects the request.
*/
function requireAuth(scope) {
let checkScopes
let checkScopes;
if (scope.includes(".")) {
const parts = scope.split(".")
checkScopes = scopes => parts.reduce((memo, part) => memo && memo[part], scopes) != null
const parts = scope.split(".");
checkScopes = scopes =>
parts.reduce((memo, part) => memo && memo[part], scopes) != null;
} else {
checkScopes = scopes => scopes[scope] != null
checkScopes = scopes => scopes[scope] != null;
}
return function(req, res, next) {
if (req.auth && req.auth.includes(scope)) {
return next() // Already auth'd
return next(); // Already auth'd
}
const user = req.user
const user = req.user;
if (!user) {
return res.status(403).send({ error: "Missing auth token" })
return res.status(403).send({ error: "Missing auth token" });
}
if (!user.scopes || !checkScopes(user.scopes)) {
return res.status(403).send({ error: "Insufficient scopes" })
return res.status(403).send({ error: "Insufficient scopes" });
}
if (req.auth) {
req.auth.push(scope)
req.auth.push(scope);
} else {
req.auth = [scope]
req.auth = [scope];
}
next()
}
next();
};
}
module.exports = requireAuth
module.exports = requireAuth;
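A minimal usage sketch (the route path and scope name are hypothetical, not taken from this commit) showing how requireAuth composes as Express middleware; it assumes the userToken middleware has already populated req.user.

const express = require("express");
const requireAuth = require("./requireAuth"); // relative path is an assumption

const app = express();

// Requests whose verified token lacks the "blacklist.add" scope are
// rejected with a 403 before the handler runs.
app.post("/_blacklist", requireAuth("blacklist.add"), (req, res) => {
  res.send({ ok: true });
});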

View File

@@ -1,41 +1,41 @@
const AuthAPI = require("../AuthAPI")
const AuthAPI = require("../AuthAPI");
const ReadMethods = { GET: true, HEAD: true }
const ReadMethods = { GET: true, HEAD: true };
/**
* Sets req.user from the payload in the auth token in the request.
*/
function userToken(req, res, next) {
if (req.user) {
return next()
return next();
}
const token = (ReadMethods[req.method] ? req.query : req.body).token
const token = (ReadMethods[req.method] ? req.query : req.body).token;
if (!token) {
req.user = null
return next()
req.user = null;
return next();
}
AuthAPI.verifyToken(token).then(
payload => {
req.user = payload
next()
req.user = payload;
next();
},
error => {
if (error.name === "JsonWebTokenError") {
res.status(403).send({
error: `Bad auth token: ${error.message}`
})
});
} else {
console.error(error)
console.error(error);
res.status(500).send({
error: "Unable to verify auth"
})
});
}
}
)
);
}
module.exports = userToken
module.exports = userToken;

View File

@@ -1,29 +1,29 @@
const db = require("../../RedisClient")
const db = require("../../RedisClient");
function createCache(keyPrefix) {
function createKey(key) {
return keyPrefix + "-" + key
return keyPrefix + "-" + key;
}
function set(key, value, expiry, callback) {
db.setex(createKey(key), expiry, JSON.stringify(value), callback)
db.setex(createKey(key), expiry, JSON.stringify(value), callback);
}
function get(key, callback) {
db.get(createKey(key), function(error, value) {
callback(error, value && JSON.parse(value))
})
callback(error, value && JSON.parse(value));
});
}
function del(key, callback) {
db.del(createKey(key), callback)
db.del(createKey(key), callback);
}
return {
set,
get,
del
}
};
}
module.exports = createCache
module.exports = createCache;
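A short sketch (not part of the commit; the prefix and key are made up) of the resulting cache API:

const createCache = require("./createCache"); // relative path is an assumption

const sessionCache = createCache("session");

// Values are JSON-encoded under the "session-" key prefix and expire after 60s.
sessionCache.set("abc123", { userId: 42 }, 60, error => {
  if (error) throw error;
  sessionCache.get("abc123", (error, value) => {
    console.log(value); // { userId: 42 }
  });
});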

View File

@@ -1,24 +1,24 @@
function createMutex(doWork) {
const mutex = {}
const mutex = {};
return function(key, payload, callback) {
if (mutex[key]) {
mutex[key].push(callback)
mutex[key].push(callback);
} else {
mutex[key] = [
function() {
delete mutex[key]
delete mutex[key];
},
callback
]
];
doWork(payload, function(error, value) {
mutex[key].forEach(callback => {
callback(error, value)
})
})
callback(error, value);
});
});
}
}
};
}
module.exports = createMutex
module.exports = createMutex;
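A sketch (hypothetical doWork function, not from this commit) showing how concurrent calls with the same key are coalesced into a single unit of work:

const createMutex = require("./createMutex"); // relative path is an assumption

// Simulate slow work; the mutex guarantees it runs once per key at a time.
const fetchOnce = createMutex((payload, callback) => {
  setTimeout(() => callback(null, `result for ${payload.id}`), 100);
});

// The second call arrives while the first is in flight, so its callback is
// queued under the same key and both log the same value from one run of doWork.
fetchOnce("pkg-1", { id: 1 }, (error, value) => console.log("a:", value));
fetchOnce("pkg-1", { id: 1 }, (error, value) => console.log("b:", value));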

View File

@@ -1,17 +1,17 @@
function createSearch(query) {
const params = []
const params = [];
Object.keys(query).forEach(param => {
if (query[param] === "") {
params.push(param) // Omit the trailing "=" from param=
params.push(param); // Omit the trailing "=" from param=
} else {
params.push(`${param}=${encodeURIComponent(query[param])}`)
params.push(`${param}=${encodeURIComponent(query[param])}`);
}
})
});
const search = params.join("&")
const search = params.join("&");
return search ? `?${search}` : ""
return search ? `?${search}` : "";
}
module.exports = createSearch
module.exports = createSearch;
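For example (a sketch, not part of the commit), a query object parsed from a URL round-trips back to a search string like this:

const createSearch = require("./createSearch"); // relative path is an assumption

console.log(createSearch({ main: "browser", module: "" })); // "?main=browser&module"
console.log(createSearch({})); // ""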

View File

@@ -1,15 +1,15 @@
require("isomorphic-fetch")
const fs = require("fs")
const path = require("path")
const tmpdir = require("os-tmpdir")
const gunzip = require("gunzip-maybe")
const mkdirp = require("mkdirp")
const tar = require("tar-fs")
const createMutex = require("./createMutex")
require("isomorphic-fetch");
const fs = require("fs");
const path = require("path");
const tmpdir = require("os-tmpdir");
const gunzip = require("gunzip-maybe");
const mkdirp = require("mkdirp");
const tar = require("tar-fs");
const createMutex = require("./createMutex");
function createTempPath(name, version) {
const normalName = name.replace(/\//g, "-")
return path.join(tmpdir(), `unpkg-${normalName}-${version}`)
const normalName = name.replace(/\//g, "-");
return path.join(tmpdir(), `unpkg-${normalName}-${version}`);
}
function stripNamePrefix(headers) {
@@ -17,12 +17,12 @@ function stripNamePrefix(headers) {
// so we shorten that to just "index.js" here. A few packages use a
// prefix other than "package/". e.g. the firebase package uses the
// "firebase_npm/" prefix. So we just strip the first dir name.
headers.name = headers.name.replace(/^[^/]+\//, "")
return headers
headers.name = headers.name.replace(/^[^/]+\//, "");
return headers;
}
function ignoreSymlinks(file, headers) {
return headers.type === "link"
return headers.type === "link";
}
function extractResponse(response, outputDir) {
@@ -31,26 +31,26 @@ function extractResponse(response, outputDir) {
readable: true, // All dirs/files should be readable.
map: stripNamePrefix,
ignore: ignoreSymlinks
})
});
response.body
.pipe(gunzip())
.pipe(extract)
.on("finish", resolve)
.on("error", reject)
})
.on("error", reject);
});
}
function fetchAndExtract(tarballURL, outputDir) {
console.log(`info: Fetching ${tarballURL} and extracting to ${outputDir}`)
console.log(`info: Fetching ${tarballURL} and extracting to ${outputDir}`);
return fetch(tarballURL).then(response => {
return extractResponse(response, outputDir)
})
return extractResponse(response, outputDir);
});
}
const fetchMutex = createMutex((payload, callback) => {
const { tarballURL, outputDir } = payload
const { tarballURL, outputDir } = payload;
fs.access(outputDir, function(error) {
if (error) {
@@ -59,30 +59,30 @@ const fetchMutex = createMutex((payload, callback) => {
// fetched a package for the first time. Carry on!
mkdirp(outputDir, function(error) {
if (error) {
callback(error)
callback(error);
} else {
fetchAndExtract(tarballURL, outputDir).then(() => {
callback()
}, callback)
callback();
}, callback);
}
})
});
} else {
callback(error)
callback(error);
}
} else {
// Best case: we already have this package cached on disk!
callback()
callback();
}
})
})
});
});
function getPackage(packageConfig, callback) {
const tarballURL = packageConfig.dist.tarball
const outputDir = createTempPath(packageConfig.name, packageConfig.version)
const tarballURL = packageConfig.dist.tarball;
const outputDir = createTempPath(packageConfig.name, packageConfig.version);
fetchMutex(tarballURL, { tarballURL, outputDir }, function(error) {
callback(error, outputDir)
})
callback(error, outputDir);
});
}
module.exports = getPackage
module.exports = getPackage;
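A usage sketch (not part of the commit; the package config shown is illustrative, and only name, version, and dist.tarball are read):

const getPackage = require("./getPackage"); // relative path is an assumption

// A trimmed registry document for one published version.
const packageConfig = {
  name: "react",
  version: "16.2.0",
  dist: { tarball: "https://registry.npmjs.org/react/-/react-16.2.0.tgz" }
};

getPackage(packageConfig, (error, outputDir) => {
  if (error) throw error;
  console.log(`Tarball extracted to ${outputDir}`); // e.g. $TMPDIR/unpkg-react-16.2.0
});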

View File

@@ -1,33 +1,34 @@
require("isomorphic-fetch")
const createCache = require("./createCache")
const createMutex = require("./createMutex")
require("isomorphic-fetch");
const createCache = require("./createCache");
const createMutex = require("./createMutex");
const RegistryURL = process.env.NPM_REGISTRY_URL || "https://registry.npmjs.org"
const RegistryURL =
process.env.NPM_REGISTRY_URL || "https://registry.npmjs.org";
const PackageInfoCache = createCache("packageInfo")
const PackageInfoCache = createCache("packageInfo");
function fetchPackageInfo(packageName) {
console.log(`info: Fetching package info for ${packageName}`)
console.log(`info: Fetching package info for ${packageName}`);
let encodedPackageName
let encodedPackageName;
if (packageName.charAt(0) === "@") {
encodedPackageName = `@${encodeURIComponent(packageName.substring(1))}`
encodedPackageName = `@${encodeURIComponent(packageName.substring(1))}`;
} else {
encodedPackageName = encodeURIComponent(packageName)
encodedPackageName = encodeURIComponent(packageName);
}
const url = `${RegistryURL}/${encodedPackageName}`
const url = `${RegistryURL}/${encodedPackageName}`;
return fetch(url, {
headers: {
Accept: "application/json"
}
}).then(res => {
return res.status === 404 ? null : res.json()
})
return res.status === 404 ? null : res.json();
});
}
const PackageNotFound = "PackageNotFound"
const PackageNotFound = "PackageNotFound";
// This mutex prevents multiple concurrent requests to
// the registry for the same package info.
@@ -40,32 +41,32 @@ const fetchMutex = createMutex((packageName, callback) => {
// In the worst case, a brand new package's info will be
// available within 5 minutes.
PackageInfoCache.set(packageName, PackageNotFound, 300, function() {
callback(null, value)
})
callback(null, value);
});
} else {
// Cache valid package info for 1 minute.
PackageInfoCache.set(packageName, value, 60, function() {
callback(null, value)
})
callback(null, value);
});
}
},
function(error) {
// Do not cache errors.
PackageInfoCache.del(packageName, function() {
callback(error)
})
callback(error);
});
}
)
})
);
});
function getPackageInfo(packageName, callback) {
PackageInfoCache.get(packageName, function(error, value) {
if (error || value != null) {
callback(error, value === PackageNotFound ? null : value)
callback(error, value === PackageNotFound ? null : value);
} else {
fetchMutex(packageName, packageName, callback)
fetchMutex(packageName, packageName, callback);
}
})
});
}
module.exports = getPackageInfo
module.exports = getPackageInfo;
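A usage sketch (not part of the commit): cached lookups return straight from Redis, while misses go through the mutex so concurrent requests share one registry fetch.

const getPackageInfo = require("./getPackageInfo"); // relative path is an assumption

getPackageInfo("react", (error, packageInfo) => {
  if (error) throw error;
  // packageInfo is the registry document for the package, or null if unknown.
  console.log(packageInfo ? packageInfo["dist-tags"].latest : "not found");
});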

View File

@@ -1,15 +1,15 @@
const db = require("../../RedisClient")
const db = require("../../RedisClient");
function incrementCounter(counter, key, by) {
return new Promise((resolve, reject) => {
db.hincrby(counter, key, by, (error, value) => {
if (error) {
reject(error)
reject(error);
} else {
resolve(value)
resolve(value);
}
})
})
});
});
}
module.exports = incrementCounter
module.exports = incrementCounter;
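A brief sketch (the hash name here is made up) of how a caller might use it:

const incrementCounter = require("./incrementCounter"); // relative path is an assumption

// HINCRBY bumps one field of a Redis hash and resolves with the new total.
incrementCounter("stats-nodeVersions", process.version, 1).then(total => {
  console.log(`${process.version} seen ${total} times`);
});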

View File

@@ -1,25 +1,25 @@
const parsePackageURL = require("../utils/parsePackageURL")
const parsePackageURL = require("../utils/parsePackageURL");
/**
* Adds various properties to the request object to do with the
* package/file being requested.
*/
function validatePackageURL(req, res, next) {
const url = parsePackageURL(req.url)
const url = parsePackageURL(req.url);
if (url == null) {
return res.status(403).send({ error: `Invalid URL: ${req.url}` })
return res.status(403).send({ error: `Invalid URL: ${req.url}` });
}
req.packageName = url.packageName
req.packageVersion = url.packageVersion
req.packageSpec = `${url.packageName}@${url.packageVersion}`
req.pathname = url.pathname
req.filename = url.filename
req.search = url.search
req.query = url.query
req.packageName = url.packageName;
req.packageVersion = url.packageVersion;
req.packageSpec = `${url.packageName}@${url.packageVersion}`;
req.pathname = url.pathname;
req.filename = url.filename;
req.search = url.search;
req.query = url.query;
next()
next();
}
module.exports = validatePackageURL
module.exports = validatePackageURL;

View File

@@ -1,35 +1,35 @@
const getFileContentType = require("../getFileContentType")
const getFileContentType = require("../getFileContentType");
it("gets a content type of text/plain for LICENSE|README|CHANGES|AUTHORS|Makefile", () => {
expect(getFileContentType("AUTHORS")).toBe("text/plain")
expect(getFileContentType("CHANGES")).toBe("text/plain")
expect(getFileContentType("LICENSE")).toBe("text/plain")
expect(getFileContentType("Makefile")).toBe("text/plain")
expect(getFileContentType("PATENTS")).toBe("text/plain")
expect(getFileContentType("README")).toBe("text/plain")
})
expect(getFileContentType("AUTHORS")).toBe("text/plain");
expect(getFileContentType("CHANGES")).toBe("text/plain");
expect(getFileContentType("LICENSE")).toBe("text/plain");
expect(getFileContentType("Makefile")).toBe("text/plain");
expect(getFileContentType("PATENTS")).toBe("text/plain");
expect(getFileContentType("README")).toBe("text/plain");
});
it("gets a content type of text/plain for .*rc files", () => {
expect(getFileContentType(".eslintrc")).toBe("text/plain")
expect(getFileContentType(".babelrc")).toBe("text/plain")
expect(getFileContentType(".anythingrc")).toBe("text/plain")
})
expect(getFileContentType(".eslintrc")).toBe("text/plain");
expect(getFileContentType(".babelrc")).toBe("text/plain");
expect(getFileContentType(".anythingrc")).toBe("text/plain");
});
it("gets a content type of text/plain for .git* files", () => {
expect(getFileContentType(".gitignore")).toBe("text/plain")
expect(getFileContentType(".gitanything")).toBe("text/plain")
})
expect(getFileContentType(".gitignore")).toBe("text/plain");
expect(getFileContentType(".gitanything")).toBe("text/plain");
});
it("gets a content type of text/plain for .*ignore files", () => {
expect(getFileContentType(".eslintignore")).toBe("text/plain")
expect(getFileContentType(".anythingignore")).toBe("text/plain")
})
expect(getFileContentType(".eslintignore")).toBe("text/plain");
expect(getFileContentType(".anythingignore")).toBe("text/plain");
});
it("gets a content type of text/plain for .ts files", () => {
expect(getFileContentType("app.ts")).toBe("text/plain")
expect(getFileContentType("app.d.ts")).toBe("text/plain")
})
expect(getFileContentType("app.ts")).toBe("text/plain");
expect(getFileContentType("app.d.ts")).toBe("text/plain");
});
it("gets a content type of text/plain for .flow files", () => {
expect(getFileContentType("app.js.flow")).toBe("text/plain")
})
expect(getFileContentType("app.js.flow")).toBe("text/plain");
});

View File

@@ -1,4 +1,4 @@
const parsePackageURL = require("../parsePackageURL")
const parsePackageURL = require("../parsePackageURL");
describe("parsePackageURL", () => {
it("parses plain packages", () => {
@@ -9,8 +9,8 @@ describe("parsePackageURL", () => {
packageName: "history",
packageVersion: "1.0.0",
filename: "/umd/history.min.js"
})
})
});
});
it("parses plain packages with a hyphen in the name", () => {
expect(parsePackageURL("/query-string@5.0.0/index.js")).toEqual({
@@ -20,8 +20,8 @@ describe("parsePackageURL", () => {
packageName: "query-string",
packageVersion: "5.0.0",
filename: "/index.js"
})
})
});
});
it("parses plain packages with no version specified", () => {
expect(parsePackageURL("/query-string/index.js")).toEqual({
@@ -31,8 +31,8 @@ describe("parsePackageURL", () => {
packageName: "query-string",
packageVersion: "latest",
filename: "/index.js"
})
})
});
});
it("parses plain packages with version spec", () => {
expect(parsePackageURL("/query-string@>=4.0.0/index.js")).toEqual({
@@ -42,8 +42,8 @@ describe("parsePackageURL", () => {
packageName: "query-string",
packageVersion: ">=4.0.0",
filename: "/index.js"
})
})
});
});
it("parses scoped packages", () => {
expect(parsePackageURL("/@angular/router@4.3.3/src/index.d.ts")).toEqual({
@@ -53,8 +53,8 @@ describe("parsePackageURL", () => {
packageName: "@angular/router",
packageVersion: "4.3.3",
filename: "/src/index.d.ts"
})
})
});
});
it("parses package names with a period in them", () => {
expect(parsePackageURL("/index.js")).toEqual({
@@ -64,8 +64,8 @@ describe("parsePackageURL", () => {
packageName: "index.js",
packageVersion: "latest",
filename: ""
})
})
});
});
it("parses valid query parameters", () => {
expect(parsePackageURL("/history?main=browser")).toEqual({
@@ -75,11 +75,11 @@ describe("parsePackageURL", () => {
packageName: "history",
packageVersion: "latest",
filename: ""
})
})
});
});
it("returns null for invalid pathnames", () => {
expect(parsePackageURL("history")).toBe(null)
expect(parsePackageURL("/.invalid")).toBe(null)
})
})
expect(parsePackageURL("history")).toBe(null);
expect(parsePackageURL("/.invalid")).toBe(null);
});
});

View File

@@ -1,5 +1,5 @@
const babel = require("babel-core")
const unpkgRewrite = require("../unpkgRewriteBabelPlugin")
const babel = require("babel-core");
const unpkgRewrite = require("../unpkgRewriteBabelPlugin");
const testCases = [
{
@@ -8,7 +8,8 @@ const testCases = [
},
{
before: "import router from '@angular/router';",
after: "import router from 'https://unpkg.com/@angular/router@4.3.5?module';"
after:
"import router from 'https://unpkg.com/@angular/router@4.3.5?module';"
},
{
before: "import map from 'lodash.map';",
@@ -54,23 +55,23 @@ const testCases = [
before: "export var message = 'hello';",
after: "export var message = 'hello';"
}
]
];
const dependencies = {
react: "15.6.1",
"@angular/router": "4.3.5",
"lodash.map": "4.6.0",
pn: "1.0.0"
}
};
describe("Rewriting imports/exports", () => {
testCases.forEach(testCase => {
it(`successfully rewrites "${testCase.before}"`, () => {
const result = babel.transform(testCase.before, {
plugins: [unpkgRewrite(dependencies)]
})
});
expect(result.code).toEqual(testCase.after)
})
})
})
expect(result.code).toEqual(testCase.after);
});
});
});

View File

@@ -1,9 +1,9 @@
function createPackageURL(packageName, version, pathname, search) {
let url = `/${packageName}`
if (version != null) url += `@${version}`
if (pathname) url += pathname
if (search) url += search
return url
let url = `/${packageName}`;
if (version != null) url += `@${version}`;
if (pathname) url += pathname;
if (search) url += search;
return url;
}
module.exports = createPackageURL
module.exports = createPackageURL;
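Two example calls (a sketch, not part of the commit) mirroring the fields produced by parsePackageURL:

const createPackageURL = require("./createPackageURL"); // relative path is an assumption

console.log(createPackageURL("@angular/router", "4.3.3", "/src/index.d.ts", ""));
// "/@angular/router@4.3.3/src/index.d.ts"

console.log(createPackageURL("history", null, "", "?main=browser"));
// "/history?main=browser"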

View File

@@ -1,13 +1,22 @@
const mime = require("mime")
const mime = require("mime");
mime.define({
"text/plain": ["authors", "changes", "license", "makefile", "patents", "readme", "ts", "flow"]
})
"text/plain": [
"authors",
"changes",
"license",
"makefile",
"patents",
"readme",
"ts",
"flow"
]
});
const TextFiles = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore)$/i
const TextFiles = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore)$/i;
function getFileContentType(file) {
return TextFiles.test(file) ? "text/plain" : mime.lookup(file)
return TextFiles.test(file) ? "text/plain" : mime.lookup(file);
}
module.exports = getFileContentType
module.exports = getFileContentType;

View File

@@ -1,15 +1,15 @@
const fs = require("fs")
const fs = require("fs");
function getFileStats(file) {
return new Promise((resolve, reject) => {
fs.lstat(file, (error, stats) => {
if (error) {
reject(error)
reject(error);
} else {
resolve(stats)
resolve(stats);
}
})
})
});
});
}
module.exports = getFileStats
module.exports = getFileStats;

View File

@@ -1,44 +1,51 @@
const fs = require("fs")
const path = require("path")
const SRIToolbox = require("sri-toolbox")
const getFileContentType = require("./getFileContentType")
const getFileStats = require("./getFileStats")
const getFileType = require("./getFileType")
const fs = require("fs");
const path = require("path");
const SRIToolbox = require("sri-toolbox");
const getFileContentType = require("./getFileContentType");
const getFileStats = require("./getFileStats");
const getFileType = require("./getFileType");
function getEntries(dir, file, maximumDepth) {
return new Promise((resolve, reject) => {
fs.readdir(path.join(dir, file), function(error, files) {
if (error) {
reject(error)
reject(error);
} else {
resolve(
Promise.all(files.map(f => getFileStats(path.join(dir, file, f)))).then(statsArray => {
Promise.all(
files.map(f => getFileStats(path.join(dir, file, f)))
).then(statsArray => {
return Promise.all(
statsArray.map((stats, index) =>
getMetadataRecursive(dir, path.join(file, files[index]), stats, maximumDepth - 1)
getMetadataRecursive(
dir,
path.join(file, files[index]),
stats,
maximumDepth - 1
)
)
)
);
})
)
);
}
})
})
});
});
}
function formatTime(time) {
return new Date(time).toISOString()
return new Date(time).toISOString();
}
function getIntegrity(file) {
return new Promise((resolve, reject) => {
fs.readFile(file, function(error, data) {
if (error) {
reject(error)
reject(error);
} else {
resolve(SRIToolbox.generate({ algorithms: ["sha384"] }, data))
resolve(SRIToolbox.generate({ algorithms: ["sha384"] }, data));
}
})
})
});
});
}
function getMetadataRecursive(dir, file, stats, maximumDepth) {
@@ -48,27 +55,31 @@ function getMetadataRecursive(dir, file, stats, maximumDepth) {
path: file,
size: stats.size,
type: getFileType(stats)
}
};
if (stats.isFile()) {
return getIntegrity(path.join(dir, file)).then(integrity => {
metadata.integrity = integrity
return metadata
})
metadata.integrity = integrity;
return metadata;
});
}
if (!stats.isDirectory() || maximumDepth === 0) return Promise.resolve(metadata)
if (!stats.isDirectory() || maximumDepth === 0)
return Promise.resolve(metadata);
return getEntries(dir, file, maximumDepth).then(files => {
metadata.files = files
return metadata
})
metadata.files = files;
return metadata;
});
}
function getMetadata(baseDir, path, stats, maximumDepth, callback) {
getMetadataRecursive(baseDir, path, stats, maximumDepth).then(function(metadata) {
callback(null, metadata)
}, callback)
getMetadataRecursive(baseDir, path, stats, maximumDepth).then(function(
metadata
) {
callback(null, metadata);
},
callback);
}
module.exports = getMetadata
module.exports = getMetadata;
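A usage sketch (the directory shown is hypothetical) that walks an extracted package at most two levels deep:

const fs = require("fs");
const getMetadata = require("./getMetadata"); // relative path is an assumption

const baseDir = "/tmp/unpkg-react-16.2.0"; // wherever the tarball was extracted

fs.lstat(baseDir, (error, stats) => {
  if (error) throw error;
  getMetadata(baseDir, "", stats, 2, (error, metadata) => {
    if (error) throw error;
    // Nested { path, size, type, ... } entries, plus integrity for files
    // and a files array for directories.
    console.log(JSON.stringify(metadata, null, 2));
  });
});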

View File

@@ -1,35 +1,35 @@
const url = require("url")
const validatePackageName = require("./validatePackageName")
const url = require("url");
const validatePackageName = require("./validatePackageName");
const URLFormat = /^\/((?:@[^/@]+\/)?[^/@]+)(?:@([^/]+))?(\/.*)?$/
const URLFormat = /^\/((?:@[^/@]+\/)?[^/@]+)(?:@([^/]+))?(\/.*)?$/;
function decodeParam(param) {
if (param) {
try {
return decodeURIComponent(param)
return decodeURIComponent(param);
} catch (error) {
// Ignore invalid params.
}
}
return ""
return "";
}
function parsePackageURL(packageURL) {
const { pathname, search, query } = url.parse(packageURL, true)
const { pathname, search, query } = url.parse(packageURL, true);
const match = URLFormat.exec(pathname)
const match = URLFormat.exec(pathname);
// Disallow invalid URL formats.
if (match == null) return null
if (match == null) return null;
const packageName = match[1]
const packageName = match[1];
// Disallow invalid npm package names.
if (!validatePackageName(packageName)) return null
if (!validatePackageName(packageName)) return null;
const packageVersion = decodeParam(match[2]) || "latest"
const filename = decodeParam(match[3])
const packageVersion = decodeParam(match[2]) || "latest";
const filename = decodeParam(match[3]);
return {
// If the URL is /@scope/name@version/file.js?main=browser:
@@ -39,7 +39,7 @@ function parsePackageURL(packageURL) {
packageName, // @scope/name
packageVersion, // version
filename // /file.js
}
};
}
module.exports = parsePackageURL
module.exports = parsePackageURL;

View File

@@ -1,9 +1,9 @@
const fs = require("fs")
const path = require("path")
const csso = require("csso")
const fs = require("fs");
const path = require("path");
const csso = require("csso");
function readCSS(...args) {
return csso.minify(fs.readFileSync(path.resolve(...args), "utf8")).css
return csso.minify(fs.readFileSync(path.resolve(...args), "utf8")).css;
}
module.exports = readCSS
module.exports = readCSS;

View File

@@ -1,11 +1,13 @@
const React = require("react")
const ReactDOMServer = require("react-dom/server")
const React = require("react");
const ReactDOMServer = require("react-dom/server");
const doctype = "<!DOCTYPE html>"
const doctype = "<!DOCTYPE html>";
function renderPage(page, props) {
const html = ReactDOMServer.renderToStaticMarkup(React.createElement(page, props))
return doctype + html
const html = ReactDOMServer.renderToStaticMarkup(
React.createElement(page, props)
);
return doctype + html;
}
module.exports = renderPage
module.exports = renderPage;
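A sketch (MainPage here is a stand-in component, not one from this repo):

const React = require("react");
const renderPage = require("./renderPage"); // relative path is an assumption

const MainPage = props => React.createElement("title", null, props.title);

console.log(renderPage(MainPage, { title: "unpkg" }));
// "<!DOCTYPE html><title>unpkg</title>"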

View File

@@ -1,7 +1,7 @@
const URL = require("whatwg-url")
const warning = require("warning")
const URL = require("whatwg-url");
const warning = require("warning");
const BareIdentifierFormat = /^((?:@[^\/]+\/)?[^\/]+)(\/.*)?$/
const BareIdentifierFormat = /^((?:@[^\/]+\/)?[^\/]+)(\/.*)?$/;
function unpkgRewriteBabelPlugin(dependencies = {}) {
return {
@@ -9,36 +9,36 @@ function unpkgRewriteBabelPlugin(dependencies = {}) {
visitor: {
"ImportDeclaration|ExportNamedDeclaration|ExportAllDeclaration"(path) {
if (!path.node.source) return // probably a variable declaration
if (!path.node.source) return; // probably a variable declaration
if (
URL.parseURL(path.node.source.value) != null ||
path.node.source.value.substr(0, 2) === "//"
)
return // valid URL or URL w/o protocol, leave it alone
return; // valid URL or URL w/o protocol, leave it alone
if ([".", "/"].indexOf(path.node.source.value.charAt(0)) >= 0) {
// local path
path.node.source.value = `${path.node.source.value}?module`
path.node.source.value = `${path.node.source.value}?module`;
} else {
// "bare" identifier
const match = BareIdentifierFormat.exec(path.node.source.value)
const packageName = match[1]
const file = match[2] || ""
const match = BareIdentifierFormat.exec(path.node.source.value);
const packageName = match[1];
const file = match[2] || "";
warning(
dependencies[packageName],
'Missing version info for package "%s" in dependencies; falling back to "latest"',
packageName
)
);
const version = dependencies[packageName] || "latest"
const version = dependencies[packageName] || "latest";
path.node.source.value = `https://unpkg.com/${packageName}@${version}${file}?module`
path.node.source.value = `https://unpkg.com/${packageName}@${version}${file}?module`;
}
}
}
}
};
}
module.exports = unpkgRewriteBabelPlugin
module.exports = unpkgRewriteBabelPlugin;

View File

@@ -1,7 +1,7 @@
const validateNpmPackageName = require("validate-npm-package-name")
const validateNpmPackageName = require("validate-npm-package-name");
function validatePackageName(packageName) {
return validateNpmPackageName(packageName).errors == null
return validateNpmPackageName(packageName).errors == null;
}
module.exports = validatePackageName
module.exports = validatePackageName;