Don't cache packages on the filesystem
Should help with transient errors reported in #86, #104, and #110
This commit is contained in: parent 700bb109a1, commit 5969ecc6ef
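In short: instead of downloading each tarball to a temp directory (and locking it with proper-lockfile), the server now streams the tarball through gunzip and tar-stream and keeps the matching entry in memory. Below is a minimal sketch of that idea, assuming the same isomorphic-fetch / gunzip-maybe / tar-stream pipeline this commit adds in fetchArchive and searchEntries; the findEntry helper and its single-entry lookup are illustrative only, not the actual middleware code.

const fetch = require("isomorphic-fetch");
const gunzip = require("gunzip-maybe");
const tar = require("tar-stream");

// Sketch: stream a package tarball and buffer one entry in memory,
// rather than extracting the whole package to a temp dir on disk.
function findEntry(tarballURL, entryName) {
  return fetch(tarballURL).then(
    res =>
      new Promise((resolve, reject) => {
        const extract = res.body.pipe(gunzip()).pipe(tar.extract());

        extract
          .on("error", reject)
          .on("finish", () => resolve(null)) // entry not found
          .on("entry", (header, stream, next) => {
            // Tarball paths look like "package/index.js"; strip the first segment.
            const name = header.name.replace(/^[^\/]+\/?/, "");

            if (header.type !== "file" || name !== entryName) {
              stream.resume();
              stream.on("end", next);
              return;
            }

            const chunks = [];
            stream.on("data", chunk => chunks.push(chunk));
            stream.on("end", () => {
              resolve({ name, content: Buffer.concat(chunks) });
              next();
            });
          });
      })
  );
}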
@@ -21,14 +21,11 @@
    "isomorphic-fetch": "^2.2.1",
    "jsonwebtoken": "^8.1.0",
    "mime": "^1.4.0",
    "mkdirp": "^0.5.1",
    "morgan": "^1.8.1",
    "ndjson": "^1.5.0",
    "node-forge": "^0.7.1",
    "os-tmpdir": "^1.0.2",
    "pretty-bytes": "^3",
    "prop-types": "^15.5.8",
    "proper-lockfile": "^3.0.2",
    "raven": "^2.6.3",
    "react": "^15.5.4",
    "react-dom": "^15.5.4",
@@ -36,8 +33,9 @@
    "react-router-dom": "^4.0.0",
    "redis": "^2.7.1",
    "semver": "^5.3.0",
    "sort-by": "^1.2.0",
    "sri-toolbox": "^0.2.0",
    "tar-fs": "^1.16.2",
    "tar-stream": "^1.6.1",
    "throng": "^4.0.0",
    "validate-npm-package-name": "^3.0.0",
    "warning": "^3.0.0",
@ -5,51 +5,55 @@ const babel = require("babel-core");
|
|||
|
||||
const IndexPage = require("../components/IndexPage");
|
||||
const unpkgRewrite = require("../plugins/unpkgRewrite");
|
||||
const addLeadingSlash = require("../utils/addLeadingSlash");
|
||||
const renderPage = require("../utils/renderPage");
|
||||
const getMetadata = require("../utils/getMetadata");
|
||||
const getFileContentType = require("../utils/getFileContentType");
|
||||
const getEntries = require("../utils/getEntries");
|
||||
|
||||
/**
|
||||
* Automatically generate HTML pages that show package contents.
|
||||
*/
|
||||
const AutoIndex = !process.env.DISABLE_INDEX;
|
||||
|
||||
/**
|
||||
* Maximum recursion depth for meta listings.
|
||||
*/
|
||||
const MaximumDepth = 128;
|
||||
|
||||
function serveMetadata(req, res) {
|
||||
getMetadata(
|
||||
req.packageDir,
|
||||
req.filename,
|
||||
req.stats,
|
||||
MaximumDepth,
|
||||
(error, metadata) => {
|
||||
if (error) {
|
||||
console.error(error);
|
||||
|
||||
res
|
||||
.status(500)
|
||||
.type("text")
|
||||
.send(
|
||||
`Cannot generate metadata for ${req.packageSpec}${req.filename}`
|
||||
);
|
||||
} else {
|
||||
// Cache metadata for 1 year.
|
||||
res
|
||||
.set({
|
||||
"Cache-Control": "public, max-age=31536000",
|
||||
"Cache-Tag": "meta"
|
||||
})
|
||||
.send(metadata);
|
||||
}
|
||||
}
|
||||
);
|
||||
function getContentTypeHeader(type) {
|
||||
return type === "application/javascript" ? type + "; charset=utf-8" : type;
|
||||
}
|
||||
|
||||
function rewriteBareModuleIdentifiers(file, packageConfig, callback) {
|
||||
function getMetadata(entry, entries) {
|
||||
const metadata = Object.assign(
|
||||
{
|
||||
path: addLeadingSlash(entry.name)
|
||||
},
|
||||
entry.type === "file"
|
||||
? {
|
||||
type: entry.type,
|
||||
contentType: entry.contentType,
|
||||
integrity: entry.integrity,
|
||||
lastModified: entry.lastModified,
|
||||
size: entry.size
|
||||
}
|
||||
: {
|
||||
type: entry.type
|
||||
}
|
||||
);
|
||||
|
||||
if (entry.type === "directory") {
|
||||
metadata.files = Object.keys(entries)
|
||||
.filter(
|
||||
name =>
|
||||
name !== entry.name && path.dirname(name) === (entry.name || ".")
|
||||
)
|
||||
.map(name => getMetadata(entries[name], entries));
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
function serveMetadata(req, res) {
|
||||
const metadata = getMetadata(req.entry, req.entries);
|
||||
|
||||
res
|
||||
.set({
|
||||
"Cache-Control": "public,max-age=31536000", // 1 year
|
||||
"Cache-Tag": "meta"
|
||||
})
|
||||
.send(metadata);
|
||||
}
|
||||
|
||||
function rewriteBareModuleIdentifiers(code, packageConfig) {
|
||||
const dependencies = Object.assign(
|
||||
{},
|
||||
packageConfig.peerDependencies,
|
||||
|
@ -64,119 +68,89 @@ function rewriteBareModuleIdentifiers(file, packageConfig, callback) {
|
|||
plugins: [unpkgRewrite(dependencies)]
|
||||
};
|
||||
|
||||
babel.transformFile(file, options, (error, result) => {
|
||||
callback(error, result && result.code);
|
||||
});
|
||||
return babel.transform(code, options).code;
|
||||
}
|
||||
|
||||
function serveJavaScriptModule(req, res) {
|
||||
if (getFileContentType(req.filename) !== "application/javascript") {
|
||||
if (req.entry.contentType !== "application/javascript") {
|
||||
return res
|
||||
.status(403)
|
||||
.type("text")
|
||||
.send("?module mode is available only for JavaScript files");
|
||||
}
|
||||
|
||||
const file = path.join(req.packageDir, req.filename);
|
||||
try {
|
||||
const code = rewriteBareModuleIdentifiers(
|
||||
req.entry.content.toString("utf8"),
|
||||
req.packageConfig
|
||||
);
|
||||
|
||||
rewriteBareModuleIdentifiers(file, req.packageConfig, (error, code) => {
|
||||
if (error) {
|
||||
console.error(error);
|
||||
res
|
||||
.set({
|
||||
"Content-Length": Buffer.byteLength(code),
|
||||
"Content-Type": getContentTypeHeader(req.entry.contentType),
|
||||
"Cache-Control": "public,max-age=31536000", // 1 year
|
||||
ETag: etag(code),
|
||||
"Cache-Tag": "file,js-file,js-module"
|
||||
})
|
||||
.send(code);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
|
||||
const errorName = error.constructor.name;
|
||||
const errorMessage = error.message.replace(
|
||||
/^.*?\/unpkg-.+?\//,
|
||||
`/${req.packageSpec}/`
|
||||
const errorName = error.constructor.name;
|
||||
const errorMessage = error.message.replace(
|
||||
/^.*?\/unpkg-.+?\//,
|
||||
`/${req.packageSpec}/`
|
||||
);
|
||||
const codeFrame = error.codeFrame;
|
||||
const debugInfo = `${errorName}: ${errorMessage}\n\n${codeFrame}`;
|
||||
|
||||
res
|
||||
.status(500)
|
||||
.type("text")
|
||||
.send(
|
||||
`Cannot generate module for ${req.packageSpec}${
|
||||
req.filename
|
||||
}\n\n${debugInfo}`
|
||||
);
|
||||
const codeFrame = error.codeFrame;
|
||||
const debugInfo = `${errorName}: ${errorMessage}\n\n${codeFrame}`;
|
||||
|
||||
res
|
||||
.status(500)
|
||||
.type("text")
|
||||
.send(
|
||||
`Cannot generate module for ${req.packageSpec}${
|
||||
req.filename
|
||||
}\n\n${debugInfo}`
|
||||
);
|
||||
} else {
|
||||
// Cache modules for 1 year.
|
||||
res
|
||||
.set({
|
||||
"Content-Type": "application/javascript; charset=utf-8",
|
||||
"Content-Length": Buffer.byteLength(code),
|
||||
"Cache-Control": "public, max-age=31536000",
|
||||
"Cache-Tag": "file,js-file,js-module"
|
||||
})
|
||||
.send(code);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function serveStaticFile(req, res) {
|
||||
const tags = ["file"];
|
||||
|
||||
const ext = path.extname(req.filename).substr(1);
|
||||
const ext = path.extname(req.entry.name).substr(1);
|
||||
if (ext) {
|
||||
tags.push(`${ext}-file`);
|
||||
}
|
||||
|
||||
let contentType = getFileContentType(req.filename);
|
||||
if (contentType === "application/javascript") {
|
||||
contentType += "; charset=utf-8";
|
||||
}
|
||||
|
||||
// Cache files for 1 year.
|
||||
res.set({
|
||||
"Content-Type": contentType,
|
||||
"Content-Length": req.stats.size,
|
||||
"Cache-Control": "public, max-age=31536000",
|
||||
"Last-Modified": req.stats.mtime.toUTCString(),
|
||||
ETag: etag(req.stats),
|
||||
"Cache-Tag": tags.join(",")
|
||||
});
|
||||
|
||||
const file = path.join(req.packageDir, req.filename);
|
||||
const stream = fs.createReadStream(file);
|
||||
|
||||
stream.on("error", error => {
|
||||
console.error(`Cannot send file ${req.packageSpec}${req.filename}`);
|
||||
console.error(error);
|
||||
res.sendStatus(500);
|
||||
});
|
||||
|
||||
stream.pipe(res);
|
||||
res
|
||||
.set({
|
||||
"Content-Length": req.entry.size,
|
||||
"Content-Type": getContentTypeHeader(req.entry.contentType),
|
||||
"Cache-Control": "public,max-age=31536000", // 1 year
|
||||
"Last-Modified": req.entry.lastModified,
|
||||
ETag: etag(req.entry.content),
|
||||
"Cache-Tag": tags.join(",")
|
||||
})
|
||||
.send(req.entry.content);
|
||||
}
|
||||
|
||||
function serveIndex(req, res) {
|
||||
const dir = path.join(req.packageDir, req.filename);
|
||||
const html = renderPage(IndexPage, {
|
||||
packageInfo: req.packageInfo,
|
||||
version: req.packageVersion,
|
||||
filename: req.filename,
|
||||
entries: req.entries,
|
||||
entry: req.entry
|
||||
});
|
||||
|
||||
getEntries(dir).then(
|
||||
entries => {
|
||||
const html = renderPage(IndexPage, {
|
||||
packageInfo: req.packageInfo,
|
||||
version: req.packageVersion,
|
||||
dir: req.filename,
|
||||
entries
|
||||
});
|
||||
|
||||
// Cache HTML directory listings for 1 minute.
|
||||
res
|
||||
.set({
|
||||
"Cache-Control": "public, max-age=60",
|
||||
"Cache-Tag": "index"
|
||||
})
|
||||
.send(html);
|
||||
},
|
||||
error => {
|
||||
console.error(error);
|
||||
|
||||
res
|
||||
.status(500)
|
||||
.type("text")
|
||||
.send(`Cannot read entries for ${req.packageSpec}${req.filename}`);
|
||||
}
|
||||
);
|
||||
res
|
||||
.set({
|
||||
"Cache-Control": "public,max-age=60", // 1 minute
|
||||
"Cache-Tag": "index"
|
||||
})
|
||||
.send(html);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -184,21 +158,18 @@ function serveIndex(req, res) {
|
|||
*/
|
||||
function serveFile(req, res) {
|
||||
if (req.query.meta != null) {
|
||||
serveMetadata(req, res);
|
||||
} else if (req.stats.isFile()) {
|
||||
if (req.query.module != null) {
|
||||
serveJavaScriptModule(req, res);
|
||||
} else {
|
||||
serveStaticFile(req, res);
|
||||
}
|
||||
} else if (req.stats.isDirectory() && AutoIndex) {
|
||||
serveIndex(req, res);
|
||||
} else {
|
||||
res
|
||||
.status(403)
|
||||
.type("text")
|
||||
.send(`Cannot serve ${req.packageSpec}${req.filename}; it's not a file`);
|
||||
return serveMetadata(req, res);
|
||||
}
|
||||
|
||||
if (req.entry.type === "directory") {
|
||||
return serveIndex(req, res);
|
||||
}
|
||||
|
||||
if (req.query.module != null) {
|
||||
return serveJavaScriptModule(req, res);
|
||||
}
|
||||
|
||||
serveStaticFile(req, res);
|
||||
}
|
||||
|
||||
module.exports = serveFile;
|
||||
|
|
|
@ -1,32 +1,26 @@
|
|||
const path = require("path");
|
||||
const formatBytes = require("pretty-bytes");
|
||||
const sortBy = require("sort-by");
|
||||
|
||||
const getFileContentType = require("../utils/getFileContentType");
|
||||
const cloneElement = require("./utils/cloneElement");
|
||||
const e = require("./utils/createElement");
|
||||
|
||||
function formatTime(time) {
|
||||
return new Date(time).toISOString();
|
||||
function stripLeadingSegment(name) {
|
||||
return name.replace(/^[^\/]+\//, "");
|
||||
}
|
||||
|
||||
function DirectoryListing({ dir, entries }) {
|
||||
const rows = entries.map(({ file, stats }, index) => {
|
||||
const isDir = stats.isDirectory();
|
||||
const href = file + (isDir ? "/" : "");
|
||||
function getValues(object) {
|
||||
return Object.keys(object).map(key => object[key]);
|
||||
}
|
||||
|
||||
return e(
|
||||
"tr",
|
||||
{ key: file, className: index % 2 ? "odd" : "even" },
|
||||
e("td", null, e("a", { title: file, href }, file)),
|
||||
e("td", null, isDir ? "-" : getFileContentType(file)),
|
||||
e("td", null, isDir ? "-" : formatBytes(stats.size)),
|
||||
e("td", null, isDir ? "-" : formatTime(stats.mtime))
|
||||
);
|
||||
});
|
||||
function DirectoryListing({ filename, entry, entries }) {
|
||||
const rows = [];
|
||||
|
||||
if (dir !== "/") {
|
||||
rows.unshift(
|
||||
if (filename !== "/") {
|
||||
rows.push(
|
||||
e(
|
||||
"tr",
|
||||
{ key: "..", className: "odd" },
|
||||
{ key: ".." },
|
||||
e("td", null, e("a", { title: "Parent directory", href: "../" }, "..")),
|
||||
e("td", null, "-"),
|
||||
e("td", null, "-"),
|
||||
|
@ -35,6 +29,48 @@ function DirectoryListing({ dir, entries }) {
|
|||
);
|
||||
}
|
||||
|
||||
const matchingEntries = getValues(entries).filter(
|
||||
({ name }) =>
|
||||
entry.name !== name && path.dirname(name) === (entry.name || ".")
|
||||
);
|
||||
|
||||
matchingEntries
|
||||
.filter(({ type }) => type === "directory")
|
||||
.sort(sortBy("name"))
|
||||
.forEach(({ name }) => {
|
||||
const relName = stripLeadingSegment(name);
|
||||
const href = relName + "/";
|
||||
|
||||
rows.push(
|
||||
e(
|
||||
"tr",
|
||||
{ key: name },
|
||||
e("td", null, e("a", { title: relName, href }, href)),
|
||||
e("td", null, "-"),
|
||||
e("td", null, "-"),
|
||||
e("td", null, "-")
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
matchingEntries
|
||||
.filter(({ type }) => type === "file")
|
||||
.sort(sortBy("name"))
|
||||
.forEach(({ name, size, contentType, lastModified }) => {
|
||||
const relName = stripLeadingSegment(name);
|
||||
|
||||
rows.push(
|
||||
e(
|
||||
"tr",
|
||||
{ key: name },
|
||||
e("td", null, e("a", { title: relName, href: relName }, relName)),
|
||||
e("td", null, contentType),
|
||||
e("td", null, formatBytes(size)),
|
||||
e("td", null, lastModified)
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
return e(
|
||||
"table",
|
||||
null,
|
||||
|
@ -50,7 +86,15 @@ function DirectoryListing({ dir, entries }) {
|
|||
e("th", null, "Last Modified")
|
||||
)
|
||||
),
|
||||
e("tbody", null, rows)
|
||||
e(
|
||||
"tbody",
|
||||
null,
|
||||
rows.map((row, index) =>
|
||||
cloneElement(row, {
|
||||
className: index % 2 ? "odd" : "even"
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@ function byVersion(a, b) {
|
|||
return semver.lt(a, b) ? -1 : semver.gt(a, b) ? 1 : 0;
|
||||
}
|
||||
|
||||
function IndexPage({ packageInfo, version, dir, entries }) {
|
||||
function IndexPage({ packageInfo, version, filename, entry, entries }) {
|
||||
const versions = Object.keys(packageInfo.versions).sort(byVersion);
|
||||
const options = versions.map(v =>
|
||||
e("option", { key: v, value: v }, `${packageInfo.name}@${v}`)
|
||||
|
@ -31,7 +31,7 @@ function IndexPage({ packageInfo, version, dir, entries }) {
|
|||
"head",
|
||||
null,
|
||||
e("meta", { charSet: "utf-8" }),
|
||||
e("title", null, `Index of ${dir}`),
|
||||
e("title", null, `Index of ${filename}`),
|
||||
s(IndexPageStyle)
|
||||
),
|
||||
e(
|
||||
|
@ -45,10 +45,10 @@ function IndexPage({ packageInfo, version, dir, entries }) {
|
|||
{ className: "version-wrapper" },
|
||||
e("select", { id: "version", defaultValue: version }, options)
|
||||
),
|
||||
e("h1", null, `Index of ${dir}`),
|
||||
e("h1", null, `Index of ${filename}`),
|
||||
x(IndexPageScript),
|
||||
e("hr"),
|
||||
e(DirectoryListing, { dir, entries }),
|
||||
e(DirectoryListing, { filename, entry, entries }),
|
||||
e("hr"),
|
||||
e("address", null, `${packageInfo.name}@${version}`)
|
||||
)
|
||||
|
|
|
@@ -0,0 +1,2 @@
const React = require("react");
module.exports = React.cloneElement;
@ -1,10 +1,13 @@
|
|||
const semver = require("semver");
|
||||
|
||||
const addLeadingSlash = require("../utils/addLeadingSlash");
|
||||
const createPackageURL = require("../utils/createPackageURL");
|
||||
const getPackageInfo = require("../utils/getPackageInfo");
|
||||
const getPackage = require("../utils/getPackage");
|
||||
const incrementCounter = require("../utils/incrementCounter");
|
||||
|
||||
function tagRedirect(req, res) {
|
||||
const version = req.packageInfo["dist-tags"][req.packageVersion];
|
||||
|
||||
// Cache tag redirects for 1 minute.
|
||||
res
|
||||
.set({
|
||||
|
@ -13,12 +16,7 @@ function tagRedirect(req, res) {
|
|||
})
|
||||
.redirect(
|
||||
302,
|
||||
createPackageURL(
|
||||
req.packageName,
|
||||
req.packageInfo["dist-tags"][req.packageVersion],
|
||||
req.filename,
|
||||
req.search
|
||||
)
|
||||
createPackageURL(req.packageName, version, req.filename, req.search)
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -47,9 +45,71 @@ function semverRedirect(req, res) {
|
|||
}
|
||||
}
|
||||
|
||||
function filenameRedirect(req, res) {
|
||||
let filename;
|
||||
if (req.query.module != null) {
|
||||
// See https://github.com/rollup/rollup/wiki/pkg.module
|
||||
filename =
|
||||
req.packageConfig.module ||
|
||||
req.packageConfig["jsnext:main"] ||
|
||||
"/index.js";
|
||||
} else if (
|
||||
req.query.main &&
|
||||
req.packageConfig[req.query.main] &&
|
||||
typeof req.packageConfig[req.query.main] === "string"
|
||||
) {
|
||||
// Deprecated, see #63
|
||||
filename = req.packageConfig[req.query.main];
|
||||
|
||||
// Count which packages are using this so we can warn them when we
|
||||
// remove this functionality.
|
||||
incrementCounter(
|
||||
"package-json-custom-main",
|
||||
req.packageSpec + "?main=" + req.query.main,
|
||||
1
|
||||
);
|
||||
} else if (
|
||||
req.packageConfig.unpkg &&
|
||||
typeof req.packageConfig.unpkg === "string"
|
||||
) {
|
||||
filename = req.packageConfig.unpkg;
|
||||
} else if (
|
||||
req.packageConfig.browser &&
|
||||
typeof req.packageConfig.browser === "string"
|
||||
) {
|
||||
// Deprecated, see #63
|
||||
filename = req.packageConfig.browser;
|
||||
|
||||
// Count which packages are using this so we can warn them when we
|
||||
// remove this functionality.
|
||||
incrementCounter("package-json-browser-fallback", req.packageSpec, 1);
|
||||
} else {
|
||||
filename = req.packageConfig.main || "/index.js";
|
||||
}
|
||||
|
||||
// Redirect to the exact filename so relative imports
|
||||
// and URLs resolve correctly.
|
||||
// TODO: increase the max-age?
|
||||
res
|
||||
.set({
|
||||
"Cache-Control": "public,max-age=60",
|
||||
"Cache-Tag": "redirect,filename-redirect"
|
||||
})
|
||||
.redirect(
|
||||
302,
|
||||
createPackageURL(
|
||||
req.packageName,
|
||||
req.packageVersion,
|
||||
addLeadingSlash(filename),
|
||||
createSearch(req.query)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch the package metadata and tarball from npm. Redirect to the exact
|
||||
* version if the request targets a tag or uses a semver version.
|
||||
* version if the request targets a tag or uses a semver version, or to the
|
||||
* exact filename if the request omits the filename.
|
||||
*/
|
||||
function fetchPackage(req, res, next) {
|
||||
getPackageInfo(req.packageName).then(
|
||||
|
@ -62,30 +122,21 @@ function fetchPackage(req, res, next) {
|
|||
}
|
||||
|
||||
req.packageInfo = packageInfo;
|
||||
req.packageConfig = req.packageInfo.versions[req.packageVersion];
|
||||
|
||||
if (req.packageVersion in req.packageInfo.versions) {
|
||||
// A valid request for a package we haven't downloaded yet.
|
||||
req.packageConfig = req.packageInfo.versions[req.packageVersion];
|
||||
|
||||
getPackage(req.packageConfig).then(
|
||||
outputDir => {
|
||||
req.packageDir = outputDir;
|
||||
next();
|
||||
},
|
||||
error => {
|
||||
console.error(error);
|
||||
|
||||
res
|
||||
.status(500)
|
||||
.type("text")
|
||||
.send(`Cannot fetch package ${req.packageSpec}`);
|
||||
}
|
||||
);
|
||||
} else if (req.packageVersion in req.packageInfo["dist-tags"]) {
|
||||
tagRedirect(req, res);
|
||||
} else {
|
||||
semverRedirect(req, res);
|
||||
if (!req.packageConfig) {
|
||||
if (req.packageVersion in req.packageInfo["dist-tags"]) {
|
||||
return tagRedirect(req, res);
|
||||
} else {
|
||||
return semverRedirect(req, res);
|
||||
}
|
||||
}
|
||||
|
||||
if (!req.filename) {
|
||||
return filenameRedirect(req, res);
|
||||
}
|
||||
|
||||
next();
|
||||
},
|
||||
error => {
|
||||
console.error(error);
|
||||
|
|
|
@ -1,166 +1,178 @@
|
|||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const fetch = require("isomorphic-fetch");
|
||||
const gunzip = require("gunzip-maybe");
|
||||
const tar = require("tar-stream");
|
||||
|
||||
const addLeadingSlash = require("../utils/addLeadingSlash");
|
||||
const createPackageURL = require("../utils/createPackageURL");
|
||||
const createSearch = require("../utils/createSearch");
|
||||
const incrementCounter = require("../utils/incrementCounter");
|
||||
const fetchArchive = require("../utils/fetchArchive");
|
||||
const getIntegrity = require("../utils/getIntegrity");
|
||||
const getContentType = require("../utils/getContentType");
|
||||
|
||||
/**
|
||||
* File extensions to look for when automatically resolving.
|
||||
*/
|
||||
const resolveExtensions = ["", ".js", ".json"];
|
||||
function indexRedirect(req, res, entry) {
|
||||
// Redirect to the index file so relative imports
|
||||
// resolve correctly.
|
||||
// TODO: increase the max-age?
|
||||
res
|
||||
.set({
|
||||
"Cache-Control": "public,max-age=60",
|
||||
"Cache-Tag": "redirect,index-redirect"
|
||||
})
|
||||
.redirect(
|
||||
302,
|
||||
createPackageURL(
|
||||
req.packageName,
|
||||
req.packageVersion,
|
||||
addLeadingSlash(entry.name),
|
||||
createSearch(req.query)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a path like "lib/file" into "lib/file.js" or "lib/file.json"
|
||||
* depending on which one is available, similar to require('lib/file').
|
||||
*/
|
||||
function resolveFile(base, useIndex, callback) {
|
||||
resolveExtensions.reduceRight((next, ext) => {
|
||||
const file = base + ext;
|
||||
function stripLeadingSegment(name) {
|
||||
return name.replace(/^[^\/]+\/?/, "");
|
||||
}
|
||||
|
||||
return () => {
|
||||
fs.stat(file, (error, stats) => {
|
||||
if (error) {
|
||||
if (error.code === "ENOENT" || error.code === "ENOTDIR") {
|
||||
next();
|
||||
} else {
|
||||
callback(error);
|
||||
}
|
||||
} else if (useIndex && stats.isDirectory()) {
|
||||
resolveFile(
|
||||
path.join(file, "index"),
|
||||
false,
|
||||
(error, indexFile, indexStats) => {
|
||||
if (error) {
|
||||
callback(error);
|
||||
} else if (indexFile) {
|
||||
callback(null, indexFile, indexStats);
|
||||
} else {
|
||||
next();
|
||||
}
|
||||
}
|
||||
);
|
||||
} else {
|
||||
callback(null, file, stats);
|
||||
function searchEntries(tarballStream, entryName, wantsHTML) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const entries = {};
|
||||
let foundEntry = null;
|
||||
|
||||
if (entryName === "") {
|
||||
foundEntry = entries[""] = { name: "", type: "directory" };
|
||||
}
|
||||
|
||||
tarballStream
|
||||
.on("error", reject)
|
||||
.on("finish", () => resolve({ entries, foundEntry }))
|
||||
.on("entry", (header, stream, next) => {
|
||||
const entry = {
|
||||
// Most packages have header names that look like `package/index.js`
|
||||
// so we shorten that to just `index.js` here. A few packages use a
|
||||
// prefix other than `package/`. e.g. the firebase package uses the
|
||||
// `firebase_npm/` prefix. So we just strip the first dir name.
|
||||
name: stripLeadingSegment(header.name),
|
||||
type: header.type
|
||||
};
|
||||
|
||||
// We are only interested in files that match the entryName.
|
||||
if (entry.type !== "file" || entry.name.indexOf(entryName) !== 0) {
|
||||
stream.resume();
|
||||
stream.on("end", next);
|
||||
return;
|
||||
}
|
||||
|
||||
entries[entry.name] = entry;
|
||||
|
||||
// Dynamically create "directory" entries for all directories
|
||||
// that are in this file's path. Some tarballs omit these entries
|
||||
// for some reason, so this is the brute force method.
|
||||
let dirname = path.dirname(entry.name);
|
||||
while (dirname !== ".") {
|
||||
const directoryEntry = { name: dirname, type: "directory" };
|
||||
|
||||
if (!entries[dirname]) {
|
||||
entries[dirname] = directoryEntry;
|
||||
|
||||
if (directoryEntry.name === entryName) {
|
||||
foundEntry = directoryEntry;
|
||||
}
|
||||
}
|
||||
|
||||
dirname = path.dirname(dirname);
|
||||
}
|
||||
|
||||
// Set the foundEntry variable if this entry name
|
||||
// matches exactly or if it's an index.html file
|
||||
// and the client wants HTML.
|
||||
if (
|
||||
entry.name === entryName ||
|
||||
(wantsHTML && entry.name === entryName + "/index.html")
|
||||
) {
|
||||
foundEntry = entry;
|
||||
}
|
||||
|
||||
const chunks = [];
|
||||
|
||||
stream.on("data", chunk => chunks.push(chunk));
|
||||
|
||||
stream.on("end", () => {
|
||||
const content = Buffer.concat(chunks);
|
||||
|
||||
// Set some extra properties for files that we will
|
||||
// need to serve them and for ?meta listings.
|
||||
entry.contentType = getContentType(entry.name);
|
||||
entry.integrity = getIntegrity(content);
|
||||
entry.lastModified = header.mtime.toUTCString();
|
||||
entry.size = content.length;
|
||||
|
||||
// Set the content only for the foundEntry and
|
||||
// discard the buffer for all others.
|
||||
if (entry === foundEntry) {
|
||||
entry.content = content;
|
||||
}
|
||||
|
||||
next();
|
||||
});
|
||||
});
|
||||
};
|
||||
}, callback)();
|
||||
});
|
||||
}
|
||||
|
||||
function getBasename(file) {
|
||||
return path.basename(file, path.extname(file));
|
||||
}
|
||||
const leadingSlash = /^\//;
|
||||
const trailingSlash = /\/$/;
|
||||
|
||||
/**
|
||||
* Find the file targeted by the request and get its stats. Redirect
|
||||
* inexact paths in ?module mode so relative imports resolve correctly.
|
||||
* Fetch and search the archive to try and find the requested file.
|
||||
* Redirect to the "index" file if a directory was requested.
|
||||
*/
|
||||
function findFile(req, res, next) {
|
||||
let filename = req.filename;
|
||||
let useIndex = true;
|
||||
fetchArchive(req.packageConfig).then(tarballStream => {
|
||||
const entryName = req.filename
|
||||
.replace(trailingSlash, "")
|
||||
.replace(leadingSlash, "");
|
||||
const wantsHTML = trailingSlash.test(req.filename);
|
||||
|
||||
if (req.query.module != null) {
|
||||
// They want an ES module.
|
||||
if (!filename) {
|
||||
// See https://github.com/rollup/rollup/wiki/pkg.module
|
||||
filename =
|
||||
req.packageConfig.module || req.packageConfig["jsnext:main"] || "/";
|
||||
}
|
||||
} else if (filename) {
|
||||
// They are requesting an explicit filename. Only try to find an
|
||||
// index.js if they are NOT requesting an index page.
|
||||
useIndex = filename.charAt(filename.length - 1) !== "/";
|
||||
} else if (
|
||||
req.query.main &&
|
||||
typeof req.packageConfig[req.query.main] === "string"
|
||||
) {
|
||||
// They specified a custom ?main field.
|
||||
// Deprecated, see https://github.com/unpkg/unpkg/issues/63
|
||||
filename = req.packageConfig[req.query.main];
|
||||
searchEntries(tarballStream, entryName, wantsHTML).then(
|
||||
({ entries, foundEntry }) => {
|
||||
if (!foundEntry) {
|
||||
return res
|
||||
.status(404)
|
||||
.type("text")
|
||||
.send(`Cannot find "${req.filename}" in ${req.packageSpec}`);
|
||||
}
|
||||
|
||||
// Count which packages are using this so we can warn them when we
|
||||
// remove this functionality.
|
||||
incrementCounter(
|
||||
"package-json-custom-main",
|
||||
req.packageSpec + "?main=" + req.query.main,
|
||||
1
|
||||
// If the foundEntry is a directory and there is no trailing slash
|
||||
// on the request path, we need to redirect to some "index" file
|
||||
// inside that directory. This is so our URLs work in a similar way
|
||||
// to require("lib") in node where it searches for `lib/index.js`
|
||||
// and `lib/index.json` when `lib` is a directory.
|
||||
if (foundEntry.type === "directory" && !wantsHTML) {
|
||||
const indexEntry =
|
||||
entries[path.join(entryName, "index.js")] ||
|
||||
entries[path.join(entryName, "index.json")];
|
||||
|
||||
if (indexEntry && indexEntry.type === "file") {
|
||||
return indexRedirect(req, res, indexEntry);
|
||||
} else {
|
||||
return res
|
||||
.status(404)
|
||||
.type("text")
|
||||
.send(
|
||||
`Cannot find an index in "${req.filename}" in ${
|
||||
req.packageSpec
|
||||
}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
req.entries = entries;
|
||||
req.entry = foundEntry;
|
||||
|
||||
next();
|
||||
}
|
||||
);
|
||||
} else if (typeof req.packageConfig.unpkg === "string") {
|
||||
// The "unpkg" field allows packages to explicitly declare the
|
||||
// file to serve at the bare URL.
|
||||
filename = req.packageConfig.unpkg;
|
||||
} else if (typeof req.packageConfig.browser === "string") {
|
||||
// Fall back to the "browser" field if declared (only support strings).
|
||||
// Deprecated, see https://github.com/unpkg/unpkg/issues/63
|
||||
filename = req.packageConfig.browser;
|
||||
|
||||
// Count which packages + versions are actually using this fallback
|
||||
// so we can warn them when we deprecate this functionality.
|
||||
incrementCounter("package-json-browser-fallback", req.packageSpec, 1);
|
||||
} else {
|
||||
// Fall back to "main" or / (same as npm).
|
||||
filename = req.packageConfig.main || "/";
|
||||
}
|
||||
|
||||
resolveFile(
|
||||
path.join(req.packageDir, filename),
|
||||
useIndex,
|
||||
(error, file, stats) => {
|
||||
if (error) console.error(error);
|
||||
|
||||
if (file == null) {
|
||||
return res
|
||||
.status(404)
|
||||
.type("text")
|
||||
.send(
|
||||
`Cannot find module "${filename}" in package ${req.packageSpec}`
|
||||
);
|
||||
}
|
||||
|
||||
filename = file.replace(req.packageDir, "");
|
||||
|
||||
if (req.query.main != null) {
|
||||
// Permanently redirect ?main requests to their exact files.
|
||||
// Deprecated, see https://github.com/unpkg/unpkg/issues/63
|
||||
delete req.query.main;
|
||||
|
||||
return res.redirect(
|
||||
301,
|
||||
createPackageURL(
|
||||
req.packageName,
|
||||
req.packageVersion,
|
||||
filename,
|
||||
createSearch(req.query)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
if (getBasename(req.filename) !== getBasename(filename)) {
|
||||
// Redirect to the exact file so relative imports resolve correctly.
|
||||
// Cache module redirects for 1 minute.
|
||||
return res
|
||||
.set({
|
||||
"Cache-Control": "public, max-age=60",
|
||||
"Cache-Tag": "redirect,module-redirect"
|
||||
})
|
||||
.redirect(
|
||||
302,
|
||||
createPackageURL(
|
||||
req.packageName,
|
||||
req.packageVersion,
|
||||
filename,
|
||||
createSearch(req.query)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
req.filename = filename;
|
||||
req.stats = stats;
|
||||
|
||||
next();
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = findFile;
|
||||
|
|
|
@@ -0,0 +1,39 @@
const getContentType = require("../getContentType");

it("gets a content type of text/plain for LICENSE|README|CHANGES|AUTHORS|Makefile", () => {
  expect(getContentType("AUTHORS")).toBe("text/plain");
  expect(getContentType("CHANGES")).toBe("text/plain");
  expect(getContentType("LICENSE")).toBe("text/plain");
  expect(getContentType("Makefile")).toBe("text/plain");
  expect(getContentType("PATENTS")).toBe("text/plain");
  expect(getContentType("README")).toBe("text/plain");
});

it("gets a content type of text/plain for .*rc files", () => {
  expect(getContentType(".eslintrc")).toBe("text/plain");
  expect(getContentType(".babelrc")).toBe("text/plain");
  expect(getContentType(".anythingrc")).toBe("text/plain");
});

it("gets a content type of text/plain for .git* files", () => {
  expect(getContentType(".gitignore")).toBe("text/plain");
  expect(getContentType(".gitanything")).toBe("text/plain");
});

it("gets a content type of text/plain for .*ignore files", () => {
  expect(getContentType(".eslintignore")).toBe("text/plain");
  expect(getContentType(".anythingignore")).toBe("text/plain");
});

it("gets a content type of text/plain for .ts files", () => {
  expect(getContentType("app.ts")).toBe("text/plain");
  expect(getContentType("app.d.ts")).toBe("text/plain");
});

it("gets a content type of text/plain for .flow files", () => {
  expect(getContentType("app.js.flow")).toBe("text/plain");
});

it("gets a content type of text/plain for .lock files", () => {
  expect(getContentType("yarn.lock")).toBe("text/plain");
});
@@ -1,35 +0,0 @@
const getFileContentType = require("../getFileContentType");

it("gets a content type of text/plain for LICENSE|README|CHANGES|AUTHORS|Makefile", () => {
  expect(getFileContentType("AUTHORS")).toBe("text/plain");
  expect(getFileContentType("CHANGES")).toBe("text/plain");
  expect(getFileContentType("LICENSE")).toBe("text/plain");
  expect(getFileContentType("Makefile")).toBe("text/plain");
  expect(getFileContentType("PATENTS")).toBe("text/plain");
  expect(getFileContentType("README")).toBe("text/plain");
});

it("gets a content type of text/plain for .*rc files", () => {
  expect(getFileContentType(".eslintrc")).toBe("text/plain");
  expect(getFileContentType(".babelrc")).toBe("text/plain");
  expect(getFileContentType(".anythingrc")).toBe("text/plain");
});

it("gets a content type of text/plain for .git* files", () => {
  expect(getFileContentType(".gitignore")).toBe("text/plain");
  expect(getFileContentType(".gitanything")).toBe("text/plain");
});

it("gets a content type of text/plain for .*ignore files", () => {
  expect(getFileContentType(".eslintignore")).toBe("text/plain");
  expect(getFileContentType(".anythingignore")).toBe("text/plain");
});

it("gets a content type of text/plain for .ts files", () => {
  expect(getFileContentType("app.ts")).toBe("text/plain");
  expect(getFileContentType("app.d.ts")).toBe("text/plain");
});

it("gets a content type of text/plain for .flow files", () => {
  expect(getFileContentType("app.js.flow")).toBe("text/plain");
});
@@ -0,0 +1,5 @@
function addLeadingSlash(name) {
  return name.charAt(0) === "/" ? name : "/" + name;
}

module.exports = addLeadingSlash;
@@ -1,9 +0,0 @@
const path = require("path");
const tmpdir = require("os-tmpdir");

function createTempPath(name, version) {
  const hyphenName = name.replace(/\//g, "-");
  return path.join(tmpdir(), `unpkg-${hyphenName}-${version}`);
}

module.exports = createTempPath;
@@ -0,0 +1,13 @@
const fetch = require("isomorphic-fetch");
const gunzip = require("gunzip-maybe");
const tar = require("tar-stream");

function fetchArchive(packageConfig) {
  const tarballURL = packageConfig.dist.tarball;

  return fetch(tarballURL).then(res =>
    res.body.pipe(gunzip()).pipe(tar.extract())
  );
}

module.exports = fetchArchive;
@@ -1,43 +0,0 @@
const fetch = require("isomorphic-fetch");
const gunzip = require("gunzip-maybe");
const tar = require("tar-fs");

function stripNamePrefix(header) {
  // Most packages have header names that look like "package/index.js"
  // so we shorten that to just "index.js" here. A few packages use a
  // prefix other than "package/". e.g. the firebase package uses the
  // "firebase_npm/" prefix. So we just strip the first dir name.
  header.name = header.name.replace(/^[^/]+\//, "");
  return header;
}

function ignoreLinks(file, header) {
  return (
    header.type === "link" ||
    header.type === "symlink" ||
    (header.type === "directory" && !header.name.includes("/")) // Empty directory, see #99
  );
}

function extractResponse(response, outputDir) {
  return new Promise((resolve, reject) => {
    const extract = tar.extract(outputDir, {
      readable: true, // All dirs/files should be readable.
      map: stripNamePrefix,
      ignore: ignoreLinks
    });

    response.body
      .pipe(gunzip())
      .pipe(extract)
      .on("finish", resolve)
      .on("error", reject);
  });
}

function fetchPackage(tarballURL, outputDir) {
  console.log(`info: Fetching ${tarballURL} and extracting to ${outputDir}`);
  return fetch(tarballURL).then(res => extractResponse(res, outputDir));
}

module.exports = fetchPackage;
@@ -1,4 +1,4 @@
require("isomorphic-fetch");
const fetch = require("isomorphic-fetch");

const config = require("../config");
@@ -0,0 +1,22 @@
const mime = require("mime");

mime.define({
  "text/plain": [
    "authors",
    "changes",
    "license",
    "makefile",
    "patents",
    "readme",
    "ts",
    "flow"
  ]
});

const textFiles = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore|\.lock)$/i;

function getContentType(file) {
  return textFiles.test(file) ? "text/plain" : mime.lookup(file);
}

module.exports = getContentType;
@@ -1,26 +0,0 @@
const fs = require("fs");
const path = require("path");

const getFileStats = require("./getFileStats");

function getEntries(dir) {
  return new Promise((resolve, reject) => {
    fs.readdir(dir, function(error, files) {
      if (error) {
        reject(error);
      } else {
        resolve(
          Promise.all(
            files.map(file => getFileStats(path.join(dir, file)))
          ).then(statsArray => {
            return statsArray.map((stats, index) => {
              return { file: files[index], stats };
            });
          })
        );
      }
    });
  });
}

module.exports = getEntries;
@@ -1,22 +0,0 @@
const mime = require("mime");

mime.define({
  "text/plain": [
    "authors",
    "changes",
    "license",
    "makefile",
    "patents",
    "readme",
    "ts",
    "flow"
  ]
});

const TextFiles = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore)$/i;

function getFileContentType(file) {
  return TextFiles.test(file) ? "text/plain" : mime.lookup(file);
}

module.exports = getFileContentType;
@@ -1,15 +0,0 @@
const fs = require("fs");

function getFileStats(file) {
  return new Promise((resolve, reject) => {
    fs.lstat(file, (error, stats) => {
      if (error) {
        reject(error);
      } else {
        resolve(stats);
      }
    });
  });
}

module.exports = getFileStats;
@@ -1,12 +0,0 @@
function getFileType(stats) {
  if (stats.isFile()) return "file";
  if (stats.isDirectory()) return "directory";
  if (stats.isBlockDevice()) return "blockDevice";
  if (stats.isCharacterDevice()) return "characterDevice";
  if (stats.isSymbolicLink()) return "symlink";
  if (stats.isSocket()) return "socket";
  if (stats.isFIFO()) return "fifo";
  return "unknown";
}

module.exports = getFileType;
@@ -0,0 +1,7 @@
const SRIToolbox = require("sri-toolbox");

function getIntegrity(data) {
  return SRIToolbox.generate({ algorithms: ["sha384"] }, data);
}

module.exports = getIntegrity;
@ -1,83 +0,0 @@
|
|||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const SRIToolbox = require("sri-toolbox");
|
||||
|
||||
const getFileContentType = require("./getFileContentType");
|
||||
const getFileStats = require("./getFileStats");
|
||||
const getFileType = require("./getFileType");
|
||||
|
||||
function getEntries(dir, file, maximumDepth) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readdir(path.join(dir, file), (error, files) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
resolve(
|
||||
Promise.all(
|
||||
files.map(f => getFileStats(path.join(dir, file, f)))
|
||||
).then(statsArray => {
|
||||
return Promise.all(
|
||||
statsArray.map((stats, index) =>
|
||||
getMetadataRecursive(
|
||||
dir,
|
||||
path.join(file, files[index]),
|
||||
stats,
|
||||
maximumDepth - 1
|
||||
)
|
||||
)
|
||||
);
|
||||
})
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function formatTime(time) {
|
||||
return new Date(time).toISOString();
|
||||
}
|
||||
|
||||
function getIntegrity(file) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readFile(file, (error, data) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
resolve(SRIToolbox.generate({ algorithms: ["sha384"] }, data));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function getMetadataRecursive(dir, file, stats, maximumDepth) {
|
||||
const metadata = {
|
||||
lastModified: formatTime(stats.mtime),
|
||||
contentType: getFileContentType(file),
|
||||
path: file,
|
||||
size: stats.size,
|
||||
type: getFileType(stats)
|
||||
};
|
||||
|
||||
if (stats.isFile()) {
|
||||
return getIntegrity(path.join(dir, file)).then(integrity => {
|
||||
metadata.integrity = integrity;
|
||||
return metadata;
|
||||
});
|
||||
}
|
||||
|
||||
if (!stats.isDirectory() || maximumDepth === 0)
|
||||
return Promise.resolve(metadata);
|
||||
|
||||
return getEntries(dir, file, maximumDepth).then(files => {
|
||||
metadata.files = files;
|
||||
return metadata;
|
||||
});
|
||||
}
|
||||
|
||||
function getMetadata(baseDir, path, stats, maximumDepth, callback) {
|
||||
getMetadataRecursive(baseDir, path, stats, maximumDepth).then(metadata => {
|
||||
callback(null, metadata);
|
||||
}, callback);
|
||||
}
|
||||
|
||||
module.exports = getMetadata;
|
|
@ -1,78 +0,0 @@
|
|||
const fs = require("fs");
|
||||
const mkdirp = require("mkdirp");
|
||||
const lockfile = require("proper-lockfile");
|
||||
|
||||
const createMutex = require("./createMutex");
|
||||
const createTempPath = require("./createTempPath");
|
||||
const fetchPackage = require("./fetchPackage");
|
||||
|
||||
const fetchMutex = createMutex((packageConfig, callback) => {
|
||||
const tarballURL = packageConfig.dist.tarball;
|
||||
const outputDir = createTempPath(packageConfig.name, packageConfig.version);
|
||||
|
||||
fs.access(outputDir, error => {
|
||||
if (error) {
|
||||
if (error.code === "ENOENT" || error.code === "ENOTDIR") {
|
||||
// ENOENT or ENOTDIR are to be expected when we haven't yet
|
||||
// fetched a package for the first time. Carry on!
|
||||
mkdirp.sync(outputDir);
|
||||
const release = lockfile.lockSync(outputDir);
|
||||
|
||||
fetchPackage(tarballURL, outputDir).then(
|
||||
() => {
|
||||
release();
|
||||
callback(null, outputDir);
|
||||
},
|
||||
error => {
|
||||
release();
|
||||
callback(error);
|
||||
}
|
||||
);
|
||||
} else {
|
||||
callback(error);
|
||||
}
|
||||
} else {
|
||||
lockfile.check(outputDir).then(locked => {
|
||||
if (locked) {
|
||||
// Another process on this same machine has locked the
|
||||
// directory. We need to wait for it to be unlocked
|
||||
// before we callback.
|
||||
const timer = setInterval(() => {
|
||||
lockfile.check(outputDir).then(
|
||||
locked => {
|
||||
if (!locked) {
|
||||
clearInterval(timer);
|
||||
callback(null, outputDir);
|
||||
}
|
||||
},
|
||||
error => {
|
||||
clearInterval(timer);
|
||||
callback(error);
|
||||
}
|
||||
);
|
||||
}, 10);
|
||||
|
||||
timer.unref();
|
||||
} else {
|
||||
// Best case: we already have this package cached on disk
|
||||
// and it's not locked!
|
||||
callback(null, outputDir);
|
||||
}
|
||||
}, callback);
|
||||
}
|
||||
});
|
||||
}, packageConfig => packageConfig.dist.tarball);
|
||||
|
||||
function getPackage(packageConfig) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fetchMutex(packageConfig, (error, value) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
resolve(value);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = getPackage;
|
yarn.lock
@ -1312,10 +1312,25 @@ bser@^2.0.0:
|
|||
dependencies:
|
||||
node-int64 "^0.4.0"
|
||||
|
||||
buffer-alloc-unsafe@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0"
|
||||
|
||||
buffer-alloc@^1.1.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec"
|
||||
dependencies:
|
||||
buffer-alloc-unsafe "^1.1.0"
|
||||
buffer-fill "^1.0.0"
|
||||
|
||||
buffer-equal-constant-time@1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
|
||||
|
||||
buffer-fill@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c"
|
||||
|
||||
buffer-from@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.0.0.tgz#4cb8832d23612589b0406e9e2956c17f06fdf531"
|
||||
|
@ -2877,6 +2892,10 @@ fresh@0.5.2:
|
|||
version "0.5.2"
|
||||
resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
|
||||
|
||||
fs-constants@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
|
||||
|
||||
fs-minipass@^1.2.5:
|
||||
version "1.2.5"
|
||||
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.5.tgz#06c277218454ec288df77ada54a03b8702aacb9d"
|
||||
|
@ -4956,6 +4975,10 @@ object-keys@^1.0.8:
|
|||
version "1.0.11"
|
||||
resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d"
|
||||
|
||||
object-path@0.6.0:
|
||||
version "0.6.0"
|
||||
resolved "https://registry.yarnpkg.com/object-path/-/object-path-0.6.0.tgz#b69a7d110937934f336ca561fd9be1ad7b7e0cb7"
|
||||
|
||||
object-visit@^1.0.0:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb"
|
||||
|
@ -5060,7 +5083,7 @@ os-locale@^2.0.0:
|
|||
lcid "^1.0.0"
|
||||
mem "^1.1.0"
|
||||
|
||||
os-tmpdir@^1.0.0, os-tmpdir@^1.0.1, os-tmpdir@^1.0.2, os-tmpdir@~1.0.2:
|
||||
os-tmpdir@^1.0.0, os-tmpdir@^1.0.1, os-tmpdir@~1.0.2:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
|
||||
|
||||
|
@ -5580,13 +5603,6 @@ prop-types@^15.5.10, prop-types@^15.5.4, prop-types@^15.5.8, prop-types@^15.6.0:
|
|||
loose-envify "^1.3.1"
|
||||
object-assign "^4.1.1"
|
||||
|
||||
proper-lockfile@^3.0.2:
|
||||
version "3.0.2"
|
||||
resolved "https://registry.yarnpkg.com/proper-lockfile/-/proper-lockfile-3.0.2.tgz#d30b3b83ecb157e08fe0d411f2393bc384b77ad1"
|
||||
dependencies:
|
||||
graceful-fs "^4.1.11"
|
||||
retry "^0.10.1"
|
||||
|
||||
proxy-addr@~2.0.2:
|
||||
version "2.0.2"
|
||||
resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.2.tgz#6571504f47bb988ec8180253f85dd7e14952bdec"
|
||||
|
@ -5612,13 +5628,6 @@ public-encrypt@^4.0.0:
|
|||
parse-asn1 "^5.0.0"
|
||||
randombytes "^2.0.1"
|
||||
|
||||
pump@^1.0.0:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/pump/-/pump-1.0.3.tgz#5dfe8311c33bbf6fc18261f9f34702c47c08a954"
|
||||
dependencies:
|
||||
end-of-stream "^1.1.0"
|
||||
once "^1.3.1"
|
||||
|
||||
pump@^2.0.0:
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909"
|
||||
|
@ -5832,6 +5841,18 @@ readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable
|
|||
string_decoder "~1.0.3"
|
||||
util-deprecate "~1.0.1"
|
||||
|
||||
readable-stream@^2.3.0:
|
||||
version "2.3.6"
|
||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf"
|
||||
dependencies:
|
||||
core-util-is "~1.0.0"
|
||||
inherits "~2.0.3"
|
||||
isarray "~1.0.0"
|
||||
process-nextick-args "~2.0.0"
|
||||
safe-buffer "~5.1.1"
|
||||
string_decoder "~1.1.1"
|
||||
util-deprecate "~1.0.1"
|
||||
|
||||
readdirp@^2.0.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78"
|
||||
|
@ -6105,10 +6126,6 @@ ret@~0.1.10:
|
|||
version "0.1.15"
|
||||
resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc"
|
||||
|
||||
retry@^0.10.1:
|
||||
version "0.10.1"
|
||||
resolved "https://registry.yarnpkg.com/retry/-/retry-0.10.1.tgz#e76388d217992c252750241d3d3956fed98d8ff4"
|
||||
|
||||
right-align@^0.1.1:
|
||||
version "0.1.3"
|
||||
resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef"
|
||||
|
@ -6374,6 +6391,12 @@ sockjs@0.3.19:
|
|||
faye-websocket "^0.10.0"
|
||||
uuid "^3.0.1"
|
||||
|
||||
sort-by@^1.2.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/sort-by/-/sort-by-1.2.0.tgz#ed92bbff9fd2284b41f6503e38496607b225fe6f"
|
||||
dependencies:
|
||||
object-path "0.6.0"
|
||||
|
||||
sort-keys@^1.0.0:
|
||||
version "1.1.2"
|
||||
resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad"
|
||||
|
@ -6580,6 +6603,12 @@ string_decoder@^1.0.0, string_decoder@~1.0.3:
|
|||
dependencies:
|
||||
safe-buffer "~5.1.0"
|
||||
|
||||
string_decoder@~1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
|
||||
dependencies:
|
||||
safe-buffer "~5.1.0"
|
||||
|
||||
stringstream@~0.0.4:
|
||||
version "0.0.5"
|
||||
resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878"
|
||||
|
@ -6707,15 +6736,6 @@ tapable@^0.2.7:
|
|||
version "0.2.8"
|
||||
resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.8.tgz#99372a5c999bf2df160afc0d74bed4f47948cd22"
|
||||
|
||||
tar-fs@^1.16.2:
|
||||
version "1.16.2"
|
||||
resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-1.16.2.tgz#17e5239747e399f7e77344f5f53365f04af53577"
|
||||
dependencies:
|
||||
chownr "^1.0.1"
|
||||
mkdirp "^0.5.1"
|
||||
pump "^1.0.0"
|
||||
tar-stream "^1.1.2"
|
||||
|
||||
tar-pack@^3.4.0:
|
||||
version "3.4.1"
|
||||
resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.1.tgz#e1dbc03a9b9d3ba07e896ad027317eb679a10a1f"
|
||||
|
@ -6729,13 +6749,16 @@ tar-pack@^3.4.0:
|
|||
tar "^2.2.1"
|
||||
uid-number "^0.0.6"
|
||||
|
||||
tar-stream@^1.1.2:
|
||||
version "1.5.5"
|
||||
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.5.5.tgz#5cad84779f45c83b1f2508d96b09d88c7218af55"
|
||||
tar-stream@^1.6.1:
|
||||
version "1.6.1"
|
||||
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.1.tgz#f84ef1696269d6223ca48f6e1eeede3f7e81f395"
|
||||
dependencies:
|
||||
bl "^1.0.0"
|
||||
buffer-alloc "^1.1.0"
|
||||
end-of-stream "^1.0.0"
|
||||
readable-stream "^2.0.0"
|
||||
fs-constants "^1.0.0"
|
||||
readable-stream "^2.3.0"
|
||||
to-buffer "^1.1.0"
|
||||
xtend "^4.0.0"
|
||||
|
||||
tar@^2.2.1:
|
||||
|
@ -6825,6 +6848,10 @@ to-arraybuffer@^1.0.0:
|
|||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43"
|
||||
|
||||
to-buffer@^1.1.0:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/to-buffer/-/to-buffer-1.1.1.tgz#493bd48f62d7c43fcded313a03dcadb2e1213a80"
|
||||
|
||||
to-fast-properties@^1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47"
|
||||
|
|