Don't cache packages on the filesystem
Should help with transient errors reported in #86, #104, and #110
This commit is contained in:
39
server/utils/__tests__/getContentType-test.js
Normal file
39
server/utils/__tests__/getContentType-test.js
Normal file
@ -0,0 +1,39 @@
|
||||
// Tests for getContentType: files npm packages commonly ship without a
// useful extension (LICENSE, README, dotfiles, .ts/.flow/.lock, etc.)
// should be reported as text/plain so they render inline in a browser.
const getContentType = require("../getContentType");

it("gets a content type of text/plain for LICENSE|README|CHANGES|AUTHORS|Makefile", () => {
  expect(getContentType("AUTHORS")).toBe("text/plain");
  expect(getContentType("CHANGES")).toBe("text/plain");
  expect(getContentType("LICENSE")).toBe("text/plain");
  expect(getContentType("Makefile")).toBe("text/plain");
  expect(getContentType("PATENTS")).toBe("text/plain");
  expect(getContentType("README")).toBe("text/plain");
});

it("gets a content type of text/plain for .*rc files", () => {
  expect(getContentType(".eslintrc")).toBe("text/plain");
  expect(getContentType(".babelrc")).toBe("text/plain");
  expect(getContentType(".anythingrc")).toBe("text/plain");
});

it("gets a content type of text/plain for .git* files", () => {
  expect(getContentType(".gitignore")).toBe("text/plain");
  expect(getContentType(".gitanything")).toBe("text/plain");
});

it("gets a content type of text/plain for .*ignore files", () => {
  expect(getContentType(".eslintignore")).toBe("text/plain");
  expect(getContentType(".anythingignore")).toBe("text/plain");
});

it("gets a content type of text/plain for .ts files", () => {
  expect(getContentType("app.ts")).toBe("text/plain");
  expect(getContentType("app.d.ts")).toBe("text/plain");
});

it("gets a content type of text/plain for .flow files", () => {
  expect(getContentType("app.js.flow")).toBe("text/plain");
});

it("gets a content type of text/plain for .lock files", () => {
  expect(getContentType("yarn.lock")).toBe("text/plain");
});
|
@ -1,35 +0,0 @@
|
||||
// Tests for getFileContentType: extensionless files and dotfiles
// should be reported as text/plain. (Note: unlike the newer
// getContentType, this version has no .lock coverage.)
const getFileContentType = require("../getFileContentType");

it("gets a content type of text/plain for LICENSE|README|CHANGES|AUTHORS|Makefile", () => {
  expect(getFileContentType("AUTHORS")).toBe("text/plain");
  expect(getFileContentType("CHANGES")).toBe("text/plain");
  expect(getFileContentType("LICENSE")).toBe("text/plain");
  expect(getFileContentType("Makefile")).toBe("text/plain");
  expect(getFileContentType("PATENTS")).toBe("text/plain");
  expect(getFileContentType("README")).toBe("text/plain");
});

it("gets a content type of text/plain for .*rc files", () => {
  expect(getFileContentType(".eslintrc")).toBe("text/plain");
  expect(getFileContentType(".babelrc")).toBe("text/plain");
  expect(getFileContentType(".anythingrc")).toBe("text/plain");
});

it("gets a content type of text/plain for .git* files", () => {
  expect(getFileContentType(".gitignore")).toBe("text/plain");
  expect(getFileContentType(".gitanything")).toBe("text/plain");
});

it("gets a content type of text/plain for .*ignore files", () => {
  expect(getFileContentType(".eslintignore")).toBe("text/plain");
  expect(getFileContentType(".anythingignore")).toBe("text/plain");
});

it("gets a content type of text/plain for .ts files", () => {
  expect(getFileContentType("app.ts")).toBe("text/plain");
  expect(getFileContentType("app.d.ts")).toBe("text/plain");
});

it("gets a content type of text/plain for .flow files", () => {
  expect(getFileContentType("app.js.flow")).toBe("text/plain");
});
|
5
server/utils/addLeadingSlash.js
Normal file
5
server/utils/addLeadingSlash.js
Normal file
@ -0,0 +1,5 @@
|
||||
/**
 * Ensures a path string begins with exactly one added "/".
 * An empty string becomes "/"; a string already starting with "/"
 * is returned unchanged.
 *
 * @param {string} name - path or path segment
 * @returns {string} the name, guaranteed to start with "/"
 */
function addLeadingSlash(name) {
  if (name.charAt(0) === "/") {
    return name;
  }

  return "/" + name;
}
|
||||
|
||||
module.exports = addLeadingSlash;
|
@ -1,9 +0,0 @@
|
||||
const path = require("path");
const tmpdir = require("os-tmpdir");

/**
 * Builds the temp-directory path used to cache an extracted package.
 *
 * @param {string} name - package name; scoped names contain "/"
 *   (e.g. "@scope/pkg") and are flattened with "-" so the result is a
 *   single directory name
 * @param {string} version - package version
 * @returns {string} absolute path under the OS temp dir
 */
function createTempPath(name, version) {
  const flatName = name.replace(/\//g, "-");
  return path.join(tmpdir(), "unpkg-" + flatName + "-" + version);
}

module.exports = createTempPath;
|
13
server/utils/fetchArchive.js
Normal file
13
server/utils/fetchArchive.js
Normal file
@ -0,0 +1,13 @@
|
||||
const fetch = require("isomorphic-fetch");
const gunzip = require("gunzip-maybe");
const tar = require("tar-stream");

/**
 * Downloads the tarball for the given npm package config and resolves
 * with a tar-stream extract stream of its (gunzipped) entries, so
 * callers can read package files without touching the filesystem.
 *
 * @param {object} packageConfig - npm registry package document;
 *   only `dist.tarball` is read here
 * @returns {Promise<stream>} resolves with the tar extract stream
 */
function fetchArchive(packageConfig) {
  return fetch(packageConfig.dist.tarball).then(response =>
    response.body.pipe(gunzip()).pipe(tar.extract())
  );
}

module.exports = fetchArchive;
|
@ -1,43 +0,0 @@
|
||||
const fetch = require("isomorphic-fetch");
|
||||
const gunzip = require("gunzip-maybe");
|
||||
const tar = require("tar-fs");
|
||||
|
||||
/**
 * Drops the leading directory from a tar entry's name, mutating and
 * returning the same header object. Most packages prefix entries with
 * "package/" (so "package/index.js" becomes "index.js"), but a few use
 * another prefix — e.g. firebase uses "firebase_npm/" — so the first
 * path segment is stripped regardless of what it is.
 *
 * @param {{name: string}} header - tar entry header (mutated in place)
 * @returns {{name: string}} the same header, with `name` rewritten
 */
function stripNamePrefix(header) {
  const nameWithoutPrefix = header.name.replace(/^[^/]+\//, "");
  header.name = nameWithoutPrefix;
  return header;
}
|
||||
|
||||
/**
 * Ignore predicate for tar extraction: skips hard links, symlinks, and
 * empty top-level directory entries. `file` is unused but kept to match
 * the (file, header) callback signature expected by the extractor.
 *
 * @param {string} file - destination path (unused)
 * @param {{type: string, name: string}} header - tar entry header
 * @returns {boolean} true when the entry should be skipped
 */
function ignoreLinks(file, header) {
  if (header.type === "link" || header.type === "symlink") {
    return true;
  }

  // A directory whose name has no "/" is an empty top-level
  // directory entry; see #99.
  return header.type === "directory" && !header.name.includes("/");
}
|
||||
|
||||
/**
 * Streams an HTTP response body through gunzip and a tar extractor
 * into outputDir. Entry names are normalized by stripNamePrefix and
 * links/empty top-level dirs are skipped via ignoreLinks.
 *
 * @param {object} response - fetch response whose `body` is a stream
 * @param {string} outputDir - directory to extract into
 * @returns {Promise<void>} resolves when extraction finishes,
 *   rejects on a stream error
 */
function extractResponse(response, outputDir) {
  return new Promise((resolve, reject) => {
    const extractStream = tar.extract(outputDir, {
      readable: true, // All dirs/files should be readable.
      map: stripNamePrefix,
      ignore: ignoreLinks
    });

    response.body
      .pipe(gunzip())
      .pipe(extractStream)
      .on("error", reject)
      .on("finish", resolve);
  });
}
|
||||
|
||||
/**
 * Downloads the tarball at tarballURL and extracts it into outputDir.
 *
 * @param {string} tarballURL - registry tarball URL
 * @param {string} outputDir - destination directory
 * @returns {Promise<void>} resolves when extraction is complete
 */
function fetchPackage(tarballURL, outputDir) {
  console.log(`info: Fetching ${tarballURL} and extracting to ${outputDir}`);

  return fetch(tarballURL).then(response =>
    extractResponse(response, outputDir)
  );
}
|
||||
|
||||
module.exports = fetchPackage;
|
@ -1,4 +1,4 @@
|
||||
require("isomorphic-fetch");
|
||||
const fetch = require("isomorphic-fetch");
|
||||
|
||||
const config = require("../config");
|
||||
|
||||
|
22
server/utils/getContentType.js
Normal file
22
server/utils/getContentType.js
Normal file
@ -0,0 +1,22 @@
|
||||
const mime = require("mime");

// Teach mime about extensionless/odd files commonly shipped in npm
// packages so they are served as plain text instead of falling through
// to mime's default.
mime.define({
  "text/plain": [
    "authors",
    "changes",
    "license",
    "makefile",
    "patents",
    "readme",
    "ts",
    "flow"
  ]
});

// Dotfiles (.babelrc, .gitignore, .npmignore, ...) and lockfiles,
// matched on the final path segment.
const textFiles = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore|\.lock)$/i;

/**
 * Returns the Content-Type to serve for a file path, treating common
 * config/dotfiles and lockfiles as text/plain and deferring to mime
 * for everything else.
 *
 * @param {string} file - file name or path
 * @returns {string} a MIME type
 */
function getContentType(file) {
  if (textFiles.test(file)) {
    return "text/plain";
  }

  return mime.lookup(file);
}

module.exports = getContentType;
|
@ -1,26 +0,0 @@
|
||||
const fs = require("fs");
const path = require("path");

const getFileStats = require("./getFileStats");

/**
 * Lists a directory and resolves with one { file, stats } record per
 * entry, where stats comes from getFileStats.
 *
 * @param {string} dir - directory to read
 * @returns {Promise<Array<{file: string, stats: object}>>}
 *   rejects if the directory cannot be read or any entry cannot
 *   be stat'ed
 */
function getEntries(dir) {
  return new Promise((resolve, reject) => {
    fs.readdir(dir, (error, files) => {
      if (error) {
        reject(error);
        return;
      }

      const statsPromises = files.map(file =>
        getFileStats(path.join(dir, file))
      );

      resolve(
        Promise.all(statsPromises).then(statsArray =>
          statsArray.map((stats, index) => ({ file: files[index], stats }))
        )
      );
    });
  });
}

module.exports = getEntries;
|
@ -1,22 +0,0 @@
|
||||
const mime = require("mime");

// Teach mime about extensionless/odd files commonly shipped in npm
// packages so they are served as plain text.
mime.define({
  "text/plain": [
    "authors",
    "changes",
    "license",
    "makefile",
    "patents",
    "readme",
    "ts",
    "flow"
  ]
});

// Dotfiles such as .babelrc, .gitignore, and .npmignore, matched on
// the final path segment.
const textFilePattern = /\/?(\.[a-z]*rc|\.git[a-z]*|\.[a-z]*ignore)$/i;

/**
 * Returns the Content-Type to serve for a file path, treating common
 * config/dotfiles as text/plain and deferring to mime otherwise.
 *
 * @param {string} file - file name or path
 * @returns {string} a MIME type
 */
function getFileContentType(file) {
  if (textFilePattern.test(file)) {
    return "text/plain";
  }

  return mime.lookup(file);
}

module.exports = getFileContentType;
|
@ -1,15 +0,0 @@
|
||||
const fs = require("fs");

/**
 * Promise wrapper around fs.lstat. lstat (not stat) is used, so
 * symbolic links report their own stats rather than their target's.
 *
 * @param {string} file - path to stat
 * @returns {Promise<fs.Stats>} rejects with the fs error on failure
 */
function getFileStats(file) {
  return new Promise((resolve, reject) => {
    fs.lstat(file, (error, stats) =>
      error ? reject(error) : resolve(stats)
    );
  });
}

module.exports = getFileStats;
|
@ -1,12 +0,0 @@
|
||||
/**
 * Maps an fs.Stats-like object to a short type label. Checks are
 * performed in a fixed order; the first matching predicate wins, and
 * anything unrecognized is reported as "unknown".
 *
 * @param {object} stats - fs.Stats (or any object exposing the same
 *   boolean predicate methods)
 * @returns {string} one of "file", "directory", "blockDevice",
 *   "characterDevice", "symlink", "socket", "fifo", "unknown"
 */
function getFileType(stats) {
  const checks = [
    ["isFile", "file"],
    ["isDirectory", "directory"],
    ["isBlockDevice", "blockDevice"],
    ["isCharacterDevice", "characterDevice"],
    ["isSymbolicLink", "symlink"],
    ["isSocket", "socket"],
    ["isFIFO", "fifo"]
  ];

  for (const [method, type] of checks) {
    if (stats[method]()) {
      return type;
    }
  }

  return "unknown";
}
|
||||
|
||||
module.exports = getFileType;
|
7
server/utils/getIntegrity.js
Normal file
7
server/utils/getIntegrity.js
Normal file
@ -0,0 +1,7 @@
|
||||
const SRIToolbox = require("sri-toolbox");

/**
 * Computes a Subresource Integrity (SRI) string for the given file
 * contents using the sha384 algorithm.
 *
 * @param {Buffer|string} data - file contents
 * @returns {string} an SRI string, e.g. "sha384-..."
 */
function getIntegrity(data) {
  const options = { algorithms: ["sha384"] };
  return SRIToolbox.generate(options, data);
}

module.exports = getIntegrity;
|
@ -1,83 +0,0 @@
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const SRIToolbox = require("sri-toolbox");
|
||||
|
||||
const getFileContentType = require("./getFileContentType");
|
||||
const getFileStats = require("./getFileStats");
|
||||
const getFileType = require("./getFileType");
|
||||
|
||||
/**
 * Reads the directory at dir/file and resolves with metadata for each
 * entry, recursing via getMetadataRecursive with one less level of
 * remaining depth.
 *
 * @param {string} dir - base directory
 * @param {string} file - directory path relative to dir
 * @param {number} maximumDepth - remaining recursion budget
 * @returns {Promise<Array<object>>} metadata records for the entries
 */
function getEntries(dir, file, maximumDepth) {
  return new Promise((resolve, reject) => {
    fs.readdir(path.join(dir, file), (error, files) => {
      if (error) {
        reject(error);
        return;
      }

      const statsPromises = files.map(f =>
        getFileStats(path.join(dir, file, f))
      );

      resolve(
        Promise.all(statsPromises).then(statsArray =>
          Promise.all(
            statsArray.map((stats, index) =>
              getMetadataRecursive(
                dir,
                path.join(file, files[index]),
                stats,
                maximumDepth - 1
              )
            )
          )
        )
      );
    });
  });
}
|
||||
|
||||
/**
 * Normalizes a timestamp (Date, epoch ms, or parseable value) to an
 * ISO-8601 string in UTC.
 *
 * @param {Date|number|string} time - value accepted by the Date constructor
 * @returns {string} e.g. "1970-01-01T00:00:00.000Z"
 */
function formatTime(time) {
  const date = new Date(time);
  return date.toISOString();
}
|
||||
|
||||
/**
 * Reads a file from disk and resolves with its Subresource Integrity
 * (sha384) string.
 *
 * @param {string} file - path to read
 * @returns {Promise<string>} rejects with the fs error on read failure
 */
function getIntegrity(file) {
  return new Promise((resolve, reject) => {
    fs.readFile(file, (error, data) => {
      if (error) {
        reject(error);
        return;
      }

      resolve(SRIToolbox.generate({ algorithms: ["sha384"] }, data));
    });
  });
}
|
||||
|
||||
/**
 * Builds a metadata record for `file` (relative to `dir`). Regular
 * files additionally get an `integrity` field; directories recurse
 * into a `files` array until maximumDepth reaches 0. Anything else
 * (or a directory at depth 0) resolves with the base record only.
 *
 * @param {string} dir - base directory
 * @param {string} file - path relative to dir
 * @param {object} stats - fs.Stats for the file
 * @param {number} maximumDepth - remaining recursion budget
 * @returns {Promise<object>} the metadata record
 */
function getMetadataRecursive(dir, file, stats, maximumDepth) {
  const metadata = {
    lastModified: formatTime(stats.mtime),
    contentType: getFileContentType(file),
    path: file,
    size: stats.size,
    type: getFileType(stats)
  };

  if (stats.isFile()) {
    return getIntegrity(path.join(dir, file)).then(integrity =>
      Object.assign(metadata, { integrity })
    );
  }

  if (!stats.isDirectory() || maximumDepth === 0) {
    return Promise.resolve(metadata);
  }

  return getEntries(dir, file, maximumDepth).then(files =>
    Object.assign(metadata, { files })
  );
}
|
||||
|
||||
/**
 * Callback-style entry point around getMetadataRecursive.
 *
 * @param {string} baseDir - base directory
 * @param {string} path - path relative to baseDir
 * @param {object} stats - fs.Stats for the path
 * @param {number} maximumDepth - recursion budget for directories
 * @param {function} callback - node-style (error, metadata) callback
 */
function getMetadata(baseDir, path, stats, maximumDepth, callback) {
  getMetadataRecursive(baseDir, path, stats, maximumDepth).then(
    metadata => callback(null, metadata),
    callback
  );
}
|
||||
|
||||
module.exports = getMetadata;
|
@ -1,78 +0,0 @@
|
||||
const fs = require("fs");
|
||||
const mkdirp = require("mkdirp");
|
||||
const lockfile = require("proper-lockfile");
|
||||
|
||||
const createMutex = require("./createMutex");
|
||||
const createTempPath = require("./createTempPath");
|
||||
const fetchPackage = require("./fetchPackage");
|
||||
|
||||
// Fetches and extracts a package tarball into a per-package temp dir,
// deduplicating concurrent requests within this process by tarball URL
// (the second createMutex argument is the key function). Cross-process
// coordination is done with an on-disk lock via proper-lockfile.
// Callback receives (error, outputDir).
const fetchMutex = createMutex((packageConfig, callback) => {
  const tarballURL = packageConfig.dist.tarball;
  const outputDir = createTempPath(packageConfig.name, packageConfig.version);

  fs.access(outputDir, error => {
    if (error) {
      if (error.code === "ENOENT" || error.code === "ENOTDIR") {
        // ENOENT or ENOTDIR are to be expected when we haven't yet
        // fetched a package for the first time. Carry on!
        // Create the dir and lock it before downloading so other
        // processes see the in-progress fetch.
        mkdirp.sync(outputDir);
        const release = lockfile.lockSync(outputDir);

        fetchPackage(tarballURL, outputDir).then(
          () => {
            release();
            callback(null, outputDir);
          },
          error => {
            // NOTE(review): on failure the lock is released but the
            // partially-extracted outputDir is left behind —
            // presumably a later request retries over it; verify.
            release();
            callback(error);
          }
        );
      } else {
        callback(error);
      }
    } else {
      // The dir already exists; check whether another process holds
      // the lock (i.e. is still downloading into it).
      lockfile.check(outputDir).then(locked => {
        if (locked) {
          // Another process on this same machine has locked the
          // directory. We need to wait for it to be unlocked
          // before we callback.
          const timer = setInterval(() => {
            lockfile.check(outputDir).then(
              locked => {
                if (!locked) {
                  clearInterval(timer);
                  callback(null, outputDir);
                }
              },
              error => {
                clearInterval(timer);
                callback(error);
              }
            );
          }, 10); // poll the lock every 10ms

          // Don't let the poll timer keep the process alive.
          timer.unref();
        } else {
          // Best case: we already have this package cached on disk
          // and it's not locked!
          callback(null, outputDir);
        }
      }, callback);
    }
  });
}, packageConfig => packageConfig.dist.tarball);
|
||||
|
||||
/**
 * Promise wrapper around fetchMutex: resolves with the directory
 * containing the extracted package, rejects if the fetch/extract
 * (or lock handling) fails.
 *
 * @param {object} packageConfig - npm registry package document
 * @returns {Promise<string>} the output directory path
 */
function getPackage(packageConfig) {
  return new Promise((resolve, reject) => {
    fetchMutex(packageConfig, (error, outputDir) =>
      error ? reject(error) : resolve(outputDir)
    );
  });
}
|
||||
|
||||
module.exports = getPackage;
|
Reference in New Issue
Block a user