Skip to content

Commit

Permalink
Bump minimum supported node version to 12
Browse files Browse the repository at this point in the history
  • Loading branch information
Shubham Kanodia committed Mar 7, 2022
1 parent 068be4b commit 5de0c65
Show file tree
Hide file tree
Showing 2 changed files with 160 additions and 62 deletions.
110 changes: 76 additions & 34 deletions src/cache.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,48 +7,62 @@
* @see https://github.com/babel/babel-loader/issues/34
* @see https://github.com/babel/babel-loader/pull/41
*/
const fs = require("fs");
const os = require("os");
const path = require("path");
const zlib = require("zlib");
const crypto = require("crypto");
const findCacheDir = require("find-cache-dir");
const { open } = require("lmdb");
const { promisify } = require("util");

const transform = require("./transform");
// Lazily instantiated when needed
let defaultCacheDirectory = null;
let cacheDB = null;

const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);
const gunzip = promisify(zlib.gunzip);
const gzip = promisify(zlib.gzip);
const makeDir = require("make-dir");

/**
 * Read the contents from the compressed file.
 *
 * Cache entries are stored as JSON on disk; when `compress` is true the
 * entry is gzip-compressed and carries an extra ".gz" suffix.
 *
 * @async
 * @params {String} filename
 * @params {Boolean} compress
 */
const read = async function (filename, compress) {
  // The ".gz" suffix must mirror what `write` produced for this entry.
  const data = await readFile(filename + (compress ? ".gz" : ""));
  const content = compress ? await gunzip(data) : data;

  return JSON.parse(content.toString());
};
/**
 * Write contents into a compressed file.
 *
 * Serializes `result` to JSON and, when `compress` is true, gzips it and
 * appends a ".gz" suffix so that `read` can locate the entry again.
 *
 * @async
 * @params {String} filename
 * @params {Boolean} compress
 * @params {String} result
 */
const write = async function (filename, compress, result) {
  const content = JSON.stringify(result);

  const data = compress ? await gzip(content) : content;
  return await writeFile(filename + (compress ? ".gz" : ""), data);
};

/**
* Build the cache key for the cached file
* Build the filename for the cached file
*
* @params {String} source File source code
* @params {Object} options Options used
*
* @return {String}
*/
const fileCacheKey = function (source, identifier, options) {
const filename = function (source, identifier, options) {
// md4 hashing is not supported starting with node v17.0.0
const majorNodeVersion = parseInt(process.versions.node.split(".")[0], 10);
let hashType = "md4";
Expand All @@ -62,7 +76,7 @@ const fileCacheKey = function (source, identifier, options) {

hash.update(contents);

return hash.digest("hex");
return hash.digest("hex") + ".json";
};

/**
Expand All @@ -71,21 +85,51 @@ const fileCacheKey = function (source, identifier, options) {
* @params {String} directory
* @params {Object} params
*/
const handleCache = async function (directory, params) {
  const {
    source,
    options = {},
    cacheIdentifier,
    cacheDirectory,
    cacheCompression,
  } = params;

  const file = path.join(directory, filename(source, cacheIdentifier, options));

  try {
    // No errors mean that the file was previously cached
    // we just need to return it
    return await read(file, cacheCompression);
  } catch (err) {}

  // Only fall back to the OS temp dir when the user did NOT pin an explicit
  // cache directory and we are not already writing to the temp dir.
  const fallback =
    typeof cacheDirectory !== "string" && directory !== os.tmpdir();

  // Make sure the directory exists.
  try {
    await makeDir(directory);
  } catch (err) {
    if (fallback) {
      return handleCache(os.tmpdir(), params);
    }

    throw err;
  }

  // Otherwise just transform the file
  // return it to the user asap and write it in cache
  const result = await transform(source, options);

  try {
    await write(file, cacheCompression, result);
  } catch (err) {
    if (fallback) {
      // Fallback to tmpdir if node_modules folder not writable
      return handleCache(os.tmpdir(), params);
    }

    throw err;
  }

  return result;
};
Expand Down Expand Up @@ -129,7 +173,5 @@ module.exports = async function (params) {
directory = defaultCacheDirectory;
}

await initCacheDB(directory, params.cacheCompression);

return await handleCache(params);
return await handleCache(directory, params);
};
112 changes: 84 additions & 28 deletions test/cache.test.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import test from "ava";
import fs from "fs";
import path from "path";
import rimraf from "rimraf";
import webpack from "webpack";
Expand All @@ -11,7 +12,6 @@ const defaultCacheDir = path.join(
const cacheDir = path.join(__dirname, "output/cache/cachefiles");
const outputDir = path.join(__dirname, "output/cache");
const babelLoader = path.join(__dirname, "../lib");
const { open } = require("lmdb");

const globalConfig = {
mode: "development",
Expand Down Expand Up @@ -48,7 +48,7 @@ test.beforeEach.cb(t => rimraf(defaultCacheDir, t.end));
test.afterEach.cb(t => rimraf(t.context.directory, t.end));
test.afterEach.cb(t => rimraf(t.context.cacheDirectory, t.end));

test.cb("should build a cache database in the cache directory", t => {
test.cb("should output files to cache directory", t => {
const config = Object.assign({}, globalConfig, {
output: {
path: t.context.directory,
Expand All @@ -73,14 +73,16 @@ test.cb("should build a cache database in the cache directory", t => {
t.deepEqual(stats.compilation.errors, []);
t.deepEqual(stats.compilation.warnings, []);

const cacheDB = open(t.context.cacheDirectory, { readOnly: true });
t.true(cacheDB.getStats().entryCount > 0);
t.end();
fs.readdir(t.context.cacheDirectory, (err, files) => {
t.is(err, null);
t.true(files.length > 0);
t.end();
});
});
});

test.serial.cb.only(
"should add entries to cache db at standard cache dir by default",
test.serial.cb(
"should output json.gz files to standard cache dir by default",
t => {
const config = Object.assign({}, globalConfig, {
output: {
Expand All @@ -106,15 +108,56 @@ test.serial.cb.only(
t.deepEqual(stats.compilation.errors, []);
t.deepEqual(stats.compilation.warnings, []);

const cacheDB = open(defaultCacheDir, { readOnly: true });
t.true(cacheDB.getStats().entryCount > 0);
t.end();
fs.readdir(defaultCacheDir, (err, files) => {
files = files.filter(file => /\b[0-9a-f]{5,40}\.json\.gz\b/.test(file));

t.is(err, null);
t.true(files.length > 0);
t.end();
});
});
},
);

test.serial.cb(
"should add entries to cache db at standard cache dir if set to true in query",
"should output non-compressed files to standard cache dir when cacheCompression is set to false",
t => {
const config = Object.assign({}, globalConfig, {
output: {
path: t.context.directory,
},
module: {
rules: [
{
test: /\.jsx?/,
loader: babelLoader,
exclude: /node_modules/,
options: {
cacheDirectory: true,
cacheCompression: false,
presets: ["@babel/preset-env"],
},
},
],
},
});

webpack(config, err => {
t.is(err, null);

fs.readdir(defaultCacheDir, (err, files) => {
files = files.filter(file => /\b[0-9a-f]{5,40}\b/.test(file));

t.is(err, null);
t.true(files.length > 0);
t.end();
});
});
},
);

test.serial.cb(
"should output files to standard cache dir if set to true in query",
t => {
const config = Object.assign({}, globalConfig, {
output: {
Expand All @@ -136,9 +179,14 @@ test.serial.cb(
t.deepEqual(stats.compilation.errors, []);
t.deepEqual(stats.compilation.warnings, []);

const cacheDB = open(t.context.cacheDirectory, { readOnly: true });
t.true(cacheDB.getStats().entryCount > 0);
t.end();
fs.readdir(defaultCacheDir, (err, files) => {
files = files.filter(file => /\b[0-9a-f]{5,40}\.json\.gz\b/.test(file));

t.is(err, null);

t.true(files.length > 0);
t.end();
});
});
},
);
Expand Down Expand Up @@ -172,14 +220,16 @@ test.cb("should read from cache directory if cached file exists", t => {

webpack(config, err => {
t.is(err, null);
const cacheDB = open(t.context.cacheDirectory, { readOnly: true });
t.true(cacheDB.getStats().entryCount > 0);
t.end();
fs.readdir(t.context.cacheDirectory, (err, files) => {
t.is(err, null);
t.true(files.length > 0);
t.end();
});
});
});
});

test.cb("should have one cache entry per module", t => {
test.cb("should have one file per module", t => {
const config = Object.assign({}, globalConfig, {
output: {
path: t.context.directory,
Expand All @@ -204,13 +254,15 @@ test.cb("should have one cache entry per module", t => {
t.deepEqual(stats.compilation.errors, []);
t.deepEqual(stats.compilation.warnings, []);

const cacheDB = open(t.context.cacheDirectory, { readOnly: true });
t.true(cacheDB.getStats().entryCount === 3);
t.end();
fs.readdir(t.context.cacheDirectory, (err, files) => {
t.is(err, null);
t.true(files.length === 3);
t.end();
});
});
});

test.cb("should add a new cache entry if the identifier changes", t => {
test.cb("should generate a new file if the identifier changes", t => {
const configs = [
Object.assign({}, globalConfig, {
output: {
Expand Down Expand Up @@ -261,9 +313,11 @@ test.cb("should add a new cache entry if the identifier changes", t => {
counter -= 1;

if (!counter) {
const cacheDB = open(t.context.cacheDirectory, { readOnly: true });
t.true(cacheDB.getStats().entryCount === 6);
t.end();
fs.readdir(t.context.cacheDirectory, (err, files) => {
t.is(err, null);
t.true(files.length === 6);
t.end();
});
}
});
});
Expand Down Expand Up @@ -320,8 +374,10 @@ test.cb("should allow to specify the .babelrc file", t => {
t.deepEqual(multiStats.stats[1].compilation.errors, []);
t.deepEqual(multiStats.stats[1].compilation.warnings, []);

const cacheDB = open(t.context.cacheDirectory, { readOnly: true });
t.true(cacheDB.getStats().entryCount === 1);
t.end();
fs.readdir(t.context.cacheDirectory, (err, files) => {
t.is(err, null);
t.true(files.length === 2);
t.end();
});
});
});

0 comments on commit 5de0c65

Please sign in to comment.