From 71d4389ff8a35330c3fedce97761094e243d4faf Mon Sep 17 00:00:00 2001
From: Gar <gar+gh@danger.computer>
Date: Tue, 3 May 2022 14:02:17 -0700
Subject: [PATCH] fix: move to async functions where possible (#106)

This refactors the code to use async functions where possible. It also
uses `@npmcli/fs` consistently (it was already a dependency, just not
used). It also reorders the test files to match their sources in
`./lib`. Tests were not refactored (except where needed to move to
`@npmcli/fs`) in the interest of an easier PR review.
---
 .eslintrc.local.js                            |   3 +
 lib/content/read.js                           | 163 ++++----
 lib/content/rm.js                             |  18 +-
 lib/content/write.js                          |  33 +-
 lib/entry-index.js                            | 195 +++++-----
 lib/get.js                                    | 140 ++++---
 lib/put.js                                    |  35 +-
 lib/util/fix-owner.js                         |  75 ++--
 lib/util/move-file.js                         |  83 ++---
 lib/util/tmp.js                               |  12 +-
 lib/verify.js                                 | 348 ++++++++----------
 package.json                                  |   4 -
 test/{content.read.js => content/read.js}     |  25 +-
 test/{content.rm.js => content/rm.js}         |  15 +-
 .../write.chownr.js}                          |  28 +-
 test/{content.write.js => content/write.js}   |   8 +-
 test/{index.find.js => entry-index.find.js}   |   2 +-
 ...{index.insert.js => entry-index.insert.js} |  20 +-
 test/entry-index.js                           |  26 +-
 test/{util => fixtures}/cache-content.js      |   0
 test/{util => fixtures}/cache-index.js        |   0
 test/get.js                                   |  10 +-
 test/ls.js                                    |   2 +-
 test/put.js                                   |  12 +-
 test/rm.js                                    |  20 +-
 test/strict.js                                |  34 --
 test/util.move-file.js                        | 251 -------------
 test/{util.fix-owner.js => util/fix-owner.js} |   6 +-
 test/util/move-file.js                        | 201 ++++++++++
 test/{util.tmp.js => util/tmp.js}             |  20 +-
 test/verify.js                                |  53 ++-
 31 files changed, 802 insertions(+), 1040 deletions(-)
 rename test/{content.read.js => content/read.js} (96%)
 rename test/{content.rm.js => content/rm.js} (68%)
 rename test/{content.write.chownr.js => content/write.chownr.js} (69%)
 rename test/{content.write.js => content/write.js} (97%)
 rename test/{index.find.js => entry-index.find.js} (99%)
 rename test/{index.insert.js => entry-index.insert.js} (93%)
 rename test/{util => fixtures}/cache-content.js (100%)
 rename test/{util => fixtures}/cache-index.js (100%)
 delete mode 100644 test/strict.js
 delete mode 100644 test/util.move-file.js
 rename test/{util.fix-owner.js => util/fix-owner.js} (97%)
 create mode 100644 test/util/move-file.js
 rename test/{util.tmp.js => util/tmp.js} (82%)

diff --git a/.eslintrc.local.js b/.eslintrc.local.js
index fcc15bb..ce6b1e1 100644
--- a/.eslintrc.local.js
+++ b/.eslintrc.local.js
@@ -1,5 +1,8 @@
+'use strict'
+
 module.exports = {
   rules: {
+    strict: 'error',
     'no-shadow': 0, // XXX: fix this later
   },
 }
diff --git a/lib/content/read.js b/lib/content/read.js
index 0aeb454..f5128fe 100644
--- a/lib/content/read.js
+++ b/lib/content/read.js
@@ -1,42 +1,35 @@
 'use strict'
 
-const util = require('util')
-
-const fs = require('fs')
+const fs = require('@npmcli/fs')
 const fsm = require('fs-minipass')
 const ssri = require('ssri')
 const contentPath = require('./path')
 const Pipeline = require('minipass-pipeline')
 
-const lstat = util.promisify(fs.lstat)
-const readFile = util.promisify(fs.readFile)
-const copyFile = util.promisify(fs.copyFile)
-
 module.exports = read
 
 const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
 
-function read (cache, integrity, opts = {}) {
+async function read (cache, integrity, opts = {}) {
   const { size } = opts
-  return withContentSri(cache, integrity, (cpath, sri) => {
+  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
     // get size
-    return lstat(cpath).then(stat => ({ stat, cpath, sri }))
-  }).then(({ stat, cpath, sri }) => {
-    if 
(typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } + const stat = await fs.lstat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } - return readFile(cpath, null).then((data) => { - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } - return data - }) - }) + return data } const readPipeline = (cpath, size, sri, stream) => { @@ -58,7 +51,7 @@ module.exports.sync = readSync function readSync (cache, integrity, opts = {}) { const { size } = opts return withContentSriSync(cache, integrity, (cpath, sri) => { - const data = fs.readFileSync(cpath) + const data = fs.readFileSync(cpath, { encoding: null }) if (typeof size === 'number' && size !== data.length) { throw sizeError(size, data.length) } @@ -77,16 +70,19 @@ module.exports.readStream = readStream function readStream (cache, integrity, opts = {}) { const { size } = opts const stream = new Pipeline() - withContentSri(cache, integrity, (cpath, sri) => { - // just lstat to ensure it exists - return lstat(cpath).then((stat) => ({ stat, cpath, sri })) - }).then(({ stat, cpath, sri }) => { + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // just lstat to ensure it exists + const stat = await fs.lstat(cpath) + return { stat, cpath, sri } + }) if (typeof size === 'number' && size !== stat.size) { return stream.emit('error', sizeError(size, stat.size)) } readPipeline(cpath, stat.size, sri, stream) - }, er => stream.emit('error', er)) + }).catch(err => stream.emit('error', err)) return stream } @@ -96,7 +92,7 @@ module.exports.copy.sync = copySync function copy (cache, integrity, dest) { return withContentSri(cache, integrity, (cpath, sri) => { - return copyFile(cpath, dest) + return fs.copyFile(cpath, dest) }) } @@ -108,14 +104,17 @@ function copySync (cache, integrity, dest) { module.exports.hasContent = hasContent -function hasContent (cache, integrity) { +async function hasContent (cache, integrity) { if (!integrity) { - return Promise.resolve(false) + return false } - return withContentSri(cache, integrity, (cpath, sri) => { - return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) - }).catch((err) => { + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.lstat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { if (err.code === 'ENOENT') { return false } @@ -128,7 +127,7 @@ function hasContent (cache, integrity) { return false } } - }) + } } module.exports.hasContent.sync = hasContentSync @@ -159,61 +158,47 @@ function hasContentSync (cache, integrity) { }) } -function withContentSri (cache, integrity, fn) { - const tryFn = () => { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. 
- const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - return Promise - .all(digests.map((meta) => { - return withContentSri(cache, meta, fn) - .catch((err) => { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - }) - })) - .then((results) => { - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - }) +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result } - } - return new Promise((resolve, reject) => { - try { - tryFn() - .then(resolve) - .catch(reject) - } catch (err) { - reject(err) + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError } - }) + + // Throw generic error + throw results.find((r) => r instanceof Error) + } } function withContentSriSync (cache, integrity, fn) { diff --git a/lib/content/rm.js b/lib/content/rm.js index 5061236..f733305 100644 --- a/lib/content/rm.js +++ b/lib/content/rm.js @@ -8,13 +8,13 @@ const rimraf = util.promisify(require('rimraf')) module.exports = rm -function rm (cache, integrity) { - return hasContent(cache, integrity).then((content) => { - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - return rimraf(contentPath(cache, content.sri)).then(() => true) - } else { - return false - } - }) +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await rimraf(contentPath(cache, content.sri)) + return true + } else { + return false + } } diff --git a/lib/content/write.js b/lib/content/write.js index b0aa18c..a877710 100644 --- a/lib/content/write.js +++ b/lib/content/write.js @@ -4,7 +4,7 @@ const util = require('util') const contentPath = require('./path') const fixOwner = require('../util/fix-owner') -const fs = require('fs') +const fs = require('@npmcli/fs') const moveFile = require('../util/move-file') const 
Minipass = require('minipass') const Pipeline = require('minipass-pipeline') @@ -15,8 +15,6 @@ const ssri = require('ssri') const uniqueFilename = require('unique-filename') const fsm = require('fs-minipass') -const writeFile = util.promisify(fs.writeFile) - module.exports = write async function write (cache, data, opts = {}) { @@ -36,7 +34,7 @@ async function write (cache, data, opts = {}) { const tmp = await makeTmp(cache, opts) try { - await writeFile(tmp.target, data, { flag: 'wx' }) + await fs.writeFile(tmp.target, data, { flag: 'wx' }) await moveToDestination(tmp, cache, sri, opts) return { integrity: sri, size: data.length } } finally { @@ -115,7 +113,7 @@ async function handleContent (inputStream, cache, opts) { } } -function pipeToTmp (inputStream, cache, tmpTarget, opts) { +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { let integrity let size const hashStream = ssri.integrityStream({ @@ -143,30 +141,27 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts) { outStream ) - return pipeline.promise().then(() => ({ integrity, size })) + await pipeline.promise() + return { integrity, size } } -function makeTmp (cache, opts) { +async function makeTmp (cache, opts) { const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ + await fixOwner.mkdirfix(cache, path.dirname(tmpTarget)) + return { target: tmpTarget, moved: false, - })) + } } -function moveToDestination (tmp, cache, sri, opts) { +async function moveToDestination (tmp, cache, sri, opts) { const destination = contentPath(cache, sri) const destDir = path.dirname(destination) - return fixOwner - .mkdirfix(cache, destDir) - .then(() => { - return moveFile(tmp.target, destination) - }) - .then(() => { - tmp.moved = true - return fixOwner.chownr(cache, destination) - }) + await fixOwner.mkdirfix(cache, destDir) + await moveFile(tmp.target, destination) + tmp.moved = true + await fixOwner.chownr(cache, destination) } function sizeError (expected, found) { diff --git a/lib/entry-index.js b/lib/entry-index.js index 9d44856..cbfa619 100644 --- a/lib/entry-index.js +++ b/lib/entry-index.js @@ -2,7 +2,7 @@ const util = require('util') const crypto = require('crypto') -const fs = require('fs') +const fs = require('@npmcli/fs') const Minipass = require('minipass') const path = require('path') const ssri = require('ssri') @@ -17,11 +17,6 @@ const _rimraf = require('rimraf') const rimraf = util.promisify(_rimraf) rimraf.sync = _rimraf.sync -const appendFile = util.promisify(fs.appendFile) -const readFile = util.promisify(fs.readFile) -const readdir = util.promisify(fs.readdir) -const writeFile = util.promisify(fs.writeFile) - module.exports.NotFoundError = class NotFoundError extends Error { constructor (cache, key) { super(`No cache entry for ${key} found in ${cache}`) @@ -85,7 +80,7 @@ async function compact (cache, key, matchFn, opts = {}) { } const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await fs.writeFile(tmp.target, newIndex, { flag: 'wx' }) await fixOwner.mkdirfix(cache, path.dirname(bucket)) // we use @npmcli/move-file directly here because we // want to overwrite the existing file @@ -118,7 +113,7 @@ async function compact (cache, key, matchFn, opts = {}) { module.exports.insert = insert -function insert (cache, key, integrity, opts = {}) { +async function insert (cache, key, integrity, opts = {}) { const { metadata, size } = opts const bucket = bucketPath(cache, key) const entry = 
{ @@ -128,36 +123,32 @@ function insert (cache, key, integrity, opts = {}) { size, metadata, } - return fixOwner - .mkdirfix(cache, path.dirname(bucket)) - .then(() => { - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - }) - .then(() => fixOwner.chownr(cache, bucket)) - .catch((err) => { - if (err.code === 'ENOENT') { - return undefined - } + try { + await fixOwner.mkdirfix(cache, path.dirname(bucket)) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await fs.appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + await fixOwner.chownr(cache, bucket) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } - throw err - // There's a class of race conditions that happen when things get deleted - // during fixOwner, or between the two mkdirfix/chownr calls. - // - // It's perfectly fine to just not bother in those cases and lie - // that the index entry was written. Because it's a cache. - }) - .then(() => { - return formatEntry(cache, entry) - }) + throw err + // There's a class of race conditions that happen when things get deleted + // during fixOwner, or between the two mkdirfix/chownr calls. + // + // It's perfectly fine to just not bother in those cases and lie + // that the index entry was written. Because it's a cache. 
+  }
+  return formatEntry(cache, entry)
 }
 
 module.exports.insert.sync = insertSync
 
@@ -187,25 +178,24 @@ function insertSync (cache, key, integrity, opts = {}) {
 
 module.exports.find = find
 
-function find (cache, key) {
+async function find (cache, key) {
   const bucket = bucketPath(cache, key)
-  return bucketEntries(bucket)
-    .then((entries) => {
-      return entries.reduce((latest, next) => {
-        if (next && next.key === key) {
-          return formatEntry(cache, next)
-        } else {
-          return latest
-        }
-      }, null)
-    })
-    .catch((err) => {
-      if (err.code === 'ENOENT') {
-        return null
+  try {
+    const entries = await bucketEntries(bucket)
+    return entries.reduce((latest, next) => {
+      if (next && next.key === key) {
+        return formatEntry(cache, next)
       } else {
-        throw err
+        return latest
       }
-    })
+    }, null)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return null
+    } else {
+      throw err
+    }
+  }
 }
 
 module.exports.find.sync = findSync
 
@@ -257,67 +247,64 @@ function lsStream (cache) {
   const indexDir = bucketDir(cache)
   const stream = new Minipass({ objectMode: true })
 
-  readdirOrEmpty(indexDir).then(buckets => Promise.all(
-    buckets.map(bucket => {
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const buckets = await readdirOrEmpty(indexDir)
+    await Promise.all(buckets.map(async (bucket) => {
       const bucketPath = path.join(indexDir, bucket)
-      return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all(
-        subbuckets.map(subbucket => {
-          const subbucketPath = path.join(bucketPath, subbucket)
-
-          // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
-          return readdirOrEmpty(subbucketPath).then(entries => Promise.all(
-            entries.map(entry => {
-              const entryPath = path.join(subbucketPath, entry)
-              return bucketEntries(entryPath).then(entries =>
-                // using a Map here prevents duplicate keys from
-                // showing up twice, I guess?
-                entries.reduce((acc, entry) => {
-                  acc.set(entry.key, entry)
-                  return acc
-                }, new Map())
-              ).then(reduced => {
-                // reduced is a map of key => entry
-                for (const entry of reduced.values()) {
-                  const formatted = formatEntry(cache, entry)
-                  if (formatted) {
-                    stream.write(formatted)
-                  }
-                }
-              }).catch(err => {
-                if (err.code === 'ENOENT') {
-                  return undefined
-                }
-                throw err
-              })
-            })
-          ))
-        })
-      ))
-    })
-  ))
-    .then(
-      () => stream.end(),
-      err => stream.emit('error', err)
    )
+      const subbuckets = await readdirOrEmpty(bucketPath)
+      await Promise.all(subbuckets.map(async (subbucket) => {
+        const subbucketPath = path.join(bucketPath, subbucket)
+
+        // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
+        const subbucketEntries = await readdirOrEmpty(subbucketPath)
+        await Promise.all(subbucketEntries.map(async (entry) => {
+          const entryPath = path.join(subbucketPath, entry)
+          try {
+            const entries = await bucketEntries(entryPath)
+            // using a Map here prevents duplicate keys from showing up
+            // twice, I guess? 
+ const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + }).catch(err => stream.emit('error', err)) return stream } module.exports.ls = ls -function ls (cache) { - return lsStream(cache).collect().then(entries => - entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) - ) +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) } module.exports.bucketEntries = bucketEntries -function bucketEntries (bucket, filter) { - return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) +async function bucketEntries (bucket, filter) { + const data = await fs.readFile(bucket, 'utf8') + return _bucketEntries(data, filter) } module.exports.bucketEntries.sync = bucketEntriesSync @@ -406,7 +393,7 @@ function formatEntry (cache, entry, keepAll) { } function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { + return fs.readdir(dir).catch((err) => { if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { return [] } diff --git a/lib/get.js b/lib/get.js index 58f357b..cc9d8f6 100644 --- a/lib/get.js +++ b/lib/get.js @@ -8,52 +8,48 @@ const index = require('./entry-index') const memo = require('./memoization') const read = require('./content/read') -function getData (cache, key, opts = {}) { +async function getData (cache, key, opts = {}) { const { integrity, memoize, size } = opts const memoized = memo.get(cache, key, opts) if (memoized && memoize !== false) { - return Promise.resolve({ + return { metadata: memoized.entry.metadata, data: memoized.data, integrity: memoized.entry.integrity, size: memoized.entry.size, - }) + } } - return index.find(cache, key, opts).then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } - return read(cache, entry.integrity, { integrity, size }).then((data) => { - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } } module.exports = getData -function getDataByDigest (cache, key, opts = {}) { +async function getDataByDigest (cache, key, opts = {}) { const { integrity, memoize, size } = opts const memoized = memo.get.byDigest(cache, key, opts) if (memoized && memoize !== false) { - return Promise.resolve(memoized) + return memoized } - return read(cache, key, { integrity, size }).then((res) => { - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res - }) + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res } module.exports.byDigest = getDataByDigest @@ -131,36 +127,35 @@ function getStream (cache, key, opts = {}) { } const stream = new Pipeline() - index - .find(cache, key) - .then((entry) => { - if (!entry) 
{ - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) }) - .catch((err) => stream.emit('error', err)) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + }).catch((err) => stream.emit('error', err)) return stream } @@ -204,27 +199,26 @@ function info (cache, key, opts = {}) { } module.exports.info = info -function copy (cache, key, dest, opts = {}) { - return index.find(cache, key, opts).then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } - return read.copy(cache, entry.integrity, dest, opts) - .then(() => { - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } } module.exports.copy = copy -function copyByDigest (cache, key, dest, opts = {}) { - return read.copy(cache, key, dest, opts).then(() => key) +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key } + module.exports.copy.byDigest = copyByDigest module.exports.hasContent = read.hasContent diff --git a/lib/put.js b/lib/put.js index eed5187..9fc932d 100644 --- a/lib/put.js +++ b/lib/put.js @@ -14,20 +14,16 @@ const putOpts = (opts) => ({ module.exports = putData -function putData (cache, key, data, opts = {}) { +async function putData (cache, key, data, opts = {}) { const { memoize } = opts opts = putOpts(opts) - return write(cache, data, opts).then((res) => { - return index - .insert(cache, key, res.integrity, { ...opts, size: res.size }) - .then((entry) => { - if (memoize) { - memo.put(cache, entry, data, opts) - } + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } - return res.integrity - }) - }) + return res.integrity } module.exports.stream = putStream @@ -68,17 +64,14 @@ 
function putStream (cache, key, opts = {}) { // last but not least, we write the index and emit hash and size, // and memoize if we're doing that pipeline.push(new Flush({ - flush () { + async flush () { if (!error) { - return index - .insert(cache, key, integrity, { ...opts, size }) - .then((entry) => { - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - }) + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) } }, })) diff --git a/lib/util/fix-owner.js b/lib/util/fix-owner.js index bc14def..182fcb0 100644 --- a/lib/util/fix-owner.js +++ b/lib/util/fix-owner.js @@ -33,40 +33,38 @@ const getSelf = () => { module.exports.chownr = fixOwner -function fixOwner (cache, filepath) { +async function fixOwner (cache, filepath) { if (!process.getuid) { // This platform doesn't need ownership fixing - return Promise.resolve() + return } getSelf() if (self.uid !== 0) { // almost certainly can't chown anyway - return Promise.resolve() + return } - return Promise.resolve(inferOwner(cache)).then((owner) => { - const { uid, gid } = owner + const { uid, gid } = await inferOwner(cache) - // No need to override if it's already what we used. - if (self.uid === uid && self.gid === gid) { - return - } + // No need to override if it's already what we used. + if (self.uid === uid && self.gid === gid) { + return + } - return inflight('fixOwner: fixing ownership on ' + filepath, () => - chownr( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ).catch((err) => { - if (err.code === 'ENOENT') { - return null - } - - throw err - }) - ) - }) + return inflight('fixOwner: fixing ownership on ' + filepath, () => + chownr( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ).catch((err) => { + if (err.code === 'ENOENT') { + return null + } + + throw err + }) + ) } module.exports.chownr.sync = fixOwnerSync @@ -105,26 +103,25 @@ function fixOwnerSync (cache, filepath) { module.exports.mkdirfix = mkdirfix -function mkdirfix (cache, p, cb) { +async function mkdirfix (cache, p, cb) { // we have to infer the owner _before_ making the directory, even though // we aren't going to use the results, since the cache itself might not // exist yet. If we mkdirp it, then our current uid/gid will be assumed // to be correct if it creates the cache folder in the process. 
- return Promise.resolve(inferOwner(cache)).then(() => { - return mkdirp(p) - .then((made) => { - if (made) { - return fixOwner(cache, made).then(() => made) - } - }) - .catch((err) => { - if (err.code === 'EEXIST') { - return fixOwner(cache, p).then(() => null) - } - - throw err - }) - }) + await inferOwner(cache) + try { + const made = await mkdirp(p) + if (made) { + await fixOwner(cache, made) + return made + } + } catch (err) { + if (err.code === 'EEXIST') { + await fixOwner(cache, p) + return null + } + throw err + } } module.exports.mkdirfix.sync = mkdirfixSync diff --git a/lib/util/move-file.js b/lib/util/move-file.js index 3739cea..a0b4041 100644 --- a/lib/util/move-file.js +++ b/lib/util/move-file.js @@ -1,18 +1,13 @@ 'use strict' -const fs = require('fs') -const util = require('util') -const chmod = util.promisify(fs.chmod) -const unlink = util.promisify(fs.unlink) -const stat = util.promisify(fs.stat) +const fs = require('@npmcli/fs') const move = require('@npmcli/move-file') const pinflight = require('promise-inflight') module.exports = moveFile -function moveFile (src, dest) { - const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ || - process.platform === 'win32' +async function moveFile (src, dest) { + const isWindows = process.platform === 'win32' // This isn't quite an fs.rename -- the assumption is that // if `dest` already exists, and we get certain errors while @@ -23,47 +18,39 @@ function moveFile (src, dest) { // content their own way. // // Note that, as the name suggests, this strictly only supports file moves. - return new Promise((resolve, reject) => { - fs.link(src, dest, (err) => { - if (err) { - if (isWindows && err.code === 'EPERM') { - // XXX This is a really weird way to handle this situation, as it - // results in the src file being deleted even though the dest - // might not exist. Since we pretty much always write files to - // deterministic locations based on content hash, this is likely - // ok (or at worst, just ends in a future cache miss). But it would - // be worth investigating at some time in the future if this is - // really what we want to do here. - return resolve() - } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { - // file already exists, so whatever - return resolve() - } else { - return reject(err) + try { + await fs.link(src, dest) + } catch (err) { + if (isWindows && err.code === 'EPERM') { + // XXX This is a really weird way to handle this situation, as it + // results in the src file being deleted even though the dest + // might not exist. Since we pretty much always write files to + // deterministic locations based on content hash, this is likely + // ok (or at worst, just ends in a future cache miss). But it would + // be worth investigating at some time in the future if this is + // really what we want to do here. + } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { + // file already exists, so whatever + } else { + throw err + } + } + try { + await Promise.all([ + fs.unlink(src), + !isWindows && fs.chmod(dest, '0444'), + ]) + } catch (e) { + return pinflight('cacache-move-file:' + dest, async () => { + await fs.stat(dest).catch((err) => { + if (err.code !== 'ENOENT') { + // Something else is wrong here. 
Bail bail bail + throw err } - } else { - return resolve() - } - }) - }) - .then(() => { - // content should never change for any reason, so make it read-only - return Promise.all([ - unlink(src), - !isWindows && chmod(dest, '0444'), - ]) - }) - .catch(() => { - return pinflight('cacache-move-file:' + dest, () => { - return stat(dest).catch((err) => { - if (err.code !== 'ENOENT') { - // Something else is wrong here. Bail bail bail - throw err - } - // file doesn't already exist! let's try a rename -> copy fallback - // only delete if it successfully copies - return move(src, dest) - }) }) + // file doesn't already exist! let's try a rename -> copy fallback + // only delete if it successfully copies + return move(src, dest) }) + } } diff --git a/lib/util/tmp.js b/lib/util/tmp.js index 0a5a50e..b4437cf 100644 --- a/lib/util/tmp.js +++ b/lib/util/tmp.js @@ -7,15 +7,13 @@ const path = require('path') module.exports.mkdir = mktmpdir -function mktmpdir (cache, opts = {}) { +async function mktmpdir (cache, opts = {}) { const { tmpPrefix } = opts const tmpDir = path.join(cache, 'tmp') - return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - .then(() => { - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) - }) + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) } module.exports.withTmp = withTmp diff --git a/lib/verify.js b/lib/verify.js index a39fb6c..52692a0 100644 --- a/lib/verify.js +++ b/lib/verify.js @@ -5,7 +5,7 @@ const util = require('util') const pMap = require('p-map') const contentPath = require('./content/path') const fixOwner = require('./util/fix-owner') -const fs = require('fs') +const fs = require('@npmcli/fs') const fsm = require('fs-minipass') const glob = util.promisify(require('glob')) const index = require('./entry-index') @@ -18,11 +18,6 @@ const globify = pattern => pattern.split('\\').join('/') const hasOwnProperty = (obj, key) => Object.prototype.hasOwnProperty.call(obj, key) -const stat = util.promisify(fs.stat) -const truncate = util.promisify(fs.truncate) -const writeFile = util.promisify(fs.writeFile) -const readFile = util.promisify(fs.readFile) - const verifyOpts = (opts) => ({ concurrency: 20, log: { silly () {} }, @@ -31,7 +26,7 @@ const verifyOpts = (opts) => ({ module.exports = verify -function verify (cache, opts) { +async function verify (cache, opts) { opts = verifyOpts(opts) opts.log.silly('verify', 'verifying cache at', cache) @@ -45,56 +40,47 @@ function verify (cache, opts) { markEndTime, ] - return steps - .reduce((promise, step, i) => { - const label = step.name - const start = new Date() - return promise.then((stats) => { - return step(cache, opts).then((s) => { - s && - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - - stats.runTime[label] = end - start - return Promise.resolve(stats) - }) + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] }) - }, Promise.resolve({})) - .then((stats) => { - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification 
finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats - }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats } -function markStartTime (cache, opts) { - return Promise.resolve({ startTime: new Date() }) +async function markStartTime (cache, opts) { + return { startTime: new Date() } } -function markEndTime (cache, opts) { - return Promise.resolve({ endTime: new Date() }) +async function markEndTime (cache, opts) { + return { endTime: new Date() } } -function fixPerms (cache, opts) { +async function fixPerms (cache, opts) { opts.log.silly('verify', 'fixing cache permissions') - return fixOwner - .mkdirfix(cache, cache) - .then(() => { - // TODO - fix file permissions too - return fixOwner.chownr(cache, cache) - }) - .then(() => null) + await fixOwner.mkdirfix(cache, cache) + // TODO - fix file permissions too + await fixOwner.chownr(cache, cache) + return null } // Implements a naive mark-and-sweep tracing garbage collector. @@ -106,7 +92,7 @@ function fixPerms (cache, opts) { // 4. If content is live, verify its checksum and delete it if it fails // 5. If content is not marked as live, rimraf it. // -function garbageCollect (cache, opts) { +async function garbageCollect (cache, opts) { opts.log.silly('verify', 'garbage collecting content') const indexStream = index.lsStream(cache) const liveContent = new Set() @@ -117,156 +103,135 @@ function garbageCollect (cache, opts) { liveContent.add(entry.integrity.toString()) }) - return new Promise((resolve, reject) => { + await new Promise((resolve, reject) => { indexStream.on('end', resolve).on('error', reject) - }).then(() => { - const contentDir = contentPath.contentDir(cache) - return glob(globify(path.join(contentDir, '**')), { - follow: false, - nodir: true, - nosort: true, - }).then((files) => { - return Promise.resolve({ - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - }).then((stats) => - pMap( - files, - (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - return verifyContent(f, integrity).then((info) => { - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - return stats - }) - } else { - // No entries refer to this content. We can delete. 
- stats.reclaimedCount++ - return stat(f).then((s) => { - return rimraf(f).then(() => { - stats.reclaimedSize += s.size - return stats - }) - }) - } - }, - { concurrency: opts.concurrency } - ).then(() => stats) - ) - }) }) -} - -function verifyContent (filepath, sri) { - return stat(filepath) - .then((s) => { - const contentInfo = { - size: s.size, - valid: true, - } - return ssri - .checkStream(new fsm.ReadStream(filepath), sri) - .catch((err) => { - if (err.code !== 'EINTEGRITY') { - throw err - } - - return rimraf(filepath).then(() => { - contentInfo.valid = false - }) - }) - .then(() => contentInfo) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } + const contentDir = contentPath.contentDir(cache) + const files = await glob(globify(path.join(contentDir, '**')), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await fs.stat(f) + await rimraf(f) + stats.reclaimedSize += s.size } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await fs.stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { throw err - }) + } + + await rimraf(filepath) + contentInfo.valid = false + } + return contentInfo } -function rebuildIndex (cache, opts) { +async function rebuildIndex (cache, opts) { opts.log.silly('verify', 'rebuilding index') - return index.ls(cache).then((entries) => { - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip 
+ } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) } } - return pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ).then(() => stats) - }) + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats } -function rebuildBucket (cache, bucket, stats, opts) { - return truncate(bucket._path).then(() => { - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. - return bucket.reduce((promise, entry) => { - return promise.then(() => { - const content = contentPath(cache, entry.integrity) - return stat(content) - .then(() => { - return index - .insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - }) - .then(() => { - stats.totalEntries++ - }) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - return - } - throw err - }) +async function rebuildBucket (cache, bucket, stats, opts) { + await fs.truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. + for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await fs.stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, }) - }, Promise.resolve()) - }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } } function cleanTmp (cache, opts) { @@ -278,7 +243,7 @@ function writeVerifile (cache, opts) { const verifile = path.join(cache, '_lastverified') opts.log.silly('verify', 'writing verifile to ' + verifile) try { - return writeFile(verifile, '' + +new Date()) + return fs.writeFile(verifile, `${Date.now()}`) } finally { fixOwner.chownr.sync(cache, verifile) } @@ -286,8 +251,7 @@ function writeVerifile (cache, opts) { module.exports.lastRun = lastRun -function lastRun (cache) { - return readFile(path.join(cache, '_lastverified'), 'utf8').then( - (data) => new Date(+data) - ) +async function lastRun (cache) { + const data = await fs.readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) } diff --git a/package.json b/package.json index 558e743..8641633 100644 --- a/package.json +++ b/package.json @@ -72,10 +72,6 @@ "@npmcli/template-oss": "3.4.3", "tap": "^16.0.0" }, - "tap": { - "100": true, - "test-regex": "test/[^/]*.js" - }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16.0.0" }, diff --git a/test/content.read.js b/test/content/read.js similarity index 96% rename from test/content.read.js rename to test/content/read.js index 8889ee0..79431fe 100644 --- a/test/content.read.js +++ b/test/content/read.js @@ -1,16 +1,13 @@ 'use strict' -const util = require('util') - -const fs = require('fs') +const fs = require('@npmcli/fs') const path = require('path') const ssri = require('ssri') const t = require('tap') -const readFile = util.promisify(fs.readFile) -const CacheContent = require('./util/cache-content') +const CacheContent = require('../fixtures/cache-content') -const read = require('../lib/content/read') +const read = 
require('../../lib/content/read') // defines reusable errors const genericError = new Error('ERR') @@ -19,11 +16,11 @@ const permissionError = new Error('EPERM') permissionError.code = 'EPERM' // helpers -const getRead = (t, opts) => t.mock('../lib/content/read', opts) +const getRead = (t, opts) => t.mock('../../lib/content/read', opts) const getReadLstatFailure = (t, err) => getRead(t, { - fs: Object.assign({}, require('fs'), { - lstat (path, cb) { - cb(err) + '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { + async lstat (path) { + throw err }, lstatSync () { throw err @@ -263,9 +260,9 @@ t.test('read: returns only first result if other hashes fails', function (t) { t.test('read: opening large files', function (t) { const CACHE = t.testdir() const mockedRead = getRead(t, { - fs: Object.assign({}, require('fs'), { - lstat (path, cb) { - cb(null, { size: Number.MAX_SAFE_INTEGER }) + '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { + async lstat (path) { + return { size: Number.MAX_SAFE_INTEGER } }, }), 'fs-minipass': { @@ -472,7 +469,7 @@ t.test('copy: copies content to a destination path', (t) => { return read .copy(CACHE, INTEGRITY, DEST) .then(() => { - return readFile(DEST) + return fs.readFile(DEST) }) .then((data) => { t.same(data, CONTENT, 'file successfully copied') diff --git a/test/content.rm.js b/test/content/rm.js similarity index 68% rename from test/content.rm.js rename to test/content/rm.js index e779a92..5db1172 100644 --- a/test/content.rm.js +++ b/test/content/rm.js @@ -1,14 +1,11 @@ 'use strict' -const contentPath = require('../lib/content/path') -const fs = require('fs') -const util = require('util') +const contentPath = require('../../lib/content/path') +const fs = require('@npmcli/fs') const t = require('tap') -const stat = util.promisify(fs.stat) - -const CacheContent = require('./util/cache-content') -const rm = require('../lib/content/rm') +const CacheContent = require('../fixtures/cache-content') +const rm = require('../../lib/content/rm') t.test('removes a content entry', function (t) { const CACHE = t.testdir( @@ -17,7 +14,7 @@ t.test('removes a content entry', function (t) { }) ) return rm(CACHE, 'sha1-deadbeef') - .then(() => stat(contentPath(CACHE, 'sha1-deadbeef'))) + .then(() => fs.stat(contentPath(CACHE, 'sha1-deadbeef'))) .then(() => { throw new Error('expected an error') }) @@ -30,7 +27,7 @@ t.test('removes a content entry', function (t) { t.test('works fine if entry missing', function (t) { const CACHE = t.testdir(CacheContent({})) return rm(CACHE, 'sha1-deadbeef') - .then(() => stat(contentPath(CACHE, 'sha1-deadbeef'))) + .then(() => fs.stat(contentPath(CACHE, 'sha1-deadbeef'))) .then(() => { throw new Error('expected an error') }) diff --git a/test/content.write.chownr.js b/test/content/write.chownr.js similarity index 69% rename from test/content.write.chownr.js rename to test/content/write.chownr.js index 080909b..e653691 100644 --- a/test/content.write.chownr.js +++ b/test/content/write.chownr.js @@ -6,39 +6,21 @@ const NEWGID = process.getgid() + 1 process.getuid = () => 0 const path = require('path') -const fs = require('fs') const ssri = require('ssri') const t = require('tap') -const contentPath = require('../lib/content/path') +const contentPath = require('../../lib/content/path') t.test('infers ownership from cache folder owner', (t) => { const CACHE = t.testdir({ cache: {} }) - const stat = fs.lstat - fs.lstat = (path, cb) => { - stat(path, (er, st) => { - if (st && path === CACHE) { - st.uid = NEWUID - st.gid = NEWGID - } - 
cb(er, st) - }) - } - - const statSync = fs.lstatSync - fs.lstatSync = (path) => { - const st = statSync(path) - if (path === CACHE) { - st.uid = NEWUID - st.gid = NEWGID - } - return st - } const CONTENT = 'foobarbaz' const INTEGRITY = ssri.fromData(CONTENT) const updatedPaths = [] - const write = t.mock('../lib/content/write', { + const write = t.mock('../../lib/content/write', { + 'infer-owner': async function (c) { + return { uid: NEWUID, gid: NEWGID } + }, chownr: function (p, uid, gid, cb) { process.nextTick(function () { const rel = path.relative(CACHE, p) diff --git a/test/content.write.js b/test/content/write.js similarity index 97% rename from test/content.write.js rename to test/content/write.js index 737714e..f565767 100644 --- a/test/content.write.js +++ b/test/content/write.js @@ -1,15 +1,15 @@ 'use strict' -const fs = require('fs') +const fs = require('@npmcli/fs') const path = require('path') const rimraf = require('rimraf') const ssri = require('ssri') const t = require('tap') -const CacheContent = require('./util/cache-content') -const contentPath = require('../lib/content/path') +const CacheContent = require('../fixtures/cache-content') +const contentPath = require('../../lib/content/path') -const write = require('../lib/content/write') +const write = require('../../lib/content/write') t.test('basic put', (t) => { const CACHE = t.testdir() diff --git a/test/index.find.js b/test/entry-index.find.js similarity index 99% rename from test/index.find.js rename to test/entry-index.find.js index 0dbce93..3c4e4de 100644 --- a/test/index.find.js +++ b/test/entry-index.find.js @@ -1,6 +1,6 @@ 'use strict' -const CacheIndex = require('./util/cache-index') +const CacheIndex = require('./fixtures/cache-index') const path = require('path') const t = require('tap') diff --git a/test/index.insert.js b/test/entry-index.insert.js similarity index 93% rename from test/index.insert.js rename to test/entry-index.insert.js index 23b057d..a278036 100644 --- a/test/index.insert.js +++ b/test/entry-index.insert.js @@ -1,14 +1,10 @@ 'use strict' -const util = require('util') - -const CacheIndex = require('./util/cache-index') +const CacheIndex = require('./fixtures/cache-index') const contentPath = require('../lib/content/path') -const fs = require('fs') +const fs = require('@npmcli/fs') const t = require('tap') -const readFile = util.promisify(fs.readFile) - const index = require('../lib/entry-index') const key = 'foo' @@ -41,7 +37,7 @@ t.test('basic insertion', function (t) { }, 'formatted entry returned' ) - return readFile(bucket, 'utf8') + return fs.readFile(bucket, 'utf8') }) .then((data) => { t.equal(data[0], '\n', 'first entry starts with a \\n') @@ -82,7 +78,7 @@ t.test('inserts additional entries into existing key', function (t) { ) .then(() => index.insert(cache, key, integrity, { size, metadata: 2 })) .then(() => { - return readFile(bucket, 'utf8') + return fs.readFile(bucket, 'utf8') }) .then((data) => { const entries = data @@ -135,7 +131,7 @@ t.test('separates entries even if one is corrupted', function (t) { const bucket = index.bucketPath(cache, key) return index .insert(cache, key, integrity, { size }) - .then(() => readFile(bucket, 'utf8')) + .then(() => fs.readFile(bucket, 'utf8')) .then((data) => { const entry = JSON.parse(data.split('\n')[4].split('\t')[1]) delete entry.time @@ -158,7 +154,7 @@ t.test('optional arbitrary metadata', function (t) { return index .insert(cache, key, integrity, { size, metadata: metadata }) .then(() => { - return readFile(bucket, 'utf8') + return 
fs.readFile(bucket, 'utf8') }) .then((data) => { const entry = JSON.parse(data.split('\t')[1]) @@ -225,7 +221,7 @@ t.test('path-breaking characters', function (t) { .insert(cache, newKey, integrity, { size }) .then(() => { const bucket = index.bucketPath(cache, newKey) - return readFile(bucket, 'utf8') + return fs.readFile(bucket, 'utf8') }) .then((data) => { const entry = JSON.parse(data.split('\t')[1]) @@ -253,7 +249,7 @@ t.test('extremely long keys', function (t) { .insert(cache, newKey, integrity, { size }) .then(() => { const bucket = index.bucketPath(cache, newKey) - return readFile(bucket, 'utf8') + return fs.readFile(bucket, 'utf8') }) .then((data) => { const entry = JSON.parse(data.split('\t')[1]) diff --git a/test/entry-index.js b/test/entry-index.js index 4fa531e..514e067 100644 --- a/test/entry-index.js +++ b/test/entry-index.js @@ -1,13 +1,13 @@ 'use strict' -const fs = require('fs') +const fs = require('@npmcli/fs') const path = require('path') const ssri = require('ssri') const t = require('tap') const index = require('../lib/entry-index') -const CacheContent = require('./util/cache-content') +const CacheContent = require('./fixtures/cache-content') // defines reusable errors const genericError = new Error('ERR') @@ -17,9 +17,9 @@ missingFileError.code = 'ENOENT' const getEntryIndex = (t, opts) => t.mock('../lib/entry-index', opts) const getEntryIndexReadFileFailure = (t, err) => getEntryIndex(t, { - fs: Object.assign({}, fs, { - readFile: (path, encode, cb) => { - cb(err) + '@npmcli/fs': Object.assign({}, fs, { + readFile: async (path, encode) => { + throw err }, readFileSync: () => { throw genericError @@ -257,9 +257,9 @@ t.test('find: error on parsing json data', (t) => { const cache = t.testdir(cacheContent) // mocks readFile in order to return a borked json payload const { find } = getEntryIndex(t, { - fs: Object.assign({}, require('fs'), { - readFile: (path, encode, cb) => { - cb(null, '\ncec8d2e4685534ed189b563c8ee1cb1cb7c72874\t{"""// foo') + '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { + readFile: async (path, encode) => { + return '\ncec8d2e4685534ed189b563c8ee1cb1cb7c72874\t{"""// foo' }, }), }) @@ -291,7 +291,7 @@ t.test('find.sync: retrieve from bucket containing multiple entries', (t) => { '\n46b1607f427665a99668c02d3a4cc52061afd83a\t{"key":"bar", "integrity": "bar"}', ] const { find } = getEntryIndex(t, { - fs: Object.assign({}, require('fs'), { + '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { readFileSync: (path, encode) => entries.join(''), }), }) @@ -319,7 +319,7 @@ t.test('find.sync: unknown error on finding entries', (t) => { t.test('find.sync: retrieve entry with invalid content', (t) => { const cache = t.testdir(cacheContent) const { find } = getEntryIndex(t, { - fs: Object.assign({}, require('fs'), { + '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { readFileSync: (path, encode) => '\nb6589fc6ab0dc82cf12099d1c2d40ab994e8410c\t0', }), @@ -412,9 +412,9 @@ t.test('lsStream: missing files error', (t) => { t.test('lsStream: unknown error reading dirs', (t) => { const cache = t.testdir(cacheContent) const { lsStream } = getEntryIndex(t, { - fs: Object.assign({}, require('fs'), { - readdir: (path, cb) => { - cb(genericError) + '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { + readdir: async (path) => { + throw genericError }, }), }) diff --git a/test/util/cache-content.js b/test/fixtures/cache-content.js similarity index 100% rename from test/util/cache-content.js rename to test/fixtures/cache-content.js diff --git 
a/test/util/cache-index.js b/test/fixtures/cache-index.js similarity index 100% rename from test/util/cache-index.js rename to test/fixtures/cache-index.js diff --git a/test/get.js b/test/get.js index 5231da6..1fe770a 100644 --- a/test/get.js +++ b/test/get.js @@ -2,7 +2,7 @@ const util = require('util') -const fs = require('fs') +const fs = require('@npmcli/fs') const index = require('../lib/entry-index') const memo = require('../lib/memoization') const path = require('path') @@ -10,9 +10,7 @@ const rimraf = util.promisify(require('rimraf')) const t = require('tap') const ssri = require('ssri') -const readFile = util.promisify(fs.readFile) - -const CacheContent = require('./util/cache-content') +const CacheContent = require('./fixtures/cache-content') const CONTENT = Buffer.from('foobarbaz', 'utf8') const SIZE = CONTENT.length @@ -460,14 +458,14 @@ t.test('get.copy with fs.copyfile', (t) => { }, 'copy operation returns basic metadata' ) - return readFile(DEST) + return fs.readFile(DEST) }) .then((data) => { t.same(data, CONTENT, 'data copied by key matches') return rimraf(DEST) }) .then(() => get.copy.byDigest(CACHE, INTEGRITY, DEST)) - .then(() => readFile(DEST)) + .then(() => fs.readFile(DEST)) .then((data) => { t.same(data, CONTENT, 'data copied by digest matches') return rimraf(DEST) diff --git a/test/ls.js b/test/ls.js index ada893c..4922fd1 100644 --- a/test/ls.js +++ b/test/ls.js @@ -1,6 +1,6 @@ 'use strict' -const CacheIndex = require('./util/cache-index') +const CacheIndex = require('./fixtures/cache-index') const contentPath = require('../lib/content/path') const index = require('../lib/entry-index.js') const t = require('tap') diff --git a/test/put.js b/test/put.js index 0dc4b91..d9ead19 100644 --- a/test/put.js +++ b/test/put.js @@ -1,15 +1,11 @@ 'use strict' -const util = require('util') - -const fs = require('fs') +const fs = require('@npmcli/fs') const index = require('../lib/entry-index') const memo = require('../lib/memoization') const t = require('tap') const ssri = require('ssri') -const readFile = util.promisify(fs.readFile) - const CONTENT = Buffer.from('foobarbaz', 'utf8') const KEY = 'my-test-key' const INTEGRITY = ssri.fromData(CONTENT).toString() @@ -24,7 +20,7 @@ t.test('basic bulk insertion', (t) => { .then((integrity) => { t.equal(integrity.toString(), INTEGRITY, 'returned content integrity') const dataPath = contentPath(CACHE, integrity) - return readFile(dataPath) + return fs.readFile(dataPath) }) .then((data) => { t.same(data, CONTENT, 'content was correctly inserted') @@ -40,7 +36,7 @@ t.test('basic stream insertion', (t) => { return stream.end(CONTENT).promise() .then(() => { t.equal(int.toString(), INTEGRITY, 'returned integrity matches expected') - return readFile(contentPath(CACHE, int)) + return fs.readFile(contentPath(CACHE, int)) }) .then((data) => { t.same(data, CONTENT, 'contents are identical to inserted content') @@ -102,7 +98,7 @@ t.test('optionally memoizes data on stream insertion', (t) => { return stream.end(CONTENT).promise() .then(() => { t.equal(int.toString(), INTEGRITY, 'integrity emitted as usual') - return readFile(contentPath(CACHE, int)) + return fs.readFile(contentPath(CACHE, int)) }) .then((data) => { t.same(data, CONTENT, 'contents are identical to inserted content') diff --git a/test/rm.js b/test/rm.js index 0325f25..c2169b0 100644 --- a/test/rm.js +++ b/test/rm.js @@ -1,14 +1,12 @@ 'use strict' -const util = require('util') - -const fs = require('fs') +const fs = require('@npmcli/fs') const index = require('../lib/entry-index') 
 const path = require('path')
 const t = require('tap')
 const ssri = require('ssri')
 
-const CacheContent = require('./util/cache-content')
+const CacheContent = require('./fixtures/cache-content')
 const CONTENT = Buffer.from('foobarbaz')
 const KEY = 'my-test-key'
 const INTEGRITY = ssri.fromData(CONTENT)
@@ -19,10 +17,6 @@
 const get = require('..').get
 const rm = require('..').rm
 
-const readFile = util.promisify(fs.readFile)
-const mkdir = util.promisify(fs.mkdir)
-const writeFile = util.promisify(fs.writeFile)
-const readdir = util.promisify(fs.readdir)
 const cacheContent = CacheContent({
   [INTEGRITY]: CONTENT,
 })
@@ -51,7 +45,7 @@ t.test('rm.entry removes entries, not content', (t) => {
       throw err
     })
     .then(() => {
-      return readFile(contentPath(cache, INTEGRITY))
+      return fs.readFile(contentPath(cache, INTEGRITY))
     })
     .then((data) => {
       t.same(data, CONTENT, 'content remains in cache')
@@ -81,7 +75,7 @@ t.test('rm.content removes content, not entries', (t) => {
       throw err
     })
     .then(() => {
-      return readFile(contentPath(cache, INTEGRITY))
+      return fs.readFile(contentPath(cache, INTEGRITY))
     })
     .then(() => {
       throw new Error('unexpected success')
@@ -102,16 +96,16 @@ t.test('rm.all deletes content and index dirs', (t) => {
     metadata: METADATA,
   })
     .then(() => {
-      return mkdir(path.join(cache, 'tmp'))
+      return fs.mkdir(path.join(cache, 'tmp'))
     })
     .then(() => {
-      return writeFile(path.join(cache, 'other.js'), 'hi')
+      return fs.writeFile(path.join(cache, 'other.js'), 'hi')
     })
     .then(() => {
      return rm.all(cache)
     })
     .then(() => {
-      return readdir(cache)
+      return fs.readdir(cache)
     })
     .then((files) => {
       t.same(
diff --git a/test/strict.js b/test/strict.js
deleted file mode 100644
index d6597a8..0000000
--- a/test/strict.js
+++ /dev/null
@@ -1,34 +0,0 @@
-'use strict'
-
-const fs = require('fs')
-const glob = require('glob')
-const path = require('path')
-const { test } = require('tap')
-
-test('all JavaScript source files use strict mode', function (t) {
-  const globStr = '**/*.js'
-  const root = path.resolve(__dirname, '../')
-  glob(
-    globStr,
-    {
-      cwd: root,
-      ignore: ['node_modules/**/*.js', 'coverage/**/*.js'],
-    },
-    function (err, files) {
-      if (err) {
-        throw err
-      }
-
-      const line = '\'use strict\'\n'
-      const bytecount = line.length
-      const buf = Buffer.alloc(bytecount)
-      files.forEach(function (f) {
-        const fd = fs.openSync(path.join(root, f), 'r')
-        fs.readSync(fd, buf, 0, bytecount, 0)
-        fs.closeSync(fd)
-        t.equal(buf.toString('utf8'), line, f + ' is using strict mode.')
-      })
-      t.end()
-    }
-  )
-})
diff --git a/test/util.move-file.js b/test/util.move-file.js
deleted file mode 100644
index eb87465..0000000
--- a/test/util.move-file.js
+++ /dev/null
@@ -1,251 +0,0 @@
-'use strict'
-
-const util = require('util')
-
-const fs = require('fs')
-const path = require('path')
-const t = require('tap')
-
-const moveFile = require('../lib/util/move-file')
-
-const readFile = util.promisify(fs.readFile)
-const stat = util.promisify(fs.stat)
-const open = util.promisify(fs.open)
-const close = util.promisify(fs.close)
-const readdir = util.promisify(fs.readdir)
-const chmod = util.promisify(fs.chmod)
-
-t.test('move a file', function (t) {
-  const testDir = t.testdir({
-    src: 'foo',
-  })
-  return moveFile(testDir + '/src', testDir + '/dest')
-    .then(() => {
-      return readFile(testDir + '/dest', 'utf8')
-    })
-    .then((data) => {
-      t.equal(data, 'foo', 'file data correct')
-      return stat(testDir + '/src').catch((err) => {
-        t.ok(err, 'src read error')
-        t.equal(err.code, 'ENOENT', 'src does not exist')
-      })
-    })
-})
-
-t.test('does not clobber existing files', function (t) {
-  const testDir = t.testdir({
-    src: 'foo',
-    dest: 'bar',
-  })
-  return moveFile(testDir + '/src', testDir + '/dest')
-    .then(() => {
-      return readFile(testDir + '/dest', 'utf8')
-    })
-    .then((data) => {
-      t.equal(data, 'bar', 'conflicting file left intact')
-      return stat(testDir + '/src').catch((err) => {
-        t.ok(err, 'src read error')
-        t.equal(err.code, 'ENOENT', 'src file still deleted')
-      })
-    })
-})
-
-t.test('does not move a file into an existing directory', function (t) {
-  const testDir = t.testdir({
-    src: 'foo',
-    dest: {},
-  })
-  return moveFile(testDir + '/src', testDir + '/dest')
-    .then(() => {
-      return readdir(testDir + '/dest')
-    })
-    .then((files) => {
-      t.equal(files.length, 0, 'directory remains empty')
-    })
-})
-
-t.test('does not error if destination file is open', function (t) {
-  const testDir = t.testdir({
-    src: 'foo',
-    dest: 'bar',
-  })
-
-  return open(testDir + '/dest', 'r+').then((fd) => {
-    return moveFile(testDir + '/src', testDir + '/dest')
-      .then(() => {
-        return close(fd)
-      })
-      .then(() => {
-        return readFile(testDir + '/dest', 'utf8')
-      })
-      .then((data) => {
-        t.equal(data, 'bar', 'destination left intact')
-        return stat(testDir + '/src').catch((err) => {
-          t.ok(err, 'src read error')
-          t.equal(err.code, 'ENOENT', 'src does not exist')
-        })
-      })
-  })
-})
-
-t.test('fallback to renaming on missing files post-move', function (t) {
-  const testDir = t.testdir({
-    src: 'foo',
-  })
-
-  // Sets up a fs mock that will fail at first unlink/stat call in order
-  // to trigger the fallback scenario then restores the fs methods allowing
-  // for the rename functionality to succeed
-  let shouldMock = true
-  const missingFileError = new Error('ENOENT')
-  missingFileError.code = 'ENOENT'
-  const mockFS = {
-    ...fs,
-    promises: {
-      ...fs.promises,
-      rename: async (src, dest) => {
-        throw Object.assign(new Error('EXDEV'), { code: 'EXDEV' })
-      },
-      link: async (src, dest) => {
-        throw new Error('nope')
-      },
-      unlink: async (path) => {
-        if (shouldMock) {
-          throw missingFileError
-        } else {
-          return fs.promises.unlink(path)
-        }
-      },
-      lstat: async (path, cb) => {
-        if (shouldMock) {
-          shouldMock = false
-          throw missingFileError
-        } else {
-          return fs.promises.lstat(path)
-        }
-      },
-      stat: async (path, cb) => {
-        if (shouldMock) {
-          shouldMock = false
-          throw missingFileError
-        } else {
-          return fs.promises.stat(path)
-        }
-      },
-    },
-    rename: (src, dest, cb) => {
-      if (shouldMock) {
-        cb(Object.assign(new Error('EXDEV'), { code: 'EXDEV' }))
-      } else {
-        fs.rename(src, dest, cb)
-      }
-    },
-    link (src, dest, cb) {
-      cb(new Error('nope'))
-    },
-    unlink (path, cb) {
-      if (shouldMock) {
-        cb(missingFileError)
-      } else {
-        fs.unlink(path, cb)
-      }
-    },
-    lstat (path, cb) {
-      if (shouldMock && path === testDir + '/dest') {
-        cb(missingFileError)
-        shouldMock = false
-      } else {
-        fs.lstat(path, cb)
-      }
-    },
-    stat (path, cb) {
-      if (shouldMock && path === testDir + '/dest') {
-        cb(missingFileError)
-        shouldMock = false
-      } else {
-        fs.stat(path, cb)
-      }
-    },
-  }
-  const mockedMoveFile = t.mock('../lib/util/move-file', {
-    fs: mockFS,
-    '@npmcli/move-file': t.mock('@npmcli/move-file', {
-      fs: mockFS,
-    }),
-  })
-
-  // actual tests are the same used in the simple "move a file" test
-  // since the renaming fallback should accomplish the same results
-  t.plan(3)
-  return mockedMoveFile(testDir + '/src', testDir + '/dest')
-    .then(() => {
-      return readFile(testDir + '/dest', 'utf8')
-    })
-    .then((data) => {
-      t.equal(data, 'foo', 'file data correct')
-      return stat(testDir + '/src').then(() => {
-        t.fail('src file should not exist, but it does!')
-      }).catch((err) => {
-        t.ok(err, 'src read error')
-        t.equal(err.code, 'ENOENT', 'src does not exist')
-      })
-    })
-})
-
-t.test('verify weird EPERM on Windows behavior', t => {
-  const gfsLink = fs.link
-  global.__CACACHE_TEST_FAKE_WINDOWS__ = true
-  const gfs = require('fs')
-  let calledMonkeypatch = false
-  gfs.link = (src, dest, cb) => {
-    calledMonkeypatch = true
-    setImmediate(() => cb(Object.assign(new Error('yolo'), {
-      code: 'EPERM',
-    })))
-    gfs.link = gfsLink
-    global.__CACACHE_TEST_FAKE_WINDOWS__ = false
-  }
-  const testDir = t.testdir({
-    eperm: {
-      src: 'epermmy',
-    },
-  })
-
-  return moveFile(testDir + '/eperm/src', testDir + '/eperm/dest')
-    .then(() => t.ok(calledMonkeypatch, 'called the patched fs.link fn'))
-    .then(() => t.rejects(readFile('eperm/dest'), {
-      code: 'ENOENT',
-    }, 'destination file did not get written'))
-    .then(() => t.rejects(readFile('eperm/src'), {
-      code: 'ENOENT',
-    }, 'src file did get deleted'))
-})
-
-t.test(
-  'errors if dest is not writable',
-  {
-    skip: process.platform === 'win32',
-  },
-  function (t) {
-    const testDir = t.testdir({
-      src: 'foo',
-      dest: {},
-    })
-
-    return chmod(testDir + '/dest', parseInt('400', 8))
-      .then(() => {
-        return moveFile(testDir + '/src', path.join(testDir + '/dest', 'file'))
-          .then(() => {
-            throw new Error('move succeeded and should not have')
-          })
-          .catch((err) => {
-            t.ok(err, 'error was returned')
-            t.equal(err.code, 'EACCES', 'error is about permissions')
-            return readFile(testDir + '/src', 'utf8')
-          })
-      })
-      .then((data) => {
-        t.equal(data, 'foo', 'src contents left intact')
-      })
-  }
-)
diff --git a/test/util.fix-owner.js b/test/util/fix-owner.js
similarity index 97%
rename from test/util.fix-owner.js
rename to test/util/fix-owner.js
index 020df17..1639c82 100644
--- a/test/util.fix-owner.js
+++ b/test/util/fix-owner.js
@@ -23,7 +23,7 @@ const patchesGetuid = (t) => {
     process.getuid = getuid
   })
 }
-const getFixOwner = (t, opts) => t.mock('../lib/util/fix-owner', opts)
+const getFixOwner = (t, opts) => t.mock('../../lib/util/fix-owner', opts)
 
 // chownr and chownr.fix error handling tests
 
@@ -106,7 +106,7 @@ t.test('attempt to chownr.sync on platforms that do not need ownership fix', (t) => {
   t.teardown(() => {
     process.getuid = getuid
   })
-  const fixOwner = require('../lib/util/fix-owner')
+  const fixOwner = require('../../lib/util/fix-owner')
 
   t.plan(1)
   return fixOwner.chownr(CACHE, filename)
@@ -165,7 +165,7 @@ t.test('attempt to chownr.sync on platforms that do not need ownership fix', (t) => {
   t.teardown(() => {
    process.getuid = getuid
   })
-  const fixOwner = require('../lib/util/fix-owner')
+  const fixOwner = require('../../lib/util/fix-owner')
 
   t.notOk(fixOwner.chownr.sync(CACHE, filename), 'should not throw')
   t.end()
diff --git a/test/util/move-file.js b/test/util/move-file.js
new file mode 100644
index 0000000..5f77fc4
--- /dev/null
+++ b/test/util/move-file.js
@@ -0,0 +1,201 @@
+'use strict'
+
+const fs = require('@npmcli/fs')
+const path = require('path')
+const t = require('tap')
+
+const moveFile = require('../../lib/util/move-file')
+
+t.test('move a file', function (t) {
+  const testDir = t.testdir({
+    src: 'foo',
+  })
+  return moveFile(testDir + '/src', testDir + '/dest')
+    .then(() => {
+      return fs.readFile(testDir + '/dest', 'utf8')
+    })
+    .then((data) => {
+      t.equal(data, 'foo', 'file data correct')
+      return fs.stat(testDir + '/src').catch((err) => {
+        t.ok(err, 'src read error')
+        t.equal(err.code, 'ENOENT', 'src does not exist')
+      })
+    })
+})
+
+t.test('does not clobber existing files', function (t) {
+  const testDir = t.testdir({
+    src: 'foo',
+    dest: 'bar',
+  })
+  return moveFile(testDir + '/src', testDir + '/dest')
+    .then(() => {
+      return fs.readFile(testDir + '/dest', 'utf8')
+    })
+    .then((data) => {
+      t.equal(data, 'bar', 'conflicting file left intact')
+      return fs.stat(testDir + '/src').catch((err) => {
+        t.ok(err, 'src read error')
+        t.equal(err.code, 'ENOENT', 'src file still deleted')
+      })
+    })
+})
+
+t.test('does not move a file into an existing directory', function (t) {
+  const testDir = t.testdir({
+    src: 'foo',
+    dest: {},
+  })
+  return moveFile(testDir + '/src', testDir + '/dest')
+    .then(() => {
+      return fs.readdir(testDir + '/dest')
+    })
+    .then((files) => {
+      t.equal(files.length, 0, 'directory remains empty')
+    })
+})
+
+t.test('does not error if destination file is open', function (t) {
+  const testDir = t.testdir({
+    src: 'foo',
+    dest: 'bar',
+  })
+
+  return fs.open(testDir + '/dest', 'r+').then((fd) => {
+    return moveFile(testDir + '/src', testDir + '/dest')
+      .then(() => {
+        return fs.close(fd)
+      })
+      .then(() => {
+        return fs.readFile(testDir + '/dest', 'utf8')
+      })
+      .then((data) => {
+        t.equal(data, 'bar', 'destination left intact')
+        return fs.stat(testDir + '/src').catch((err) => {
+          t.ok(err, 'src read error')
+          t.equal(err.code, 'ENOENT', 'src does not exist')
+        })
+      })
+  })
+})
+
+t.test('fallback to renaming on missing files post-move', async function (t) {
+  const testDir = t.testdir({
+    src: 'foo',
+  })
+
+  const missingFileError = new Error('ENOENT')
+  missingFileError.code = 'ENOENT'
+  const mockFS = {
+    ...fs,
+    async unlink (path) {
+      throw missingFileError
+    },
+    async stat (path) {
+      throw missingFileError
+    },
+  }
+  const mockedMoveFile = t.mock('../../lib/util/move-file', {
+    '@npmcli/fs': mockFS,
+  })
+
+  await mockedMoveFile(testDir + '/src', testDir + '/dest')
+  const data = await fs.readFile(testDir + '/dest', 'utf8')
+  t.equal(data, 'foo', 'file data correct')
+  await t.rejects(
+    fs.stat(testDir + '/src'),
+    { code: 'ENOENT' },
+    './src does not exist'
+  )
+})
+
+t.test('non ENOENT error on move fallback', async function (t) {
+  const testDir = t.testdir({
+    src: 'foo',
+  })
+
+  const missingFileError = new Error('ENOENT')
+  missingFileError.code = 'ENOENT'
+  const otherError = new Error('UNKNOWN')
+  otherError.code = 'OTHER'
+  const mockFS = {
+    ...fs,
+    async unlink (path) {
+      throw missingFileError
+    },
+    async stat (path) {
+      throw otherError
+    },
+
+  }
+  const mockedMoveFile = t.mock('../../lib/util/move-file', {
+    '@npmcli/fs': mockFS,
+  })
+
+  await t.rejects(
+    mockedMoveFile(testDir + '/src', testDir + '/dest'),
+    { code: 'OTHER' },
+    'throws other error'
+  )
+})
+
+t.test('verify weird EPERM on Windows behavior', t => {
+  const processPlatform = process.platform
+  Object.defineProperty(process, 'platform', { value: 'win32' })
+  t.teardown(() => {
+    Object.defineProperty(process, 'platform', { value: processPlatform })
+  })
+  const gfsLink = fs.link
+  const gfs = require('@npmcli/fs')
+  let calledMonkeypatch = false
+  gfs.link = async (src, dest) => {
+    calledMonkeypatch = true
+    gfs.link = gfsLink
+    throw Object.assign(new Error('yolo'), {
+      code: 'EPERM',
+    })
+  }
+  const testDir = t.testdir({
+    eperm: {
+      src: 'epermmy',
+    },
+  })
+
+  return moveFile(testDir + '/eperm/src', testDir + '/eperm/dest')
+    .then(() => t.ok(calledMonkeypatch, 'called the patched fs.link fn'))
+    .then(() => t.rejects(fs.readFile('eperm/dest'), {
+      code: 'ENOENT',
+    }, 'destination file did not get written'))
+    .then(() => t.rejects(fs.readFile('eperm/src'), {
+      code: 'ENOENT',
+    }, 'src file did get deleted'))
+})
+
+t.test(
+  'errors if dest is not writable',
+  {
+    skip: process.platform === 'win32',
+  },
+  function (t) {
+    const testDir = t.testdir({
+      src: 'foo',
+      dest: {},
+    })
+
+    return fs.chmod(testDir + '/dest', parseInt('400', 8))
+      .then(() => {
+        return moveFile(testDir + '/src', path.join(testDir + '/dest', 'file'))
+          .then(() => {
+            throw new Error('move succeeded and should not have')
+          })
+          .catch((err) => {
+            t.ok(err, 'error was returned')
+            t.equal(err.code, 'EACCES', 'error is about permissions')
+            return fs.readFile(testDir + '/src', 'utf8')
+          })
+      })
+      .then((data) => {
+        t.equal(data, 'foo', 'src contents left intact')
+      })
  }
+)
diff --git a/test/util.tmp.js b/test/util/tmp.js
similarity index 82%
rename from test/util.tmp.js
rename to test/util/tmp.js
index 8298766..5b2696a 100644
--- a/test/util.tmp.js
+++ b/test/util/tmp.js
@@ -1,8 +1,6 @@
 'use strict'
 
-const util = require('util')
-
-const fs = require('fs')
+const fs = require('@npmcli/fs')
 const path = require('path')
 
 const t = require('tap')
@@ -10,13 +8,11 @@ const CACHE = t.testdir()
 
 const mockedFixOwner = () => Promise.resolve(1)
 // temporarily points to original mkdirfix implementation
-mockedFixOwner.mkdirfix = require('../lib/util/fix-owner').mkdirfix
-const tmp = t.mock('../lib/util/tmp', {
-  '../lib/util/fix-owner': mockedFixOwner,
+mockedFixOwner.mkdirfix = require('../../lib/util/fix-owner').mkdirfix
+const tmp = t.mock('../../lib/util/tmp', {
+  '../../lib/util/fix-owner': mockedFixOwner,
 })
 
-const stat = util.promisify(fs.stat)
-
 t.test('creates a unique tmpdir inside the cache', (t) => {
   return tmp
     .mkdir(CACHE)
@@ -26,7 +22,7 @@ t.test('creates a unique tmpdir inside the cache', (t) => {
       /^tmp[\\/].*/,
       'returns a path inside tmp'
     )
-    return stat(dir)
+    return fs.stat(dir)
   })
   .then((s) => {
     t.ok(s.isDirectory(), 'path points to an existing directory')
@@ -36,7 +32,7 @@ t.test('creates a unique tmpdir inside the cache', (t) => {
 t.test('provides a utility that does resource disposal on tmp', (t) => {
   return tmp
     .withTmp(CACHE, (dir) => {
-      return stat(dir)
+      return fs.stat(dir)
        .then((s) => {
          t.ok(s.isDirectory(), 'path points to an existing directory')
        })
@@ -44,7 +40,7 @@ t.test('provides a utility that does resource disposal on tmp', (t) => {
    })
    .then((dir) => {
      return Promise.all([
-      stat(dir)
+      fs.stat(dir)
        .then(() => {
          throw new Error('expected fail')
        })
@@ -55,7 +51,7 @@ t.test('provides a utility that does resource disposal on tmp', (t) => {
          throw err
        }),
 
-      stat(path.join(CACHE, 'tmp')),
+      fs.stat(path.join(CACHE, 'tmp')),
      ]).then(([nope, yes]) => {
        t.notOk(nope, 'tmp subdir removed')
        t.ok(yes.isDirectory(), 'tmp parent dir left intact')
diff --git a/test/verify.js b/test/verify.js
index 616ef73..5c1db88 100644
--- a/test/verify.js
+++ b/test/verify.js
@@ -1,15 +1,13 @@
 'use strict'
 
-const util = require('util')
-
 const contentPath = require('../lib/content/path')
 const index = require('../lib/entry-index')
-const fs = require('fs')
+const fs = require('@npmcli/fs')
 const path = require('path')
 const t = require('tap')
 const ssri = require('ssri')
 
-const CacheContent = require('./util/cache-content')
+const CacheContent = require('./fixtures/cache-content')
 
 const CONTENT = Buffer.from('foobarbaz', 'utf8')
 const KEY = 'my-test-key'
@@ -18,12 +16,6 @@ const METADATA = { foo: 'bar' }
 
 const verify = require('..').verify
 
-const readFile = util.promisify(fs.readFile)
-const truncate = util.promisify(fs.truncate)
-const stat = util.promisify(fs.stat)
-const appendFile = util.promisify(fs.appendFile)
-const writeFile = util.promisify(fs.writeFile)
-
 // defines reusable errors
 const genericError = new Error('ERR')
 genericError.code = 'ERR'
@@ -47,9 +39,9 @@ function mockCache (t) {
 t.test('removes corrupted index entries from buckets', (t) => {
   return mockCache(t).then((CACHE) => {
     const BUCKET = index.bucketPath(CACHE, KEY)
-    return readFile(BUCKET, 'utf8').then((BUCKETDATA) => {
+    return fs.readFile(BUCKET, 'utf8').then((BUCKETDATA) => {
       // traaaaash
-      return appendFile(BUCKET, '\n234uhhh')
+      return fs.appendFile(BUCKET, '\n234uhhh')
        .then(() => {
          return verify(CACHE)
        })
@@ -60,7 +52,7 @@ t.test('removes corrupted index entries from buckets', (t) => {
            'content valid because of good entry'
          )
          t.equal(stats.totalEntries, 1, 'only one entry counted')
-          return readFile(BUCKET, 'utf8')
+          return fs.readFile(BUCKET, 'utf8')
        })
        .then((bucketData) => {
          const bucketEntry = JSON.parse(bucketData.split('\t')[1])
@@ -92,7 +84,7 @@ t.test('removes shadowed index entries from buckets', (t) => {
        'content valid because of good entry'
      )
      t.equal(stats.totalEntries, 1, 'only one entry counted')
-      return readFile(BUCKET, 'utf8')
+      return fs.readFile(BUCKET, 'utf8')
    })
    .then((bucketData) => {
      const stringified = JSON.stringify({
@@ -159,7 +151,7 @@ t.test('accepts function for custom user filtering of index entries', (t) => {
 t.test('removes corrupted content', (t) => {
   return mockCache(t).then((CACHE) => {
     const cpath = contentPath(CACHE, INTEGRITY)
-    return truncate(cpath, CONTENT.length - 1)
+    return fs.truncate(cpath, CONTENT.length - 1)
      .then(() => {
        return verify(CACHE)
      }).then((stats) => {
@@ -180,7 +172,7 @@ t.test('removes corrupted content', (t) => {
        },
        'reported correct collection counts'
      )
-      return stat(cpath)
+      return fs.stat(cpath)
        .then(() => {
          throw new Error('expected a failure')
        })
@@ -227,18 +219,18 @@ t.test('cleans up contents of tmp dir', (t) => {
    .then((CACHE) => {
      const tmpFile = path.join(CACHE, 'tmp', 'x')
      const misc = path.join(CACHE, 'y')
-      return Promise.all([writeFile(tmpFile, ''), writeFile(misc, '')]).then(
+      return Promise.all([fs.writeFile(tmpFile, ''), fs.writeFile(misc, '')]).then(
        () => verify(CACHE)
      ).then(() => {
        return Promise.all([
-          stat(tmpFile).catch((err) => {
+          fs.stat(tmpFile).catch((err) => {
            if (err.code === 'ENOENT') {
              return err
            }
 
            throw err
          }),
-          stat(misc),
+          fs.stat(misc),
        ]).then(([err, stat]) => {
          t.equal(err.code, 'ENOENT', 'tmp file was blown away')
          t.ok(stat, 'misc file was not touched')
@@ -252,7 +244,7 @@ t.test('writes a file with last verification time', (t) => {
   return verify(CACHE).then(() => {
     return Promise.all([
       verify.lastRun(CACHE),
-      readFile(path.join(CACHE, '_lastverified'), 'utf8').then((data) => {
+      fs.readFile(path.join(CACHE, '_lastverified'), 'utf8').then((data) => {
        return new Date(parseInt(data))
      }),
    ]).then(([fromLastRun, fromFile]) => {
@@ -267,9 +259,9 @@ t.test('missing file error when validating cache content', (t) => {
   const missingFileError = new Error('ENOENT')
   missingFileError.code = 'ENOENT'
   const mockVerify = getVerify(t, {
-    fs: Object.assign({}, fs, {
-      stat: (path, cb) => {
-        cb(missingFileError)
+    '@npmcli/fs': Object.assign({}, fs, {
+      stat: async (path) => {
+        throw missingFileError
      },
    }),
  })
@@ -290,9 +282,9 @@ t.test('unknown error when validating content', (t) => {
   const mockVerify = getVerify(t, {
-    fs: Object.assign({}, fs, {
-      stat: (path, cb) => {
-        cb(genericError)
+    '@npmcli/fs': Object.assign({}, fs, {
+      stat: async (path) => {
+        throw genericError
      },
    }),
  })
@@ -329,14 +321,13 @@ t.test('unknown error when rebuilding bucket', (t) => {
   // shouldFail controls the right time to mock the error
   let shouldFail = false
   const mockVerify = getVerify(t, {
-    fs: Object.assign({}, fs, {
-      stat: (path, cb) => {
+    '@npmcli/fs': Object.assign({}, fs, {
+      stat: async (path) => {
        if (shouldFail) {
-          return cb(genericError)
+          throw genericError
        }
-
-        fs.stat(path, cb)
        shouldFail = true
+        return fs.stat(path)
      },
    }),
  })