From 34db7671daee5196c85f78ac82b90884e52fa555 Mon Sep 17 00:00:00 2001 From: Chip Morningstar Date: Tue, 14 Mar 2023 18:38:03 -0700 Subject: [PATCH 1/4] feat: implement bundleStore Closes #7089 --- packages/swing-store/package.json | 5 +- packages/swing-store/src/bundleStore.js | 244 ++++++++++++++++++ packages/swing-store/src/snapStore.js | 17 +- packages/swing-store/src/swingStore.js | 181 +++++++++---- packages/swing-store/src/util.js | 16 ++ packages/swing-store/test/bohr-module.js | 3 + packages/swing-store/test/faux-module.js | 3 + .../swing-store/test/test-exportImport.js | 22 ++ packages/swing-store/test/test-state.js | 54 ++++ 9 files changed, 477 insertions(+), 68 deletions(-) create mode 100644 packages/swing-store/src/bundleStore.js create mode 100644 packages/swing-store/src/util.js create mode 100644 packages/swing-store/test/bohr-module.js create mode 100644 packages/swing-store/test/faux-module.js diff --git a/packages/swing-store/package.json b/packages/swing-store/package.json index d08e7ac9fcf..7dd6c32d13c 100644 --- a/packages/swing-store/package.json +++ b/packages/swing-store/package.json @@ -20,8 +20,11 @@ "dependencies": { "@agoric/assert": "^0.5.1", "@agoric/internal": "^0.2.1", - "readline-transform": "^1.0.0", + "@endo/base64": "^0.2.28", + "@endo/bundle-source": "^2.4.2", + "@endo/check-bundle": "^0.2.14", "better-sqlite3": "^8.2.0", + "readline-transform": "^1.0.0", "tmp": "^0.2.1" }, "devDependencies": { diff --git a/packages/swing-store/src/bundleStore.js b/packages/swing-store/src/bundleStore.js new file mode 100644 index 00000000000..d078837b803 --- /dev/null +++ b/packages/swing-store/src/bundleStore.js @@ -0,0 +1,244 @@ +// @ts-check +import { createHash } from 'crypto'; +import { Readable } from 'stream'; +import { Buffer } from 'buffer'; +import { encodeBase64, decodeBase64 } from '@endo/base64'; +import { checkBundle } from '@endo/check-bundle/lite.js'; +import { Fail, q } from '@agoric/assert'; +import { buffer } from 
'./util.js'; + +/** + * @typedef { { moduleFormat: 'getExport', source: string, sourceMap: string? } } GetExportBundle + * @typedef { { moduleFormat: 'nestedEvaluate', source: string, sourceMap: string? } } NestedEvaluateBundle + * @typedef { { moduleFormat: 'endoZipBase64', endoZipBase64: string, endoZipBase64Sha512: string } } EndoZipBase64Bundle + * @typedef { EndoZipBase64Bundle | GetExportBundle | NestedEvaluateBundle } Bundle_proper + */ +/** + * @typedef { { moduleFormat: string, endoZipBase64: string, endoZipBase64Sha512: string } } Bundle + * + * @typedef { import('./swingStore').SwingStoreExporter } SwingStoreExporter + * + * @typedef {{ + * addBundle: (bundleID: string, bundle: Bundle) => void; + * hasBundle: (bundleID: string) => boolean + * getBundle: (bundleID: string) => Bundle; + * deleteBundle: (bundleID: string) => void; + * }} BundleStore + * + * @typedef {{ + * exportBundle: (name: string) => AsyncIterable, + * importBundle: (artifactName: string, exporter: SwingStoreExporter, bundleID: string) => void, + * getExportRecords: () => Iterable<[key: string, value: string]>, + * getArtifactNames: () => AsyncIterable, + * }} BundleStoreInternal + * + * @typedef {{ + * dumpBundles: () => {}, + * }} BundleStoreDebug + * + */ + +/** + * @param {*} db + * @param {() => void} ensureTxn + * @param {(key: string, value: string | undefined) => void} noteExport + * @returns {BundleStore & BundleStoreInternal & BundleStoreDebug} + */ +export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { + db.exec(` + CREATE TABLE IF NOT EXISTS bundles ( + bundleID TEXT, + bundle BLOB, + PRIMARY KEY (bundleID) + ) + `); + + function bundleArtifactName(bundleID) { + return `bundle.${bundleID}`; + } + + function bundleIdFromHash(hash) { + return `b1-${hash}`; + } + + const sqlAddBundle = db.prepare(` + INSERT OR REPLACE INTO bundles + (bundleID, bundle) + VALUES (?, ?) + `); + + /** + * Store a bundle. Here the bundle itself is presumed valid. 
+ * + * @param {string} bundleID + * @param {Bundle} bundle + */ + function addBundle(bundleID, bundle) { + const { moduleFormat, endoZipBase64, endoZipBase64Sha512 } = bundle; + moduleFormat === 'endoZipBase64' || + Fail`unsupported module format ${q(moduleFormat)}`; + bundleID === bundleIdFromHash(endoZipBase64Sha512) || + Fail`bundleID ${q(bundleID)} does not match bundle`; + ensureTxn(); + sqlAddBundle.run(bundleID, decodeBase64(endoZipBase64)); + noteExport(bundleArtifactName(bundleID), bundleID); + } + + const sqlHasBundle = db.prepare(` + SELECT count(*) + FROM bundles + WHERE bundleID = ? + `); + sqlHasBundle.pluck(true); + + function hasBundle(bundleID) { + const count = sqlHasBundle.get(bundleID); + return count !== 0; + } + + const sqlGetBundle = db.prepare(` + SELECT bundle + FROM bundles + WHERE bundleID = ? + `); + sqlGetBundle.pluck(true); + + function getBundle(bundleID) { + bundleID.startsWith('b1-') || Fail`invalid bundleID ${q(bundleID)}`; + const rawBundle = sqlGetBundle.get(bundleID); + rawBundle || Fail`bundle ${q(bundleID)} not found`; + return harden({ + moduleFormat: 'endoZipBase64', + endoZipBase64Sha512: bundleID.substring(3), + endoZipBase64: encodeBase64(rawBundle), + }); + } + + const sqlDeleteBundle = db.prepare(` + DELETE FROM bundles + WHERE bundleID = ? + `); + + function deleteBundle(bundleID) { + if (hasBundle(bundleID)) { + ensureTxn(); + sqlDeleteBundle.run(bundleID); + noteExport(bundleArtifactName(bundleID), undefined); + } + } + + /** + * Read a bundle and return it as a stream of data suitable for export to + * another store. 
+ * + * Bundle artifact names should be strings of the form: + * `bundle.${bundleID}` + * + * @param {string} name + * + * @yields {Uint8Array} + * @returns {AsyncIterable} + */ + async function* exportBundle(name) { + typeof name === 'string' || Fail`artifact name must be a string`; + const parts = name.split('.'); + const [type, bundleID] = parts; + // prettier-ignore + (parts.length === 2 && type === 'bundle') || + Fail`expected artifact name of the form 'bundle.{bundleID}', saw ${q(name)}`; + const bundle = getBundle(bundleID); + bundle || Fail`bundle ${q(name)} not available`; + yield* Readable.from(Buffer.from(decodeBase64(bundle.endoZipBase64))); + } + + const sqlGetBundleIDs = db.prepare(` + SELECT bundleID + FROM bundles + ORDER BY bundleID + `); + sqlGetBundleIDs.pluck(true); + + /** + * Obtain artifact metadata records for bundles contained in this store. + * + * @yields {[key: string, value: string]} + * @returns {Iterable<[key: string, value: string]>} + */ + function* getExportRecords() { + for (const bundleID of sqlGetBundleIDs.iterate()) { + yield [bundleArtifactName(bundleID), bundleID]; + } + } + + async function* getArtifactNames() { + for (const bundleID of sqlGetBundleIDs.iterate()) { + yield bundleArtifactName(bundleID); + } + } + + function computeSha512(bytes) { + const hash = createHash('sha512'); + hash.update(bytes); + return hash.digest().toString('hex'); + } + + /** + * @param {string} name Artifact name of the bundle + * @param {SwingStoreExporter} exporter Whence to get the bits + * @param {string} bundleID Bundle ID of the bundle + * @returns {Promise} + */ + async function importBundle(name, exporter, bundleID) { + const parts = name.split('.'); + const [type, bundleIDkey] = parts; + // prettier-ignore + parts.length === 2 && type === 'bundle' || + Fail`expected artifact name of the form 'bundle.{bundleID}', saw '${q(name)}'`; + bundleIDkey === bundleID || + Fail`bundle artifact name ${name} doesn't match bundleID ${bundleID}`; + 
bundleID.startsWith('b1-') || Fail`invalid bundleID ${q(bundleID)}`; + const artifactChunks = exporter.getArtifact(name); + const inStream = Readable.from(artifactChunks); + const rawBundle = await buffer(inStream); + const bundle = harden({ + moduleFormat: 'endoZipBase64', + endoZipBase64Sha512: bundleID.substring(3), + endoZipBase64: encodeBase64(rawBundle), + }); + // Assert that the bundle contents match the ID and hash + await checkBundle(bundle, computeSha512, bundleID); + addBundle(bundleID, bundle); + } + + const sqlDumpBundles = db.prepare(` + SELECT bundleID, bundle + FROM bundles + ORDER BY bundleID + `); + + /** + * debug function to dump active bundles + */ + function dumpBundles() { + const sql = sqlDumpBundles; + const dump = {}; + for (const row of sql.iterate()) { + const { bundleID, bundle } = row; + dump[bundleID] = encodeBase64(bundle); + } + return dump; + } + + return harden({ + addBundle, + hasBundle, + getBundle, + deleteBundle, + getExportRecords, + getArtifactNames, + exportBundle, + importBundle, + + dumpBundles, + }); +} diff --git a/packages/swing-store/src/snapStore.js b/packages/swing-store/src/snapStore.js index 2f34cf8ad26..a76a9f44735 100644 --- a/packages/swing-store/src/snapStore.js +++ b/packages/swing-store/src/snapStore.js @@ -1,5 +1,4 @@ // @ts-check -import { Buffer } from 'buffer'; import { createHash } from 'crypto'; import { finished as finishedCallback, Readable } from 'stream'; import { promisify } from 'util'; @@ -7,6 +6,7 @@ import { createGzip, createGunzip } from 'zlib'; import { Fail, q } from '@agoric/assert'; import { aggregateTryFinally, PromiseAllOrErrors } from '@agoric/internal'; import { fsStreamReady } from '@agoric/internal/src/fs-stream.js'; +import { buffer } from './util.js'; /** * @typedef {object} SnapshotResult @@ -52,21 +52,6 @@ import { fsStreamReady } from '@agoric/internal/src/fs-stream.js'; * */ -/** - * This is a polyfill for the `buffer` function from Node's - * 'stream/consumers' package, 
which unfortunately only exists in newer versions - * of Node. - * - * @param {AsyncIterable} inStream - */ -export const buffer = async inStream => { - const chunks = []; - for await (const chunk of inStream) { - chunks.push(chunk); - } - return Buffer.concat(chunks); -}; - const finished = promisify(finishedCallback); const noPath = /** @type {import('fs').PathLike} */ ( diff --git a/packages/swing-store/src/swingStore.js b/packages/swing-store/src/swingStore.js index 43e663ef280..2a1e3f8030b 100644 --- a/packages/swing-store/src/swingStore.js +++ b/packages/swing-store/src/swingStore.js @@ -12,9 +12,25 @@ import { makeMeasureSeconds } from '@agoric/internal'; import { makeTranscriptStore } from './transcriptStore.js'; import { makeSnapStore } from './snapStore.js'; +import { makeBundleStore } from './bundleStore.js'; import { createSHA256 } from './hasher.js'; -export { makeSnapStore }; +export { makeSnapStore, makeBundleStore }; + +/** + * This is a polyfill for the `buffer` function from Node's + * 'stream/consumers' package, which unfortunately only exists in newer versions + * of Node. 
+ * + * @param {AsyncIterable} inStream + */ +export const buffer = async inStream => { + const chunks = []; + for await (const chunk of inStream) { + chunks.push(chunk); + } + return Buffer.concat(chunks); +}; export function makeSnapStoreIO() { return { @@ -58,10 +74,15 @@ function getKeyType(key) { * @typedef { import('./transcriptStore').TranscriptStoreInternal } TranscriptStoreInternal * @typedef { import('./transcriptStore').TranscriptStoreDebug } TranscriptStoreDebug * + * @typedef { import('./bundleStore').BundleStore } BundleStore + * @typedef { import('./bundleStore').BundleStoreInternal } BundleStoreInternal + * @typedef { import('./bundleStore').BundleStoreDebug } BundleStoreDebug + * * @typedef {{ * kvStore: KVStore, // a key-value API object to load and store data on behalf of the kernel * transcriptStore: TranscriptStore, // a stream-oriented API object to append and read transcript entries * snapStore: SnapStore, + * bundleStore: BundleStore, * startCrank: () => void, * establishCrankSavepoint: (savepoint: string) => void, * rollbackCrank: (savepoint: string) => void, @@ -94,6 +115,7 @@ function getKeyType(key) { * @typedef {{ * transcriptStore: TranscriptStoreInternal, * snapStore: SnapStoreInternal, + * bundleStore: BundleStoreInternal, * }} SwingStoreInternal * * @typedef {{ @@ -180,6 +202,7 @@ export function makeSwingStoreExporter(dirPath, exportMode = 'current') { // ensureTxn can be a dummy, we just started one const ensureTxn = () => {}; const snapStore = makeSnapStore(db, ensureTxn, makeSnapStoreIO()); + const bundleStore = makeBundleStore(db, ensureTxn); const transcriptStore = makeTranscriptStore(db, ensureTxn, () => {}); const sqlGetAllKVData = db.prepare(` @@ -201,6 +224,7 @@ export function makeSwingStoreExporter(dirPath, exportMode = 'current') { } yield* snapStore.getExportRecords(true); yield* transcriptStore.getExportRecords(true); + yield* bundleStore.getExportRecords(); } /** @@ -210,6 +234,7 @@ export function 
makeSwingStoreExporter(dirPath, exportMode = 'current') { async function* getArtifactNames() { yield* snapStore.getArtifactNames(exportHistoricalSnapshots); yield* transcriptStore.getArtifactNames(exportHistoricalTranscripts); + yield* bundleStore.getArtifactNames(); } /** @@ -224,6 +249,8 @@ export function makeSwingStoreExporter(dirPath, exportMode = 'current') { return snapStore.exportSnapshot(name, exportHistoricalSnapshots); } else if (type === 'transcript') { return transcriptStore.exportSpan(name, exportHistoricalTranscripts); + } else if (type === 'bundle') { + return bundleStore.exportBundle(name); } else { assert.fail(`invalid artifact type ${q(type)}`); } @@ -273,8 +300,11 @@ export function makeSwingStoreExporter(dirPath, exportMode = 'current') { * to metadata kept in the kvStore. Persistently stored in a sqllite table. * * snapStore - large object store used to hold XS memory image snapshots of - * vats. Objects are stored in files named by the cryptographic hash of the - * data they hold, with tracking metadata kept in the kvStore. + * vats. Objects are stored in a separate table keyed to the vat and delivery + * number of the snapshot, with tracking metadata kept in the kvStore. + * + * bundleStore - large object store used to hold JavaScript code bundles. + * Bundle contents are stored in a separate table keyed by bundleID. * * All persistent data is kept within a single directory belonging to the swing * store. The individual stores present individual APIs suitable for their @@ -382,8 +412,42 @@ function makeSwingStore(dirPath, forceReset, options = {}) { // mode that defers merge work for a later attempt rather than block any // potential readers or writers. See https://sqlite.org/wal.html for details. 
+ // PRAGMAs have to happen outside a transaction db.exec(`PRAGMA journal_mode=WAL`); db.exec(`PRAGMA synchronous=FULL`); + + // We use IMMEDIATE because the kernel is supposed to be the sole writer of + // the DB, and if some other process is holding a write lock, we want to find + // out earlier rather than later. We do not use EXCLUSIVE because we should + // allow external *readers*, and we use WAL mode. Read all of + // https://sqlite.org/lang_transaction.html, especially section 2.2 + const sqlBeginTransaction = db.prepare('BEGIN IMMEDIATE TRANSACTION'); + + // We use explicit transactions to 1: not commit writes until the host + // application calls commit() and 2: avoid expensive fsyncs until the + // appropriate commit point. All API methods that modify the database should + // call `ensureTxn` first, otherwise SQLite will automatically start a transaction + // for us, but it will commit/fsync at the end of the SQL statement. + // + // It is critical to call ensureTxn as the first step of any API call that + // might modify the database (any INSERT or DELETE, etc), to prevent SQLite + // from creating an automatic transaction, which will commit as soon as the + // SQL statement finishes. This would cause partial writes to be committed to + // the DB, and if the application crashes before the real hostStorage.commit() + // happens, it would wake up with inconsistent state. Aside from the setup + // initialization done here, the only commit point must be the + // hostStorage.commit() call. 
+ function ensureTxn() { + db || Fail`db not initialized`; + if (!db.inTransaction) { + sqlBeginTransaction.run(); + db.inTransaction || Fail`must be in a transaction`; + } + return db; + } + + // Perform all database initialization in a single transaction + sqlBeginTransaction.run(); db.exec(` CREATE TABLE IF NOT EXISTS kvStore ( key TEXT, @@ -399,6 +463,38 @@ function makeSwingStore(dirPath, forceReset, options = {}) { PRIMARY KEY (key) ) `); + + const { dumpTranscripts, ...transcriptStore } = makeTranscriptStore( + db, + ensureTxn, + // eslint-disable-next-line no-use-before-define + noteExport, + { + keepTranscripts, + }, + ); + const { dumpSnapshots, ...snapStore } = makeSnapStore( + db, + ensureTxn, + makeSnapStoreIO(), + // eslint-disable-next-line no-use-before-define + noteExport, + { + keepSnapshots, + }, + ); + const { dumpBundles, ...bundleStore } = makeBundleStore( + db, + ensureTxn, + // eslint-disable-next-line no-use-before-define + noteExport, + ); + + const sqlCommit = db.prepare('COMMIT'); + + // At this point, all database initialization should be complete, so commit now. + sqlCommit.run(); + let exportCallback; function setExportCallback(cb) { typeof cb === 'function' || Fail`callback must be a function`; @@ -408,36 +504,8 @@ function makeSwingStore(dirPath, forceReset, options = {}) { setExportCallback(options.exportCallback); } - const sqlBeginTransaction = db.prepare('BEGIN IMMEDIATE TRANSACTION'); let inCrank = false; - // We use explicit transactions to 1: not commit writes until the host - // application calls commit() and 2: avoid expensive fsyncs until the - // appropriate commit point. All API methods should call this first, otherwise - // SQLite will automatically start a transaction for us, but it will - // commit/fsync at the end of the DB run(). 
We use IMMEDIATE because the - // kernel is supposed to be the sole writer of the DB, and if some other - // process is holding a write lock, we want to find out earlier rather than - // later. We do not use EXCLUSIVE because we should allow external *readers*, - // and we use WAL mode. Read all of https://sqlite.org/lang_transaction.html, - // especially section 2.2 - // - // It is critical to call ensureTxn as the first step of any API call that - // might modify the database (any INSERT or DELETE, etc), to prevent SQLite - // from creating an automatic transaction, which will commit as soon as the - // SQL statement finishes. This would cause partial writes to be committed to - // the DB, and if the application crashes before the real hostStorage.commit() - // happens, it would wake up with inconsistent state. The only commit point - // must be the hostStorage.commit(). - function ensureTxn() { - db || Fail`db not initialized`; - if (!db.inTransaction) { - sqlBeginTransaction.run(); - db.inTransaction || Fail`must be in a transaction`; - } - return db; - } - function diskUsage() { if (dirPath) { const dataFilePath = `${dirPath}/swingstore.sqlite`; @@ -635,24 +703,6 @@ function makeSwingStore(dirPath, forceReset, options = {}) { }, }; - const { dumpTranscripts, ...transcriptStore } = makeTranscriptStore( - db, - ensureTxn, - noteExport, - { - keepTranscripts, - }, - ); - const { dumpSnapshots, ...snapStore } = makeSnapStore( - db, - ensureTxn, - makeSnapStoreIO(), - noteExport, - { - keepSnapshots, - }, - ); - const savepoints = []; const sqlReleaseSavepoints = db.prepare('RELEASE SAVEPOINT t0'); @@ -750,8 +800,6 @@ function makeSwingStore(dirPath, forceReset, options = {}) { inCrank = false; } - const sqlCommit = db.prepare('COMMIT'); - /** * Commit unsaved changes. 
*/ @@ -803,6 +851,7 @@ function makeSwingStore(dirPath, forceReset, options = {}) { kvEntries: dumpKVEntries(), transcripts: dumpTranscripts(includeHistorical), snapshots: dumpSnapshots(includeHistorical), + bundles: dumpBundles(), }); } @@ -824,10 +873,18 @@ function makeSwingStore(dirPath, forceReset, options = {}) { getSnapshotInfo: snapStore.getSnapshotInfo, }; + const bundleStorePublic = { + addBundle: bundleStore.addBundle, + hasBundle: bundleStore.hasBundle, + getBundle: bundleStore.getBundle, + deleteBundle: bundleStore.deleteBundle, + }; + const kernelStorage = { kvStore: kernelKVStore, transcriptStore: transcriptStorePublic, snapStore: snapStorePublic, + bundleStore: bundleStorePublic, startCrank, establishCrankSavepoint, rollbackCrank, @@ -849,6 +906,7 @@ function makeSwingStore(dirPath, forceReset, options = {}) { const internal = { snapStore, transcriptStore, + bundleStore, }; return harden({ @@ -928,6 +986,7 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { // tracks which of these we've seen, keyed by vatID. // vatID -> { snapshotKey: metadataKey, transcriptKey: metatdataKey } const vatArtifacts = new Map(); + const bundleArtifacts = new Map(); for await (const [key, value] of exporter.getExportData()) { const [tag] = key.split('.', 1); @@ -940,6 +999,13 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { } else { kernelStorage.kvStore.set(subKey, value); } + } else if (tag === 'bundle') { + // 'bundle' keys contain bundle IDs + if (value == null) { + bundleArtifacts.delete(key); + } else { + bundleArtifacts.set(key, value); + } } else if (tag === 'transcript' || tag === 'snapshot') { // 'transcript' and 'snapshot' keys contain artifact description info. 
assert(value); // make TypeScript shut up @@ -1008,6 +1074,7 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { exporter, snapshotInfo, )); + const transcriptArtifactName = `${vatInfo.transcriptKey}.${transcriptInfo.endPos}`; await internal.transcriptStore.importSpan( transcriptArtifactName, @@ -1016,6 +1083,15 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { ); fetchedArtifacts.add(transcriptArtifactName); } + const bundleArtifactNames = Array.from(bundleArtifacts.keys()).sort(); + for await (const bundleArtifactName of bundleArtifactNames) { + await internal.bundleStore.importBundle( + bundleArtifactName, + exporter, + bundleArtifacts.get(bundleArtifactName), + ); + } + if (!includeHistorical) { return store; } @@ -1042,6 +1118,9 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { exporter, artifactMetadata.get(metadataKey), ); + } else if (artifactName.startsWith('bundle.')) { + // already taken care of + continue; } else { Fail`unknown artifact type: ${artifactName}`; } diff --git a/packages/swing-store/src/util.js b/packages/swing-store/src/util.js new file mode 100644 index 00000000000..313c16574f2 --- /dev/null +++ b/packages/swing-store/src/util.js @@ -0,0 +1,16 @@ +import { Buffer } from 'buffer'; + +/** + * This is a polyfill for the `buffer` function from Node's + * 'stream/consumers' package, which unfortunately only exists in newer versions + * of Node. 
+ * + * @param {AsyncIterable} inStream + */ +export const buffer = async inStream => { + const chunks = []; + for await (const chunk of inStream) { + chunks.push(chunk); + } + return Buffer.concat(chunks); +}; diff --git a/packages/swing-store/test/bohr-module.js b/packages/swing-store/test/bohr-module.js new file mode 100644 index 00000000000..2a084200113 --- /dev/null +++ b/packages/swing-store/test/bohr-module.js @@ -0,0 +1,3 @@ +export function bar() { + return 47; +} diff --git a/packages/swing-store/test/faux-module.js b/packages/swing-store/test/faux-module.js new file mode 100644 index 00000000000..936ec6594de --- /dev/null +++ b/packages/swing-store/test/faux-module.js @@ -0,0 +1,3 @@ +export function foo() { + return 'yo!'; +} diff --git a/packages/swing-store/test/test-exportImport.js b/packages/swing-store/test/test-exportImport.js index f6b38aeccfa..95404d48650 100644 --- a/packages/swing-store/test/test-exportImport.js +++ b/packages/swing-store/test/test-exportImport.js @@ -7,6 +7,7 @@ import fs from 'fs'; import test from 'ava'; // eslint-disable-next-line import/no-extraneous-dependencies import tmp from 'tmp'; +import bundleSource from '@endo/bundle-source'; import { initSwingStore, @@ -52,6 +53,13 @@ const tmpDir = prefix => }); }); +async function embundle(filename) { + const bundleFile = new URL(filename, import.meta.url).pathname; + const bundle = await bundleSource(bundleFile); + const bundleID = `b1-${bundle.endoZipBase64Sha512}`; + return [bundleID, bundle]; +} + function actLikeAVatRunningACrank(vat, ks, crank, doFail) { const { kvStore, transcriptStore } = ks; const { vatID } = vat; @@ -183,6 +191,14 @@ async function testExportImport( }); const { kernelStorage, debug } = ssOut; + const [bundleID1, bundle1] = await embundle('./faux-module.js'); + const [bundleID2, bundle2] = await embundle('./bohr-module.js'); + + kernelStorage.bundleStore.addBundle(bundleID1, bundle1); + kernelStorage.bundleStore.addBundle(bundleID2, bundle2); + + const 
[bundleIDA, bundleIDB] = [bundleID1, bundleID2].sort(); + const vats = [ { vatID: 'vatA', endPos: 0 }, { vatID: 'vatB', endPos: 0 }, @@ -225,6 +241,8 @@ async function testExportImport( t.deepEqual(exportData, feedData); t.deepEqual(exportData, [ + [`bundle.${bundleIDA}`, `${bundleIDA}`], + [`bundle.${bundleIDB}`, `${bundleIDB}`], ['kv.brigadoon', 'here during 16'], ['kv.kval', 'set in 16'], ['kv.vatA.monotonic.10', 'more and more'], @@ -281,6 +299,10 @@ async function testExportImport( ], ]); + expectedArtifactNames = Array.from(expectedArtifactNames); + expectedArtifactNames.push(`bundle.${bundleIDA}`); + expectedArtifactNames.push(`bundle.${bundleIDB}`); + const artifactNames = []; for await (const name of exporter.getArtifactNames()) { artifactNames.push(name); diff --git a/packages/swing-store/test/test-state.js b/packages/swing-store/test/test-state.js index c2629428aff..00aefa42acb 100644 --- a/packages/swing-store/test/test-state.js +++ b/packages/swing-store/test/test-state.js @@ -5,6 +5,8 @@ import '@endo/init/debug.js'; import tmp from 'tmp'; import test from 'ava'; +import bundleSource from '@endo/bundle-source'; + import { initSwingStore, openSwingStore, @@ -26,6 +28,13 @@ const tmpDir = prefix => }); }); +async function embundle(filename) { + const bundleFile = new URL(filename, import.meta.url).pathname; + const bundle = await bundleSource(bundleFile); + const bundleID = `b1-${bundle.endoZipBase64Sha512}`; + return [bundleID, bundle]; +} + function* iterate(kvStore, start, end) { if (kvStore.has(start)) { yield start; @@ -240,3 +249,48 @@ test('transcriptStore abort', async t => { const reader = ss2.readSpan('st1', 0); t.deepEqual(Array.from(reader), ['first']); // and not 'second' }); + +async function testBundleStore(t, dbDir) { + const exportLog = makeExportLog(); + const { kernelStorage, hostStorage } = initSwingStore(dbDir, { + exportCallback: exportLog.callback, + }); + const { bundleStore } = kernelStorage; + const { commit, close } = 
hostStorage; + + const [bundleID1, bundle1] = await embundle('./faux-module.js'); + const [bundleID2, bundle2] = await embundle('./bohr-module.js'); + + t.falsy(bundleStore.hasBundle(bundleID1)); + t.falsy(bundleStore.hasBundle(bundleID2)); + t.falsy(bundleStore.hasBundle('b1-obviouslyfake')); + + bundleStore.addBundle(bundleID1, bundle1); + bundleStore.addBundle(bundleID2, bundle2); + + t.truthy(bundleStore.hasBundle(bundleID1)); + t.truthy(bundleStore.hasBundle(bundleID2)); + t.falsy(bundleStore.hasBundle('b1-obviouslyfake')); + + bundleStore.deleteBundle(bundleID1); + + t.falsy(bundleStore.hasBundle(bundleID1)); + t.truthy(bundleStore.hasBundle(bundleID2)); + t.falsy(bundleStore.hasBundle('b1-obviouslyfake')); + + const rebundle2 = bundleStore.getBundle(bundleID2); + t.deepEqual(bundle2, rebundle2); + await commit(); + await close(); +} + +test('in-memory bundleStore read/write', async t => { + await testBundleStore(t, null); +}); + +test('persistent bundleStore read/write', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + t.is(isSwingStore(dbDir), false); + await testBundleStore(t, dbDir); +}); From 80a8d87bbad05b951132fe3e9cd6b80eb041a8f5 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Wed, 22 Mar 2023 22:49:11 -0700 Subject: [PATCH 2/4] improve types on bundleStore and exporter/importer * buffer() should accept Uint8Array arrays, not just the Node.js-specific Buffer * when the importer calls getExportData or getArtifactNames, it should accept both sync and async iterables * bundleStore.addBundle() is now defined to accept a Bundle (of any flavor), and uses an internal assertion to check that it's only being given an EndoZipBase64Bundle. This supports an upcoming change which will accept NestedEvaluateBundle too. 
* our Bundle definitions were using the wrong syntax for the optional sourceMap property, causing problems for tests that want { moduleFormat, source } to qualify --- packages/swing-store/src/bundleStore.js | 20 ++++++++++++-------- packages/swing-store/src/swingStore.js | 11 ++++++++--- packages/swing-store/src/util.js | 2 +- 3 files changed, 21 insertions(+), 12 deletions(-) diff --git a/packages/swing-store/src/bundleStore.js b/packages/swing-store/src/bundleStore.js index d078837b803..47509d38593 100644 --- a/packages/swing-store/src/bundleStore.js +++ b/packages/swing-store/src/bundleStore.js @@ -8,14 +8,12 @@ import { Fail, q } from '@agoric/assert'; import { buffer } from './util.js'; /** - * @typedef { { moduleFormat: 'getExport', source: string, sourceMap: string? } } GetExportBundle - * @typedef { { moduleFormat: 'nestedEvaluate', source: string, sourceMap: string? } } NestedEvaluateBundle + * @typedef { { moduleFormat: 'getExport', source: string, sourceMap?: string } } GetExportBundle + * @typedef { { moduleFormat: 'nestedEvaluate', source: string, sourceMap?: string } } NestedEvaluateBundle * @typedef { { moduleFormat: 'endoZipBase64', endoZipBase64: string, endoZipBase64Sha512: string } } EndoZipBase64Bundle - * @typedef { EndoZipBase64Bundle | GetExportBundle | NestedEvaluateBundle } Bundle_proper + * @typedef { EndoZipBase64Bundle | GetExportBundle | NestedEvaluateBundle } Bundle */ /** - * @typedef { { moduleFormat: string, endoZipBase64: string, endoZipBase64Sha512: string } } Bundle - * * @typedef { import('./swingStore').SwingStoreExporter } SwingStoreExporter * * @typedef {{ @@ -74,9 +72,10 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { * @param {Bundle} bundle */ function addBundle(bundleID, bundle) { - const { moduleFormat, endoZipBase64, endoZipBase64Sha512 } = bundle; - moduleFormat === 'endoZipBase64' || - Fail`unsupported module format ${q(moduleFormat)}`; + const { moduleFormat } = bundle; + if (moduleFormat 
!== 'endoZipBase64') + throw Fail`unsupported module format ${q(moduleFormat)}`; + const { endoZipBase64, endoZipBase64Sha512 } = bundle; bundleID === bundleIdFromHash(endoZipBase64Sha512) || Fail`bundleID ${q(bundleID)} does not match bundle`; ensureTxn(); @@ -103,6 +102,10 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { `); sqlGetBundle.pluck(true); + /** + * @param {string} bundleID + * @returns {EndoZipBase64Bundle} + */ function getBundle(bundleID) { bundleID.startsWith('b1-') || Fail`invalid bundleID ${q(bundleID)}`; const rawBundle = sqlGetBundle.get(bundleID); @@ -200,6 +203,7 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { const artifactChunks = exporter.getArtifact(name); const inStream = Readable.from(artifactChunks); const rawBundle = await buffer(inStream); + /** @type {EndoZipBase64Bundle} */ const bundle = harden({ moduleFormat: 'endoZipBase64', endoZipBase64Sha512: bundleID.substring(3), diff --git a/packages/swing-store/src/swingStore.js b/packages/swing-store/src/swingStore.js index 2a1e3f8030b..207e6fd4430 100644 --- a/packages/swing-store/src/swingStore.js +++ b/packages/swing-store/src/swingStore.js @@ -126,6 +126,11 @@ function getKeyType(key) { * }} SwingStore */ +/** + * @template T + * @typedef { Iterable | AsyncIterable } AnyIterable + */ + /** * @typedef {[ * key: string, @@ -141,7 +146,7 @@ function getKeyType(key) { * the concurrent activity of other swingStore instances, the data representing * the commit point will stay consistent and available. * - * @property {() => AsyncIterable} getExportData + * @property {() => AnyIterable} getExportData * * Get a full copy of the first-stage export data (key-value pairs) from the * swingStore. 
This represents both the contents of the KVStore (excluding host @@ -156,7 +161,7 @@ function getKeyType(key) { * - transcript.${vatID}.${startPos} = ${{ vatID, startPos, endPos, hash }} * - transcript.${vatID}.current = ${{ vatID, startPos, endPos, hash }} * - * @property {() => AsyncIterable} getArtifactNames + * @property {() => AnyIterable} getArtifactNames * * Get a list of name of artifacts available from the swingStore. A name returned * by this method guarantees that a call to `getArtifact` on the same exporter @@ -167,7 +172,7 @@ function getKeyType(key) { * - transcript.${vatID}.${startPos}.${endPos} * - snapshot.${vatID}.${endPos} * - * @property {(name: string) => AsyncIterable} getArtifact + * @property {(name: string) => AnyIterable} getArtifact * * Retrieve an artifact by name. May throw if the artifact is not available, * which can occur if the artifact is historical and wasn't been preserved. diff --git a/packages/swing-store/src/util.js b/packages/swing-store/src/util.js index 313c16574f2..4d4c765288e 100644 --- a/packages/swing-store/src/util.js +++ b/packages/swing-store/src/util.js @@ -5,7 +5,7 @@ import { Buffer } from 'buffer'; * 'stream/consumers' package, which unfortunately only exists in newer versions * of Node. * - * @param {AsyncIterable} inStream + * @param {import('./swingStore').AnyIterable} inStream */ export const buffer = async inStream => { const chunks = []; From 0f30dc0458e03d3b20fcbad792b660b52ef2dc6b Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Thu, 23 Mar 2023 14:45:06 -0700 Subject: [PATCH 3/4] swingstore: importer should call exporter.close() We defined close() on the Exporter type, and implemented it on the exporter, and we need it to be called: it closes the DB connection and discards any (accidental) changes that the (read-only) exporter might have made. However our importer was not calling close() on the exporter it receives. This changes the importer to call close(). 
It also changes a unit test to not close the exporter on the importer's behalf. Note that this means exporters are consumed by the importSwingStore() process. --- packages/swing-store/src/swingStore.js | 3 +++ packages/swing-store/test/test-exportImport.js | 2 -- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/swing-store/src/swingStore.js b/packages/swing-store/src/swingStore.js index 207e6fd4430..cd8ee958ae7 100644 --- a/packages/swing-store/src/swingStore.js +++ b/packages/swing-store/src/swingStore.js @@ -1098,6 +1098,8 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { } if (!includeHistorical) { + // eslint-disable-next-line @jessie.js/no-nested-await + await exporter.close(); return store; } @@ -1131,6 +1133,7 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { } await fetchedP; } + await exporter.close(); return store; } diff --git a/packages/swing-store/test/test-exportImport.js b/packages/swing-store/test/test-exportImport.js index 95404d48650..7aef5c2a67d 100644 --- a/packages/swing-store/test/test-exportImport.js +++ b/packages/swing-store/test/test-exportImport.js @@ -335,8 +335,6 @@ async function testExportImport( const afterDump = ssIn.debug.dump(keepSnapshots); t.deepEqual(beforeDump, afterDump); } - - exporter.close(); } const expectedCurrentArtifacts = [ From 6bf4ef25917bd230f8e88fbfe313dcb6d0773340 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Thu, 23 Mar 2023 14:43:11 -0700 Subject: [PATCH 4/4] swingset: fix Bundle definitions We had the wrong typescript syntax for the optional sourceMap property.
--- packages/SwingSet/src/types-external.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/SwingSet/src/types-external.js b/packages/SwingSet/src/types-external.js index 4dd1707d984..86d6fb6f548 100644 --- a/packages/SwingSet/src/types-external.js +++ b/packages/SwingSet/src/types-external.js @@ -13,8 +13,8 @@ export {}; */ /** - * @typedef { { moduleFormat: 'getExport', source: string, sourceMap: string? } } GetExportBundle - * @typedef { { moduleFormat: 'nestedEvaluate', source: string, sourceMap: string? } } NestedEvaluateBundle + * @typedef { { moduleFormat: 'getExport', source: string, sourceMap?: string } } GetExportBundle + * @typedef { { moduleFormat: 'nestedEvaluate', source: string, sourceMap?: string } } NestedEvaluateBundle * @typedef { EndoZipBase64Bundle | GetExportBundle | NestedEvaluateBundle } Bundle * * @typedef {{