Skip to content

Commit

Permalink
Merge pull request #7176 from Agoric/7089-bundlestore
Browse files Browse the repository at this point in the history
feat: implement bundleStore
  • Loading branch information
mergify[bot] authored Mar 25, 2023
2 parents 55c7978 + 6bf4ef2 commit 324eee0
Show file tree
Hide file tree
Showing 10 changed files with 494 additions and 75 deletions.
4 changes: 2 additions & 2 deletions packages/SwingSet/src/types-external.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@ export {};
*/

/**
* @typedef { { moduleFormat: 'getExport', source: string, sourceMap: string? } } GetExportBundle
* @typedef { { moduleFormat: 'nestedEvaluate', source: string, sourceMap: string? } } NestedEvaluateBundle
* @typedef { { moduleFormat: 'getExport', source: string, sourceMap?: string } } GetExportBundle
* @typedef { { moduleFormat: 'nestedEvaluate', source: string, sourceMap?: string } } NestedEvaluateBundle
* @typedef { EndoZipBase64Bundle | GetExportBundle | NestedEvaluateBundle } Bundle
*
* @typedef {{
Expand Down
5 changes: 4 additions & 1 deletion packages/swing-store/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,11 @@
"dependencies": {
"@agoric/assert": "^0.5.1",
"@agoric/internal": "^0.2.1",
"readline-transform": "^1.0.0",
"@endo/base64": "^0.2.28",
"@endo/bundle-source": "^2.4.2",
"@endo/check-bundle": "^0.2.14",
"better-sqlite3": "^8.2.0",
"readline-transform": "^1.0.0",
"tmp": "^0.2.1"
},
"devDependencies": {
Expand Down
248 changes: 248 additions & 0 deletions packages/swing-store/src/bundleStore.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,248 @@
// @ts-check
import { createHash } from 'crypto';
import { Readable } from 'stream';
import { Buffer } from 'buffer';
import { encodeBase64, decodeBase64 } from '@endo/base64';
import { checkBundle } from '@endo/check-bundle/lite.js';
import { Fail, q } from '@agoric/assert';
import { buffer } from './util.js';

/**
* @typedef { { moduleFormat: 'getExport', source: string, sourceMap?: string } } GetExportBundle
* @typedef { { moduleFormat: 'nestedEvaluate', source: string, sourceMap?: string } } NestedEvaluateBundle
* @typedef { { moduleFormat: 'endoZipBase64', endoZipBase64: string, endoZipBase64Sha512: string } } EndoZipBase64Bundle
* @typedef { EndoZipBase64Bundle | GetExportBundle | NestedEvaluateBundle } Bundle
*/
/**
* @typedef { import('./swingStore').SwingStoreExporter } SwingStoreExporter
*
* @typedef {{
* addBundle: (bundleID: string, bundle: Bundle) => void;
* hasBundle: (bundleID: string) => boolean
* getBundle: (bundleID: string) => Bundle;
* deleteBundle: (bundleID: string) => void;
* }} BundleStore
*
* @typedef {{
* exportBundle: (name: string) => AsyncIterable<Uint8Array>,
* importBundle: (artifactName: string, exporter: SwingStoreExporter, bundleID: string) => void,
* getExportRecords: () => Iterable<[key: string, value: string]>,
* getArtifactNames: () => AsyncIterable<string>,
* }} BundleStoreInternal
*
* @typedef {{
*   dumpBundles: () => Record<string, string>,
* }} BundleStoreDebug
*
*/

/**
* @param {*} db
* @param {() => void} ensureTxn
* @param {(key: string, value: string | undefined) => void} noteExport
* @returns {BundleStore & BundleStoreInternal & BundleStoreDebug}
*/
/**
 * Create the swing-store's bundle repository, backed by the `bundles`
 * table of the given SQLite database.
 *
 * Bundles are stored as raw zip bytes (BLOB) keyed by bundleID
 * (`b1-${sha512-of-contents}`), and are surfaced to consumers in
 * `endoZipBase64` form.
 *
 * @param {*} db  better-sqlite3 database handle
 * @param {() => void} ensureTxn  invoked before any mutation so the caller
 *   can make sure a transaction is open
 * @param {(key: string, value: string | undefined) => void} noteExport
 *   hook invoked on add/delete to keep export-data records in sync;
 *   a value of `undefined` signals deletion
 * @returns {BundleStore & BundleStoreInternal & BundleStoreDebug}
 */
export function makeBundleStore(db, ensureTxn, noteExport = () => {}) {
  db.exec(`
    CREATE TABLE IF NOT EXISTS bundles (
      bundleID TEXT,
      bundle BLOB,
      PRIMARY KEY (bundleID)
    )
  `);

  // Name under which a bundle appears in export data and artifacts.
  function bundleArtifactName(bundleID) {
    return `bundle.${bundleID}`;
  }

  // A bundleID commits to the bundle contents: 'b1-' + sha512 of the zip.
  function bundleIdFromHash(hash) {
    return `b1-${hash}`;
  }

  const sqlAddBundle = db.prepare(`
    INSERT OR REPLACE INTO bundles
      (bundleID, bundle)
    VALUES (?, ?)
  `);

  /**
   * Store a bundle. Here the bundle itself is presumed valid.
   *
   * @param {string} bundleID
   * @param {Bundle} bundle
   */
  function addBundle(bundleID, bundle) {
    const { moduleFormat } = bundle;
    if (moduleFormat !== 'endoZipBase64')
      throw Fail`unsupported module format ${q(moduleFormat)}`;
    const { endoZipBase64, endoZipBase64Sha512 } = bundle;
    // The ID must match the hash recorded inside the bundle itself.
    bundleID === bundleIdFromHash(endoZipBase64Sha512) ||
      Fail`bundleID ${q(bundleID)} does not match bundle`;
    ensureTxn();
    // Store the decoded zip bytes rather than base64 text to save space.
    sqlAddBundle.run(bundleID, decodeBase64(endoZipBase64));
    noteExport(bundleArtifactName(bundleID), bundleID);
  }

  const sqlHasBundle = db.prepare(`
    SELECT count(*)
    FROM bundles
    WHERE bundleID = ?
  `);
  sqlHasBundle.pluck(true);

  /**
   * Test whether a bundle is present in the store.
   *
   * @param {string} bundleID
   * @returns {boolean}
   */
  function hasBundle(bundleID) {
    const count = sqlHasBundle.get(bundleID);
    return count !== 0;
  }

  const sqlGetBundle = db.prepare(`
    SELECT bundle
    FROM bundles
    WHERE bundleID = ?
  `);
  sqlGetBundle.pluck(true);

  /**
   * Fetch a stored bundle, re-encoded into `endoZipBase64` form.
   *
   * @param {string} bundleID
   * @returns {EndoZipBase64Bundle}
   */
  function getBundle(bundleID) {
    bundleID.startsWith('b1-') || Fail`invalid bundleID ${q(bundleID)}`;
    const rawBundle = sqlGetBundle.get(bundleID);
    rawBundle || Fail`bundle ${q(bundleID)} not found`;
    return harden({
      moduleFormat: 'endoZipBase64',
      // strip the 'b1-' prefix to recover the content hash
      endoZipBase64Sha512: bundleID.substring(3),
      endoZipBase64: encodeBase64(rawBundle),
    });
  }

  const sqlDeleteBundle = db.prepare(`
    DELETE FROM bundles
    WHERE bundleID = ?
  `);

  /**
   * Remove a bundle if present, and retract its export record.
   *
   * @param {string} bundleID
   */
  function deleteBundle(bundleID) {
    if (hasBundle(bundleID)) {
      ensureTxn();
      sqlDeleteBundle.run(bundleID);
      // `undefined` tells the exporter that this key is gone.
      noteExport(bundleArtifactName(bundleID), undefined);
    }
  }

  /**
   * Read a bundle and return it as a stream of data suitable for export to
   * another store.
   *
   * Bundle artifact names should be strings of the form:
   *   `bundle.${bundleID}`
   *
   * @param {string} name
   *
   * @yields {Uint8Array}
   * @returns {AsyncIterable<Uint8Array>}
   */
  async function* exportBundle(name) {
    typeof name === 'string' || Fail`artifact name must be a string`;
    const parts = name.split('.');
    const [type, bundleID] = parts;
    // prettier-ignore
    (parts.length === 2 && type === 'bundle') ||
      Fail`expected artifact name of the form 'bundle.{bundleID}', saw ${q(name)}`;
    bundleID.startsWith('b1-') || Fail`invalid bundleID ${q(bundleID)}`;
    // Stream the stored zip bytes directly.  Going through getBundle()
    // would base64-encode the BLOB only to immediately decode it again,
    // wasting O(size) work and memory on large bundles.
    const rawBundle = sqlGetBundle.get(bundleID);
    rawBundle || Fail`bundle ${q(bundleID)} not found`;
    yield* Readable.from(Buffer.from(rawBundle));
  }

  const sqlGetBundleIDs = db.prepare(`
    SELECT bundleID
    FROM bundles
    ORDER BY bundleID
  `);
  sqlGetBundleIDs.pluck(true);

  /**
   * Obtain artifact metadata records for bundles contained in this store.
   *
   * @yields {[key: string, value: string]}
   * @returns {Iterable<[key: string, value: string]>}
   */
  function* getExportRecords() {
    for (const bundleID of sqlGetBundleIDs.iterate()) {
      yield [bundleArtifactName(bundleID), bundleID];
    }
  }

  /**
   * Obtain the names of all bundle artifacts, in bundleID order.
   *
   * @yields {string}
   * @returns {AsyncIterable<string>}
   */
  async function* getArtifactNames() {
    for (const bundleID of sqlGetBundleIDs.iterate()) {
      yield bundleArtifactName(bundleID);
    }
  }

  /**
   * Compute the lowercase hex SHA-512 digest of some bytes; handed to
   * checkBundle so it can verify contents against the bundleID.
   *
   * @param {Uint8Array} bytes
   * @returns {string}
   */
  function computeSha512(bytes) {
    const hash = createHash('sha512');
    hash.update(bytes);
    // digest('hex') is the idiomatic equivalent of digest().toString('hex')
    return hash.digest('hex');
  }

  /**
   * Import a bundle artifact, verifying its contents against its claimed ID.
   *
   * @param {string} name Artifact name of the bundle
   * @param {SwingStoreExporter} exporter Whence to get the bits
   * @param {string} bundleID Bundle ID of the bundle
   * @returns {Promise<void>}
   */
  async function importBundle(name, exporter, bundleID) {
    const parts = name.split('.');
    const [type, bundleIDkey] = parts;
    // prettier-ignore
    parts.length === 2 && type === 'bundle' ||
      Fail`expected artifact name of the form 'bundle.{bundleID}', saw '${q(name)}'`;
    bundleIDkey === bundleID ||
      Fail`bundle artifact name ${name} doesn't match bundleID ${bundleID}`;
    bundleID.startsWith('b1-') || Fail`invalid bundleID ${q(bundleID)}`;
    const artifactChunks = exporter.getArtifact(name);
    const inStream = Readable.from(artifactChunks);
    const rawBundle = await buffer(inStream);
    /** @type {EndoZipBase64Bundle} */
    const bundle = harden({
      moduleFormat: 'endoZipBase64',
      endoZipBase64Sha512: bundleID.substring(3),
      endoZipBase64: encodeBase64(rawBundle),
    });
    // Assert that the bundle contents match the ID and hash
    await checkBundle(bundle, computeSha512, bundleID);
    addBundle(bundleID, bundle);
  }

  const sqlDumpBundles = db.prepare(`
    SELECT bundleID, bundle
    FROM bundles
    ORDER BY bundleID
  `);

  /**
   * debug function to dump active bundles
   *
   * @returns {Record<string, string>} map from bundleID to base64 zip bytes
   */
  function dumpBundles() {
    const sql = sqlDumpBundles;
    const dump = {};
    for (const row of sql.iterate()) {
      const { bundleID, bundle } = row;
      dump[bundleID] = encodeBase64(bundle);
    }
    return dump;
  }

  return harden({
    addBundle,
    hasBundle,
    getBundle,
    deleteBundle,
    getExportRecords,
    getArtifactNames,
    exportBundle,
    importBundle,

    dumpBundles,
  });
}
17 changes: 1 addition & 16 deletions packages/swing-store/src/snapStore.js
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
// @ts-check
import { Buffer } from 'buffer';
import { createHash } from 'crypto';
import { finished as finishedCallback, Readable } from 'stream';
import { promisify } from 'util';
import { createGzip, createGunzip } from 'zlib';
import { Fail, q } from '@agoric/assert';
import { aggregateTryFinally, PromiseAllOrErrors } from '@agoric/internal';
import { fsStreamReady } from '@agoric/internal/src/fs-stream.js';
import { buffer } from './util.js';

/**
* @typedef {object} SnapshotResult
Expand Down Expand Up @@ -52,21 +52,6 @@ import { fsStreamReady } from '@agoric/internal/src/fs-stream.js';
*
*/

/**
 * Backport of the `buffer` helper from Node's 'stream/consumers' package,
 * which unfortunately only exists in newer versions of Node.
 *
 * Drains an async iterable of Buffer chunks and joins them into a single
 * Buffer, preserving arrival order.
 *
 * @param {AsyncIterable<Buffer>} inStream
 * @returns {Promise<Buffer>} the concatenation of every chunk read
 */
export const buffer = async inStream => {
  /** @type {Buffer[]} */
  const collected = [];
  for await (const piece of inStream) collected.push(piece);
  return Buffer.concat(collected);
};

const finished = promisify(finishedCallback);

const noPath = /** @type {import('fs').PathLike} */ (
Expand Down
Loading

0 comments on commit 324eee0

Please sign in to comment.