From bfc697e036c60e51b31d1d0f384deeaf7cc50fcd Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Mon, 11 May 2020 15:29:32 +1000 Subject: [PATCH 01/23] CID s/toBaseEncodedString/toString --- lib/util.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/util.js b/lib/util.js index 1efb88b..53e1e74 100644 --- a/lib/util.js +++ b/lib/util.js @@ -13,9 +13,7 @@ function toKey (key, method) { } function cidToKey (cid) { - // toBaseEncodedString() is supposed to do this automatically but let's be explicit to be - // sure & future-proof - return cid.toBaseEncodedString(cid.version === 0 ? 'base58btc' : 'base32') + return cid.toString() // cidv0 gets base58btc, v1 gets base32 } function verifyRoots (roots) { From 634884b9dcab7f86ee51725ba3e3fed0104e7f17 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Mon, 11 May 2020 15:31:09 +1000 Subject: [PATCH 02/23] update deps --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 3563019..070426c 100644 --- a/package.json +++ b/package.json @@ -40,9 +40,9 @@ "standard": "^14.3.4" }, "dependencies": { - "@ipld/block": "^4.0.0", + "@ipld/block": "^4.0.2", "cids": "^0.8.0", - "interface-datastore": "^1.0.4", + "interface-datastore": "^1.0.2", "multicodec": "^1.0.1", "varint": "^5.0.0" } From b13a0a8f489b3efd87279dd7ef57f909a21ccf1d Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Wed, 13 May 2020 17:45:30 +1000 Subject: [PATCH 03/23] feat: switch to @ipld/multiformats --- car-browser.js | 6 +- car.js | 24 ++++---- datastore.js | 11 ++-- lib/coding-browser.js | 79 ++++++++++++-------------- lib/coding.js | 36 ++++++------ lib/raw.js | 17 +++--- lib/reader-browser.js | 4 +- lib/reader-writer-iface.js | 15 +++-- lib/reader.js | 12 ++-- lib/util.js | 37 ++++++++---- lib/writer-stream.js | 12 ++-- package.json | 9 ++- test/fixture-data.js | 93 ++++++++++++++++++------------- test/test-decode.js | 18 +++--- test/test-encode.js | 26 +++++---- test/test-errors.js | 
16 +++--- test/test-large.js | 23 +++++--- test/test-query.js | 19 ++++--- test/test-raw.js | 36 ++++++------ test/test-readbuffer.js | 11 ++-- test/test-readfile-writestream.js | 25 +++++---- test/test-readfile.js | 11 ++-- test/test-readstream.js | 21 ++++--- test/test-readstreamcomplete.js | 11 ++-- 24 files changed, 316 insertions(+), 256 deletions(-) diff --git a/car-browser.js b/car-browser.js index 3c183e5..3771ab9 100644 --- a/car-browser.js +++ b/car-browser.js @@ -23,10 +23,10 @@ const CarDatastore = require('./datastore') * @param {Buffer|Uint8Array} buffer the byte contents of a CAR archive * @returns {CarDatastore} a read-only CarDatastore. */ -async function readBuffer (buffer) { - const reader = await createBufferReader(buffer) +async function readBuffer (multiformats, buffer) { + const reader = await createBufferReader(multiformats, buffer) const writer = new NoWriter() - return new CarDatastore(reader, writer) + return new CarDatastore(multiformats, reader, writer) } module.exports.readBuffer = readBuffer diff --git a/car.js b/car.js index 8cbbb83..2bd45e0 100644 --- a/car.js +++ b/car.js @@ -35,10 +35,10 @@ const { indexer, readRaw } = require('./lib/raw') * @param {string} file a path to a file containing CAR archive data. * @returns {CarDatastore} a read-only CarDatastore. */ -async function readFileComplete (file) { - const reader = await createFileReader(file) +async function readFileComplete (multiformats, file) { + const reader = await createFileReader(multiformats, file) const writer = new NoWriter() - return new CarDatastore(reader, writer) + return new CarDatastore(multiformats, reader, writer) } /** @@ -65,10 +65,10 @@ async function readFileComplete (file) { * archive as a binary stream. * @returns {CarDatastore} a read-only CarDatastore. 
*/ -async function readStreamComplete (stream) { - const reader = await createStreamCompleteReader(stream) +async function readStreamComplete (multiformats, stream) { + const reader = await createStreamCompleteReader(multiformats, stream) const writer = new NoWriter() - return new CarDatastore(reader, writer) + return new CarDatastore(multiformats, reader, writer) } /** @@ -98,10 +98,10 @@ async function readStreamComplete (stream) { * archive as a binary stream. * @returns {CarDatastore} a read-only CarDatastore. */ -async function readStreaming (stream) { - const reader = await createStreamingReader(stream) +async function readStreaming (multiformats, stream) { + const reader = await createStreamingReader(multiformats, stream) const writer = new NoWriter() - return new CarDatastore(reader, writer) + return new CarDatastore(multiformats, reader, writer) } /** @@ -129,10 +129,10 @@ async function readStreaming (stream) { * @param {WritableStream} stream a writable stream * @returns {CarDatastore} an append-only, streaming CarDatastore. */ -async function writeStream (stream) { +async function writeStream (multiformats, stream) { const reader = new Reader() - const writer = await createStreamWriter(stream) - return new CarDatastore(reader, writer) + const writer = await createStreamWriter(multiformats, stream) + return new CarDatastore(multiformats, reader, writer) } async function traverseBlock (block, get, car, concurrency = 1, seen = new Set()) { diff --git a/datastore.js b/datastore.js index 39cac55..c4b6ce8 100644 --- a/datastore.js +++ b/datastore.js @@ -9,7 +9,8 @@ const { toKey } = require('./lib/util') * @class */ class CarDatastore { - constructor (reader, writer) { + constructor (multiformats, reader, writer) { + this._multiformats = multiformats this._reader = reader this._writer = writer } @@ -31,7 +32,7 @@ class CarDatastore { * @return {Buffer} the IPLD block data referenced by the CID. 
*/ async get (key) { - key = toKey(key, 'get') + key = toKey(this._multiformats, key, 'get') return this._reader.get(key) } @@ -52,7 +53,7 @@ class CarDatastore { * @return {boolean} indicating whether the key exists in this Datastore. */ async has (key) { - key = toKey(key, 'has') + key = toKey(this._multiformats, key, 'has') return this._reader.has(key) } @@ -75,7 +76,7 @@ class CarDatastore { * `CID`. */ async put (key, value) { - key = toKey(key, 'put') + key = toKey(this._multiformats, key, 'put') if (!(value instanceof Uint8Array)) { throw new TypeError('put() can only receive Uint8Arrays or Buffers') } @@ -94,7 +95,7 @@ class CarDatastore { * the block. */ async delete (key) { - key = toKey(key, 'delete') + key = toKey(this._multiformats, key, 'delete') return this._writer.delete(key) } diff --git a/lib/coding-browser.js b/lib/coding-browser.js index f9b335c..f89dbac 100644 --- a/lib/coding-browser.js +++ b/lib/coding-browser.js @@ -1,7 +1,5 @@ const varint = require('varint') -const CID = require('cids') -const multicodec = require('multicodec') -const Block = require('@ipld/block') +const { isBlock, isCID } = require('./util') function bufferReader (buf) { let pos = 0 @@ -37,12 +35,11 @@ async function readVarint (reader) { return i } -async function readHeader (reader) { +async function readHeader (multiformats, reader) { const length = await readVarint(reader) const header = await reader.exactly(length) reader.seek(length) - const block = Block.decoder(header, 'dag-cbor') - return block.decode() + return multiformats.decode(header, 'dag-cbor') } async function readMultihash (reader) { @@ -61,13 +58,13 @@ async function readMultihash (reader) { return multihash } -async function readCid (reader) { +async function readCid (multiformats, reader) { const first = await reader.exactly(2) if (first[0] === 0x12 && first[1] === 0x20) { // cidv0 32-byte sha2-256 const bytes = await reader.exactly(34) reader.seek(34) - return new CID(bytes) + return new 
multiformats.CID(0, 0x70, new Uint8Array(bytes)) } const version = await readVarint(reader) @@ -76,42 +73,42 @@ async function readCid (reader) { } const codec = await readVarint(reader) const multihash = await readMultihash(reader) - return new CID(version, multicodec.getName(codec), multihash) + return new multiformats.CID(version, codec, new Uint8Array(multihash)) } -async function readBlockHead (reader) { +async function readBlockHead (multiformats, reader) { // length includes a CID + Block, where CID has a variable length // we have to deal with const totalLength = await readVarint(reader) const start = reader.pos - const cid = await readCid(reader) + const cid = await readCid(multiformats, reader) const length = totalLength - (reader.pos - start) // subtract CID length return { cid, length } } -async function readBlock (reader) { - const { cid, length } = await readBlockHead(reader) - const buf = await reader.exactly(length) +async function readBlock (multiformats, reader) { + const { cid, length } = await readBlockHead(multiformats, reader) + const binary = await reader.exactly(length) reader.seek(length) - return Block.create(buf, cid) + return { cid, binary } } -async function readBlockIndex (reader) { - const head = await readBlockHead(reader) +async function readBlockIndex (multiformats, reader) { + const head = await readBlockHead(multiformats, reader) head.offset = reader.pos reader.seek(head.length) return head } -function Decoder (reader) { - const headerPromise = readHeader(reader) +function Decoder (multiformats, reader) { + const headerPromise = readHeader(multiformats, reader) function blockReader (index) { return async function * blockIterator () { await headerPromise try { while ((await reader.upTo(8)).length > 0) { - yield await (index ? readBlockIndex(reader) : readBlock(reader)) + yield await (index ? 
readBlockIndex(multiformats, reader) : readBlock(multiformats, reader)) } } finally { await reader.close() @@ -125,8 +122,8 @@ function Decoder (reader) { } } -async function decode (reader) { - const decoder = Decoder(reader) +async function decode (multiformats, reader) { + const decoder = Decoder(multiformats, reader) const header = await decoder.header() const decoded = { version: header.version, @@ -139,7 +136,7 @@ async function decode (reader) { return decoded } -function Encoder (writer) { +function Encoder (multiformats, writer) { // none of this is wrapped in a mutex, that needs to happen above this to // avoid overwrites return { @@ -148,31 +145,29 @@ function Encoder (writer) { roots = [roots] } for (const root of roots) { - if (!CID.isCID(root)) { + if (!isCID(root)) { throw new TypeError('Roots must be CIDs') } } - const header = Block.encoder({ version: 1, roots }, 'dag-cbor').encode() + const header = await multiformats.encode({ version: 1, roots }, 'dag-cbor') await writer(Buffer.from(varint.encode(header.length))) await writer(header) }, async writeBlock (block) { - if (!Block.isBlock(block)) { - throw new TypeError('Block list must contain @ipld/block objects') + if (!isBlock(block)) { + throw new TypeError('Block list must be of type { cid, binary }') } - const cid = await block.cid() - const encoded = block.encode() - await writer(Buffer.from(varint.encode(cid.buffer.length + encoded.length))) - await writer(cid.buffer) - await writer(encoded) + await writer(Buffer.from(varint.encode(block.cid.buffer.length + block.binary.length))) + await writer(block.cid.buffer) + await writer(block.binary) } } } -async function encode (writer, roots, blocks) { - const encoder = Encoder(writer) +async function encode (multiformats, writer, roots, blocks) { + const encoder = Encoder(multiformats, writer) await encoder.setRoots(roots) for (const block of blocks) { await encoder.writeBlock(block) @@ -188,8 +183,8 @@ async function encode (writer, roots, blocks) 
{ * `{ version, roots[], blocks[] }`. * * Where `version` is always `1`, `roots` is an array of - * [CID](https://ghub.io/cids)s and `blocks` is an array of IPLD - * [Block](https://ghub.io/@ipld/block)s. + * [CID](https://ghub.io/cids)s and `blocks` is an array of IPLD blocks of the + * form `{ cid, binary }`. * * Not intended to be part of the public API of datastore-car but may currently be * invoked via `require('datastore-car/lib/coding').decodeBuffer`, or @@ -204,9 +199,9 @@ async function encode (writer, roots, blocks) { * `{ version, roots[], blocks[] }`. */ -async function decodeBuffer (buf) { +async function decodeBuffer (multiformats, buf) { const reader = bufferReader(buf) - return decode(reader) + return decode(multiformats, reader) } /* unnecessary, but this is possible: @@ -219,7 +214,7 @@ function BufferDecoder (buf) { /** * @name Car.encodeBuffer * @description - * Encode a set of IPLD [Block](https://ghub.io/@ipld/block)s in CAR format, + * Encode a set of IPLD blocks of the form `{ cid, binary }` in CAR format, * returning it as a single `Buffer`. * @function * @memberof Car @@ -228,15 +223,15 @@ function BufferDecoder (buf) { * @param {CID[]} roots an array of root [CID](https://ghub.io/cids)s to set in * the header of the archive. These are intended to be the merkle roots of all * blocks. - * @param {Block[]} blocks an array of IPLD [Block](https://ghub.io/@ipld/block)s + * @param {object[]} blocks an array of IPLD blocks of the form `{ cid, binary }` * to append to the archive. * @returns {Buffer} a `Buffer` representing the created archive. 
*/ -async function encodeBuffer (roots, blocks) { +async function encodeBuffer (multiformats, roots, blocks) { const bl = [] const writer = (buf) => { bl.push(buf) } - await encode(writer, roots, blocks) + await encode(multiformats, writer, roots, blocks) return Buffer.concat(bl) } diff --git a/lib/coding.js b/lib/coding.js index 873fc96..51f12f2 100644 --- a/lib/coding.js +++ b/lib/coding.js @@ -106,7 +106,7 @@ async function fileReader (file, options) { * * Where `version` is always `1`, `roots` is an array of * [CID](https://ghub.io/cids)s and `blocks` is an array of IPLD - * [Block](https://ghub.io/@ipld/block)s. + * blocks of the form `{ cid, binary }`. * * Not intended to be part of the public API of datastore-car but may currently be * invoked via `require('datastore-car/lib/coding').decodeFile`. @@ -119,14 +119,14 @@ async function fileReader (file, options) { * `{ version, roots[], blocks[] }`. */ -async function decodeFile (file, options) { +async function decodeFile (multiformats, file, options) { const reader = await fileReader(file, options) - return decode(reader) + return decode(multiformats, reader) } -async function FileDecoder (file, options) { +async function FileDecoder (multiformats, file, options) { const reader = await fileReader(file, options) - return Decoder(reader) + return Decoder(multiformats, reader) } /** @@ -139,7 +139,7 @@ async function FileDecoder (file, options) { * * Where `version` is always `1`, `roots` is an array of * [CID](https://ghub.io/cids)s and `blocks` is an array of IPLD - * [Block](https://ghub.io/@ipld/block)s. + * blocks of the form `{ cid, binary }`. * * Not intended to be part of the public API of datastore-car but may currently be * invoked via `require('datastore-car/lib/coding').decodeStream`. @@ -152,20 +152,20 @@ async function FileDecoder (file, options) { * `{ version, roots[], blocks[] }`. 
*/ -async function decodeStream (stream) { +async function decodeStream (multiformats, stream) { const reader = streamReader(stream) - return decode(reader) + return decode(multiformats, reader) } -function StreamDecoder (stream) { +function StreamDecoder (multiformats, stream) { const reader = streamReader(stream) - return Decoder(reader) + return Decoder(multiformats, reader) } /** * @name coding.encodeFile * @description - * Encode a set of IPLD [Block](https://ghub.io/@ipld/block)s in CAR format, + * Encode a set of IPLD blocks of the form `{ cid, binary }` in CAR format, * writing to a file. * * Not intended to be part of the public API of datastore-car but may currently be @@ -178,18 +178,18 @@ function StreamDecoder (stream) { * @param {CID[]} roots an array of root [CID](https://ghub.io/cids)s to set in * the header of the archive. These are intended to be the merkle roots of all * blocks. - * @param {Block[]} blocks an array of IPLD [Block](https://ghub.io/@ipld/block)s + * @param {object[]} blocks an array of IPLD blocks of the form `{ cid, binary }` * to append to the archive. */ -function encodeFile (file, roots, blocks) { - return pipelineAsync(encodeStream(roots, blocks), fs.createWriteStream(file)) +function encodeFile (multiformats, file, roots, blocks) { + return pipelineAsync(encodeStream(multiformats, roots, blocks), fs.createWriteStream(file)) } /** * @name coding.encodeStream * @description - * Encode a set of IPLD [Block](https://ghub.io/@ipld/block)s in CAR format, + * Encode a set of IPLD blocks of the form `{ cid, binary }`, * writing the data to a stream. * * There is currently no method to stream blocks into an encodeStream so you @@ -204,15 +204,15 @@ function encodeFile (file, roots, blocks) { * @param {CID[]} roots an array of root [CID](https://ghub.io/cids)s to set in * the header of the archive. These are intended to be the merkle roots of all * blocks. 
- * @param {Block[]} blocks an array of IPLD [Block](https://ghub.io/@ipld/block)s + * @param {object[]} blocks an array of IPLD blocks of the form `{ cid, binary }`. * to append to the archive. * @returns {ReadableStream} a stream that the CAR will be written to. */ -function encodeStream (roots, blocks) { +function encodeStream (multiformats, roots, blocks) { const stream = new PassThrough() const writer = createStreamWriter(stream) - encode(writer, roots, blocks) + encode(multiformats, writer, roots, blocks) .then(() => { stream.end() }).catch((err) => { diff --git a/lib/raw.js b/lib/raw.js index 9b71b1e..0667214 100644 --- a/lib/raw.js +++ b/lib/raw.js @@ -1,7 +1,6 @@ const { promisify } = require('util') const fs = require('fs') fs.read = promisify(fs.read) -const Block = require('@ipld/block') const { StreamDecoder, FileDecoder } = require('./coding') /** @@ -79,12 +78,12 @@ const { StreamDecoder, FileDecoder } = require('./coding') * the CID of the block located at start=`offset` with a length of `number` in * the CAR archive provided. */ -async function indexer (inp) { +async function indexer (multiformats, inp) { let decoder if (typeof inp === 'string') { - decoder = await FileDecoder(inp) + decoder = await FileDecoder(multiformats, inp) } else if (typeof inp === 'object' && typeof inp.pipe === 'function') { - decoder = StreamDecoder(inp) + decoder = StreamDecoder(multiformats, inp) } else { throw new TypeError('indexer() requires a file path or a ReadableStream') } @@ -107,23 +106,23 @@ async function indexer (inp) { * `fs.open()` or a `FileHandle` on `fs.promises.open()`. * @param {Object} blockIndex an index object of the style provided by * `CarDatastore.indexer()` (`{ cid, offset, length }`). - * @returns {Block} an IPLD [Block](https://ghub.io/@ipld/block) object. + * @returns {object} an IPLD block of the form `{ cid, binary }`. 
*/ async function readRaw (fd, blockIndex) { const cid = blockIndex.cid - const buf = Buffer.alloc(blockIndex.length) + const binary = Buffer.alloc(blockIndex.length) let read if (typeof fd === 'number') { - read = (await fs.read(fd, buf, 0, blockIndex.length, blockIndex.offset)).bytesRead + read = (await fs.read(fd, binary, 0, blockIndex.length, blockIndex.offset)).bytesRead } else if (typeof fd === 'object' && typeof fd.read === 'function') { - read = (await fd.read(buf, 0, blockIndex.length, blockIndex.offset)).bytesRead + read = (await fd.read(binary, 0, blockIndex.length, blockIndex.offset)).bytesRead } else { throw new TypeError('Bad fd') } if (read !== blockIndex.length) { throw new Error(`Failed to read entire block (${read} instead of ${blockIndex.length})`) } - return Block.create(buf, cid) + return { cid, binary } } module.exports.indexer = indexer diff --git a/lib/reader-browser.js b/lib/reader-browser.js index b2e4c96..f5b25b3 100644 --- a/lib/reader-browser.js +++ b/lib/reader-browser.js @@ -1,8 +1,8 @@ const coding = require('./coding-browser') const { createFromDecoded } = require('./reader-writer-iface') -async function createBufferReader (data) { - const decoded = await coding.decodeBuffer(data) +async function createBufferReader (multiformats, data) { + const decoded = await coding.decodeBuffer(multiformats, data) return createFromDecoded(decoded) } diff --git a/lib/reader-writer-iface.js b/lib/reader-writer-iface.js index 16dc326..3a630f9 100644 --- a/lib/reader-writer-iface.js +++ b/lib/reader-writer-iface.js @@ -1,5 +1,5 @@ const { Errors } = require('interface-datastore') -const { verifyRoots, cidToKey } = require('./util') +const { verifyRoots } = require('./util') class Reader { /* c8 ignore next 3 */ @@ -53,15 +53,15 @@ class DecodedReader extends Reader { } has (key) { - return this._carData.keys.indexOf(key) > -1 + return this._carData.keys.indexOf(key.toString()) > -1 } async get (key) { - const index = this._carData.keys.indexOf(key) + 
const index = this._carData.keys.indexOf(key.toString()) if (index < 0) { throw Errors.notFoundError() } - return this._carData.blocks[index].encode() + return this._carData.blocks[index].binary } keys () { @@ -94,19 +94,18 @@ class StreamingReader extends Reader { async * iterator (keysOnly) { // TODO: optimise `keysOnly` case by skipping over decoding blocks and just read the CIDs for await (const block of this._decoder.blocks()) { - const key = cidToKey(await block.cid()) + const key = block.cid.toString() if (keysOnly) { yield { key } } else { - const value = block.encode() - yield { key, value } + yield { key, value: block.binary } } } } } async function createFromDecoded (decoded) { - const cids = await Promise.all(decoded.blocks.map((b) => b.cid())) + const cids = decoded.blocks.map((b) => b.cid) decoded.keys = cids.map((c) => c.toString()) return new DecodedReader(decoded) } diff --git a/lib/reader.js b/lib/reader.js index 999ba7f..e9b438e 100644 --- a/lib/reader.js +++ b/lib/reader.js @@ -1,18 +1,18 @@ const coding = require('./coding') const { createFromDecoded, StreamingReader } = require('./reader-writer-iface') -async function createStreamCompleteReader (stream) { - const decoded = await coding.decodeStream(stream) +async function createStreamCompleteReader (multiformats, stream) { + const decoded = await coding.decodeStream(multiformats, stream) return createFromDecoded(decoded) } -async function createStreamingReader (stream) { - const decoder = coding.StreamDecoder(stream) +async function createStreamingReader (multiformats, stream) { + const decoder = coding.StreamDecoder(multiformats, stream) return new StreamingReader(decoder) } -async function createFileReader (data) { - const decoded = await coding.decodeFile(data) +async function createFileReader (multiformats, data) { + const decoded = await coding.decodeFile(multiformats, data) return createFromDecoded(decoded) } diff --git a/lib/util.js b/lib/util.js index 53e1e74..63aecf3 100644 --- 
a/lib/util.js +++ b/lib/util.js @@ -1,19 +1,33 @@ -const CID = require('cids') +// { cid, binary } +function isBlock (block) { + return typeof block === 'object' && isCID(block.cid) && isBuffer(block.binary) +} + +function isCID (key) { + return typeof key === 'object' && + isBuffer(key.buffer) && + isBuffer(key.multihash) && + (key.version === 0 || key.version === 1) && + (typeof key.code === 'number' || typeof key.codec === 'string') +} -function toKey (key, method) { - if (!CID.isCID(key)) { +function isBuffer (b) { + return (Buffer.isBuffer(b) || ( + b instanceof Uint8Array && + b.constructor.name === 'Uint8Array')) +} + +function toKey (multiformats, key, method) { + if (!isCID(key)) { try { - key = new CID(key.toString()) + key = new multiformats.CID(key.toString()) } catch (e) { + console.error(e) throw new TypeError(`${method}() only accepts CIDs or CID strings`) } } - return cidToKey(key) -} - -function cidToKey (cid) { - return cid.toString() // cidv0 gets base58btc, v1 gets base32 + return key // cidToKey(key) } function verifyRoots (roots) { @@ -21,13 +35,14 @@ function verifyRoots (roots) { roots = [roots] } for (const root of roots) { - if (!CID.isCID(root)) { + if (!isCID(root)) { throw new TypeError('Roots may only be a CID or an array of CIDs') } } return roots } +module.exports.isBlock = isBlock +module.exports.isCID = isCID module.exports.toKey = toKey -module.exports.cidToKey = cidToKey module.exports.verifyRoots = verifyRoots diff --git a/lib/writer-stream.js b/lib/writer-stream.js index 6456a2f..c9ba26e 100644 --- a/lib/writer-stream.js +++ b/lib/writer-stream.js @@ -1,14 +1,12 @@ -const CID = require('cids') -const Block = require('@ipld/block') const { Writer } = require('./reader-writer-iface') const { Encoder, createStreamWriter } = require('./coding') class StreamWriter extends Writer { - constructor (outStream) { + constructor (multiformats, outStream) { super() this._outStream = outStream const writer = createStreamWriter(outStream) - 
this._encoder = Encoder(writer) + this._encoder = Encoder(multiformats, writer) this._mutex = null } @@ -33,7 +31,7 @@ class StreamWriter extends Writer { // no roots, too late to set any now but we need to write the header this.setRoots([]) } - this._mutex = this._mutex.then(() => this._encoder.writeBlock(Block.create(value, new CID(key)))) + this._mutex = this._mutex.then(() => this._encoder.writeBlock({ cid: key, binary: value })) return this._mutex } @@ -49,8 +47,8 @@ class StreamWriter extends Writer { } } -function create (stream) { - return new StreamWriter(stream) +function create (multiformats, stream) { + return new StreamWriter(multiformats, stream) } module.exports = create diff --git a/package.json b/package.json index 070426c..5833445 100644 --- a/package.json +++ b/package.json @@ -31,19 +31,22 @@ }, "homepage": "https://github.com/rvagg/js-datastore-car#readme", "devDependencies": { + "@ipld/dag-cbor": "github:ipld/js-dag-cbor#multiformats", "bl": "^4.0.2", + "cids": "^0.8.0", "garbage": "0.0.0", "hundreds": "0.0.7", + "hundreds": "0.0.2", + "ipld-dag-pb": "^0.18.5", "jsdoc4readme": "^1.3.0", "mocha": "^7.2.0", "polendina": "^1.0.0", "standard": "^14.3.4" }, "dependencies": { - "@ipld/block": "^4.0.2", - "cids": "^0.8.0", "interface-datastore": "^1.0.2", - "multicodec": "^1.0.1", + "ipld-dag-cbor": "^0.15.2", + "multiformats": "0.0.2", "varint": "^5.0.0" } } diff --git a/test/fixture-data.js b/test/fixture-data.js index eaa2e4a..c6ec104 100644 --- a/test/fixture-data.js +++ b/test/fixture-data.js @@ -1,54 +1,71 @@ const assert = require('assert') - +const { DAGNode, DAGLink, util: pbUtil } = require('ipld-dag-pb') +// TODO: remove this +const { serialize: dagCborSerialize, cid: dagCborCid } = require('ipld-dag-cbor').util +// TODO: remove this const CID = require('cids') -const Block = require('@ipld/block') -const { DAGNode, DAGLink } = require('ipld-dag-pb') -const pbUtil = require('ipld-dag-pb').util +const multiformats = 
require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) -const rawBlocks = 'aaaa bbbb cccc zzzz'.split(' ').map((s) => Block.encoder(Buffer.from(s), 'raw')) +let rawBlocks const pbBlocks = [] const cborBlocks = [] -const allBlocks = [['raw', rawBlocks.slice(0, 3)], ['pb', pbBlocks], ['cbor', cborBlocks]] +let allBlocks let allBlocksFlattened -const acid = new CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm') +const acid = new multiformats.CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm') function toCBORStruct (name, link) { return { name, link } } -async function toBlock (pnd) { - const buf = pbUtil.serialize(pnd) - const cid = await pbUtil.cid(buf, { cidVersion: 0 }) - return Block.create(buf, cid) +async function toPbBlock (pnd) { + const binary = pbUtil.serialize(pnd) + const cid = new multiformats.CID((await pbUtil.cid(binary, { cidVersion: 0 })).toString()) + return { cid, binary } +} + +async function toCborBlock (cb) { + const binary = dagCborSerialize(cb) + const cid = new multiformats.CID((await dagCborCid(binary)).toString()) + return { cid, binary } +} + +async function toRawBlock (binary) { + const mh = await multiformats.multihash.hash(binary, 'sha2-256') + const cid = new multiformats.CID(1, multiformats.get('raw').code, mh) + return { cid, binary } } async function makeData () { - if (!pbBlocks.length) { + if (!rawBlocks) { + rawBlocks = await Promise.all('aaaa bbbb cccc zzzz'.split(' ').map((s) => toRawBlock(Buffer.from(s)))) + const pnd1 = new DAGNode(null, [ - new DAGLink('cat', await (rawBlocks[0].encode()).byteLength, await rawBlocks[0].cid()) + new DAGLink('cat', rawBlocks[0].binary.byteLength, new CID(rawBlocks[0].cid.toString())) ]) - pbBlocks.push(await toBlock(pnd1)) + pbBlocks.push(await toPbBlock(pnd1)) const pnd2 = new DAGNode(null, [ - new DAGLink('dog', await (rawBlocks[1].encode()).byteLength, await rawBlocks[1].cid()), - new DAGLink('first', pnd1.size, await pbBlocks[0].cid()) + 
new DAGLink('dog', rawBlocks[1].binary.byteLength, new CID(rawBlocks[1].cid.toString())), + new DAGLink('first', pnd1.size, new CID(pbBlocks[0].cid.toString()).toString()) ]) - pbBlocks.push(await toBlock(pnd2)) + pbBlocks.push(await toPbBlock(pnd2)) const pnd3 = new DAGNode(null, [ - new DAGLink('bear', await (rawBlocks[2].encode()).byteLength, await rawBlocks[2].cid()), - new DAGLink('second', pnd2.size, await pbBlocks[1].cid()) + new DAGLink('bear', rawBlocks[2].binary.byteLength, new CID(rawBlocks[2].cid.toString())), + new DAGLink('second', pnd2.size, new CID(pbBlocks[1].cid.toString())) ]) - pbBlocks.push(await toBlock(pnd3)) + pbBlocks.push(await toPbBlock(pnd3)) - const cbstructs = [toCBORStruct('blip', await pbBlocks[2].cid()), toCBORStruct('limbo', null)] + const cbstructs = [toCBORStruct('blip', pbBlocks[2].cid), toCBORStruct('limbo', null)] for (const b of cbstructs) { - cborBlocks.push(Block.encoder(b, 'dag-cbor')) + cborBlocks.push(await toCborBlock(b)) } - } - allBlocksFlattened = allBlocks.reduce((p, c) => p.concat(c[1]), []) + allBlocks = [['raw', rawBlocks.slice(0, 3)], ['pb', pbBlocks], ['cbor', cborBlocks]] + allBlocksFlattened = allBlocks.reduce((p, c) => p.concat(c[1]), []) + } return { rawBlocks, @@ -64,23 +81,23 @@ async function verifyDecoded (decoded, singleRoot) { assert.strictEqual(decoded.version, 1) assert.strictEqual(decoded.roots.length, singleRoot ? 
1 : 2) - assert.strictEqual(decoded.roots[0].toString(), (await cborBlocks[0].cid()).toString()) + assert.strictEqual(decoded.roots[0].toString(), cborBlocks[0].cid.toString()) if (!singleRoot) { - assert.strictEqual(decoded.roots[1].toString(), (await cborBlocks[1].cid()).toString()) + assert.strictEqual(decoded.roots[1].toString(), cborBlocks[1].cid.toString()) } assert.strictEqual(decoded.blocks.length, allBlocksFlattened.length) const expectedBlocks = allBlocksFlattened.slice() const expectedCids = [] for (const block of expectedBlocks) { - expectedCids.push((await block.cid()).toString()) + expectedCids.push(block.cid.toString()) } for (const block of decoded.blocks) { - const cid = await block.cid() + const cid = block.cid const index = expectedCids.indexOf(cid.toString()) assert.ok(index >= 0, 'got expected block') - assert.strictEqual(expectedBlocks[index].encode().toString('hex'), block.encode().toString('hex'), 'got expected block content') + assert.strictEqual(expectedBlocks[index].binary.toString('hex'), block.binary.toString('hex'), 'got expected block content') expectedBlocks.splice(index, 1) expectedCids.splice(index, 1) } @@ -101,19 +118,19 @@ async function verifyHas (carDs, modified) { for (let i = 0; i < blocks.length; i++) { if (modified && i === 1) { // second of each type is removed from modified - await verifyHasnt(await blocks[i].cid(), `block #${i} (${type} / ${await blocks[i].cid()})`) + await verifyHasnt(blocks[i].cid, `block #${i} (${type} / ${blocks[i].cid})`) } else { - await verifyHas(await blocks[i].cid(), `block #${i} (${type} / ${await blocks[i].cid()})`) + await verifyHas(blocks[i].cid, `block #${i} (${type} / ${blocks[i].cid})`) } } if (modified && type === 'raw') { - await verifyHas(await rawBlocks[3].cid(), `block #3 (${type})`) // zzzz + await verifyHas(rawBlocks[3].cid, `block #3 (${type})`) // zzzz } } // not a block we have - await verifyHasnt(await Block.encoder(Buffer.from('dddd'), 'raw').cid(), 'dddd') + await 
verifyHasnt((await toRawBlock(Buffer.from('dddd'))).cid, 'dddd') } function compareBlockData (actual, expected, id) { @@ -122,10 +139,10 @@ function compareBlockData (actual, expected, id) { async function verifyBlocks (carDs, modified) { async function verifyBlock (block, index, type) { - const expected = await block.encode() + const expected = block.binary let actual try { - actual = await carDs.get(await block.cid()) + actual = await carDs.get(block.cid) } catch (err) { assert.ifError(err, `get block length #${index} (${type})`) } @@ -137,7 +154,7 @@ async function verifyBlocks (carDs, modified) { const block = blocks[i] if (modified && i === 1) { - await assert.rejects(carDs.get(await block.cid()), { + await assert.rejects(carDs.get(block.cid), { name: 'Error', message: 'Not Found' }) @@ -154,8 +171,8 @@ async function verifyBlocks (carDs, modified) { } async function verifyRoots (carDs, modified) { - // const expected = await cborBlocks[modified ? 1 : 2].cid() - const expected = [await cborBlocks[0].cid(), await cborBlocks[1].cid()] + // const expected = cborBlocks[modified ? 
1 : 2].cid + const expected = [cborBlocks[0].cid, cborBlocks[1].cid] assert.deepStrictEqual(await carDs.getRoots(), expected) } diff --git a/test/test-decode.js b/test/test-decode.js index aab7b14..154a256 100644 --- a/test/test-decode.js +++ b/test/test-decode.js @@ -5,34 +5,38 @@ const { promisify } = require('util') const path = require('path') const fs = require('fs') fs.readFile = promisify(fs.readFile) -const { verifyDecoded } = require('./fixture-data') +const { makeData, verifyDecoded } = require('./fixture-data') const coding = require('../lib/coding') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) describe('Decode', () => { + before(makeData) + it('decodeFile', async () => { - const decoded = await coding.decodeFile(path.join(__dirname, 'go.car')) + const decoded = await coding.decodeFile(multiformats, path.join(__dirname, 'go.car')) return verifyDecoded(decoded) }) it('decodeFile small buffer', async () => { - const decoded = await coding.decodeFile(path.join(__dirname, 'go.car'), { bufferSize: 8 }) + const decoded = await coding.decodeFile(multiformats, path.join(__dirname, 'go.car'), { bufferSize: 8 }) return verifyDecoded(decoded) }) it('decodeBuffer', async () => { - const decoded = await coding.decodeBuffer(await fs.readFile(path.join(__dirname, 'go.car'))) + const decoded = await coding.decodeBuffer(multiformats, await fs.readFile(path.join(__dirname, 'go.car'))) return verifyDecoded(decoded) }) it('decodeStream', async () => { - const decoded = await coding.decodeStream(fs.createReadStream(path.join(__dirname, 'go.car'))) + const decoded = await coding.decodeStream(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) return verifyDecoded(decoded) }) it('decode errors', async () => { const buf = await fs.readFile(path.join(__dirname, 'go.car')) // truncated - await assert.rejects(coding.decodeBuffer(buf.slice(0, buf.length - 10)), { + await 
assert.rejects(coding.decodeBuffer(multiformats, buf.slice(0, buf.length - 10)), { name: 'Error', message: 'Unexpected end of Buffer' }) @@ -41,7 +45,7 @@ describe('Decode', () => { const buf2 = Buffer.alloc(buf.length) buf.copy(buf2) buf2[101] = 0 // first block's CID - await assert.rejects(coding.decodeBuffer(buf2), { + await assert.rejects(coding.decodeBuffer(multiformats, buf2), { name: 'Error', message: 'Unexpected CID version (0)' }) diff --git a/test/test-encode.js b/test/test-encode.js index 3258689..33c877e 100644 --- a/test/test-encode.js +++ b/test/test-encode.js @@ -8,6 +8,8 @@ fs.readFile = promisify(fs.readFile) fs.unlink = promisify(fs.unlink) const { verifyDecoded, makeData } = require('./fixture-data') const coding = require('../lib/coding') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) describe('Encode', () => { let roots, allBlocks @@ -21,7 +23,7 @@ describe('Encode', () => { allBlocks = data.allBlocksFlattened roots = [] for (const block of data.cborBlocks) { - roots.push(await block.cid()) + roots.push(block.cid) } }) @@ -29,26 +31,26 @@ describe('Encode', () => { after(clean) it('encodeFile', async () => { - await coding.encodeFile('test.car', roots, allBlocks) - const decoded = await coding.decodeFile('test.car') + await coding.encodeFile(multiformats, 'test.car', roots, allBlocks) + const decoded = await coding.decodeFile(multiformats, 'test.car') return verifyDecoded(decoded) }) it('encodeBuffer', async () => { - const buf = await coding.encodeBuffer(roots, allBlocks) - const decoded = await coding.decodeBuffer(buf) + const buf = await coding.encodeBuffer(multiformats, roots, allBlocks) + const decoded = await coding.decodeBuffer(multiformats, buf) return verifyDecoded(decoded) }) it('encodeBuffer single root', async () => { - const buf = await coding.encodeBuffer(roots[0], allBlocks) - const decoded = await coding.decodeBuffer(buf) + const buf = await coding.encodeBuffer(multiformats, 
roots[0], allBlocks) + const decoded = await coding.decodeBuffer(multiformats, buf) return verifyDecoded(decoded, true) }) it('encodeStream', async () => { const stream = bl() - const carStream = coding.encodeStream(roots, allBlocks) + const carStream = coding.encodeStream(multiformats, roots, allBlocks) carStream.pipe(stream) await new Promise((resolve, reject) => { carStream.on('finish', resolve) @@ -56,19 +58,19 @@ describe('Encode', () => { stream.on('error', reject) }) - const decoded = await coding.decodeStream(stream) + const decoded = await coding.decodeStream(multiformats, stream) return verifyDecoded(decoded) }) it('encode errors', async () => { - await assert.rejects(coding.encodeBuffer(['blip'], allBlocks), { + await assert.rejects(coding.encodeBuffer(multiformats, ['blip'], allBlocks), { name: 'TypeError', message: 'Roots must be CIDs' }) - await assert.rejects(coding.encodeBuffer(roots, ['blip']), { + await assert.rejects(coding.encodeBuffer(multiformats, roots, ['blip']), { name: 'TypeError', - message: 'Block list must contain @ipld/block objects' + message: 'Block list must be of type { cid, binary }' }) }) }) diff --git a/test/test-errors.js b/test/test-errors.js index edd82d6..6241be7 100644 --- a/test/test-errors.js +++ b/test/test-errors.js @@ -5,31 +5,33 @@ const path = require('path') const assert = require('assert') const { readBuffer, readFileComplete, writeStream } = require('../') const { acid, car } = require('./fixture-data') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) describe('Errors', () => { it('unimplemented methods', async () => { - const carDs = await readBuffer(car) + const carDs = await readBuffer(multiformats, car) await assert.rejects(carDs.batch()) await assert.rejects(carDs.batch('foo')) await carDs.close() }) it('bad gets', async () => { - const carDs = await readBuffer(car) + const carDs = await readBuffer(multiformats, car) await assert.rejects(carDs.get('blip')) // 
not a CID key await assert.doesNotReject(carDs.get(acid)) // sanity check await carDs.close() }) it('bad has\'', async () => { - const carDs = await readBuffer(car) + const carDs = await readBuffer(multiformats, car) await assert.rejects(carDs.has('blip')) // not a CID key await assert.doesNotReject(carDs.has(acid)) // sanity check await carDs.close() }) it('bad queries', async () => { - const carDs = await readBuffer(car) + const carDs = await readBuffer(multiformats, car) assert.throws(() => carDs.query('blip')) assert.throws(() => carDs.query(false)) assert.throws(() => carDs.query(null)) @@ -37,7 +39,7 @@ describe('Errors', () => { }) it('bad root type', async () => { - const carDs = await writeStream(fs.createWriteStream('test.car')) + const carDs = await writeStream(multiformats, fs.createWriteStream('test.car')) assert.rejects(carDs.setRoots('blip')) assert.rejects(carDs.setRoots(['blip'])) assert.rejects(carDs.setRoots([acid, false])) @@ -45,7 +47,7 @@ describe('Errors', () => { }) it('bad puts', async () => { - const carDs = await writeStream(fs.createWriteStream('test.car')) + const carDs = await writeStream(multiformats, fs.createWriteStream('test.car')) await assert.rejects(carDs.put(acid, 'blip')) // not a Buffer value await assert.rejects(carDs.put('blip', Buffer.from('blip'))) // not a CID key await carDs.close() @@ -54,7 +56,7 @@ describe('Errors', () => { it('truncated file', async () => { const data = await fs.readFile(path.join(__dirname, 'go.car')) await fs.writeFile('test.car', data.slice(0, data.length - 5)) - await assert.rejects(readFileComplete('test.car'), { + await assert.rejects(readFileComplete(multiformats, 'test.car'), { name: 'Error', message: 'Unexpected end of file' }) diff --git a/test/test-large.js b/test/test-large.js index b950784..031c324 100644 --- a/test/test-large.js +++ b/test/test-large.js @@ -4,34 +4,38 @@ const assert = require('assert') const fs = require('fs') const unlink = 
require('util').promisify(require('fs').unlink) const garbage = require('garbage') -const Block = require('@ipld/block') +// TODO: removeme +const { serialize: dagCborSerialize, deserialize: dagCborDeserialize, cid: dagCborCid } = require('ipld-dag-cbor').util const { writeStream, readFileComplete, readStreaming } = require('../') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) describe('Large CAR', () => { const objects = [] const cids = [] it('create, no roots', async () => { - const carDs = await writeStream(fs.createWriteStream('./test.car')) + const carDs = await writeStream(multiformats, fs.createWriteStream('./test.car')) for (let i = 0; i < 500; i++) { const obj = garbage.object(1000) objects.push(obj) - const block = Block.encoder(obj, 'dag-cbor') - const cid = await block.cid() + const binary = dagCborSerialize(obj) + const cid = new multiformats.CID((await dagCborCid(binary)).toString()) cids.push(cid.toString()) - await carDs.put(cid, block.encode()) + await carDs.put(cid, binary) } return carDs.close() }) it('readFileComplete', async () => { - const carDs = await readFileComplete('./test.car') + const carDs = await readFileComplete(multiformats, './test.car') let i = 0 for await (const { key, value } of carDs.query()) { assert.strictEqual(key, cids[i], `cid #${i} ${key} <> ${cids[i]}`) - assert.deepStrictEqual(Block.create(value, key).decode(), objects[i], `object #${i}`) + const obj = dagCborDeserialize(value) + assert.deepStrictEqual(obj, objects[i], `object #${i}`) i++ } @@ -39,11 +43,12 @@ describe('Large CAR', () => { }) it('readStream', async () => { - const carDs = await readStreaming(fs.createReadStream('./test.car')) + const carDs = await readStreaming(multiformats, fs.createReadStream('./test.car')) let i = 0 for await (const { key, value } of carDs.query()) { assert.strictEqual(key, cids[i], `cid #${i} ${key} <> ${cids[i]}`) - assert.deepStrictEqual(Block.create(value, key).decode(), 
objects[i], `object #${i}`) + const obj = dagCborDeserialize(value) + assert.deepStrictEqual(obj, objects[i], `object #${i}`) i++ } diff --git a/test/test-query.js b/test/test-query.js index 7e30317..e4b3bef 100644 --- a/test/test-query.js +++ b/test/test-query.js @@ -4,6 +4,9 @@ const assert = require('assert') const path = require('path') const { readBuffer, readFile } = require('../') const { car, makeData, compareBlockData } = require('./fixture-data') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58')) if (!assert.rejects) { // browser polyfill is incomplete @@ -17,9 +20,9 @@ if (!assert.rejects) { } } -const factories = [['readBuffer', () => readBuffer(car)]] +const factories = [['readBuffer', () => readBuffer(multiformats, car)]] if (readFile) { // not in browser - factories.push(['readFile', () => readFile(path.join(__dirname, 'go.car'))]) + factories.push(['readFile', () => readFile(multiformats, path.join(__dirname, 'go.car'))]) } for (const [factoryName, factoryFn] of factories) { @@ -36,7 +39,7 @@ for (const [factoryName, factoryFn] of factories) { const blocks_ = blocks.slice() const cids = [] for (const block of blocks) { - cids.push((await block.cid()).toString()) + cids.push(block.cid.toString()) } let i = 0 for await (const entry of carDs.query()) { @@ -44,7 +47,7 @@ for (const [factoryName, factoryFn] of factories) { if (foundIndex < 0) { assert.fail(`Unexpected CID/key found: ${entry.key}`) } - compareBlockData(entry.value, blocks_[foundIndex].encode(), `#${i++}`) + compareBlockData(entry.value, blocks_[foundIndex].binary, `#${i++}`) cids.splice(foundIndex, 1) blocks_.splice(foundIndex, 1) } @@ -57,7 +60,7 @@ for (const [factoryName, factoryFn] of factories) { const blocks_ = blocks.slice() const cids = [] for (const block of blocks) { - cids.push((await block.cid()).toString()) + cids.push(block.cid.toString()) } for await (const entry 
of carDs.query({ keysOnly: true })) { const foundIndex = cids.findIndex((cid) => cid === entry.key) @@ -77,8 +80,8 @@ for (const [factoryName, factoryFn] of factories) { const blocks_ = [] const cids = [] for (const block of blocks) { - const cid = await block.cid() - if (cid.codec === 'dag-cbor') { + const cid = new multiformats.CID(block.cid.toString()) + if (cid.code === multiformats.get('dag-cbor').code) { cids.push(cid.toString()) blocks_.push(block) } @@ -90,7 +93,7 @@ for (const [factoryName, factoryFn] of factories) { if (foundIndex < 0) { assert.fail(`Unexpected CID/key found: ${entry.key}`) } - compareBlockData(entry.value, blocks_[foundIndex].encode(), `#${i++}`) + compareBlockData(entry.value, blocks_[foundIndex].binary, `#${i++}`) cids.splice(foundIndex, 1) blocks_.splice(foundIndex, 1) } diff --git a/test/test-raw.js b/test/test-raw.js index 8f4563b..c969b58 100644 --- a/test/test-raw.js +++ b/test/test-raw.js @@ -6,24 +6,26 @@ const path = require('path') const fs = require('fs') fs.open = promisify(fs.open) fs.close = promisify(fs.close) -const CID = require('cids') const { indexer, readRaw } = require('../') const { makeData } = require('./fixture-data') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58')) describe('Raw', () => { const expectedRoots = [ - new CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), - new CID('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm') + new multiformats.CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), + new multiformats.CID('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm') ] const expectedIndex = [ - { cid: new CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), length: 55, offset: 137 }, - { cid: new CID('QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d'), length: 97, offset: 228 }, - { cid: new 
CID('bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke'), length: 4, offset: 362 }, - { cid: new CID('QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys'), length: 94, offset: 402 }, - { cid: new CID('bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4'), length: 4, offset: 533 }, - { cid: new CID('QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT'), length: 47, offset: 572 }, - { cid: new CID('bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq'), length: 4, offset: 656 }, - { cid: new CID('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm'), length: 18, offset: 697 } + { cid: new multiformats.CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), length: 55, offset: 137 }, + { cid: new multiformats.CID('QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d'), length: 97, offset: 228 }, + { cid: new multiformats.CID('bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke'), length: 4, offset: 362 }, + { cid: new multiformats.CID('QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys'), length: 94, offset: 402 }, + { cid: new multiformats.CID('bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4'), length: 4, offset: 533 }, + { cid: new multiformats.CID('QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT'), length: 47, offset: 572 }, + { cid: new multiformats.CID('bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq'), length: 4, offset: 656 }, + { cid: new multiformats.CID('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm'), length: 18, offset: 697 } ] let allBlocksFlattened @@ -42,12 +44,12 @@ describe('Raw', () => { } it('index a CAR (stream)', async () => { - const index = await indexer(fs.createReadStream(path.join(__dirname, 'go.car'))) + const index = await indexer(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) await verifyIndex(index) }) it('index a CAR (file)', async () => { - const index = await indexer(path.join(__dirname, 'go.car')) + const index = await indexer(multiformats, 
path.join(__dirname, 'go.car')) await verifyIndex(index) }) @@ -55,15 +57,15 @@ describe('Raw', () => { const expectedBlocks = allBlocksFlattened.slice() const expectedCids = [] for (const block of expectedBlocks) { - expectedCids.push((await block.cid()).toString()) + expectedCids.push(block.cid.toString()) } for (const blockIndex of expectedIndex) { const block = await readRaw(fd, blockIndex) - const cid = await block.cid() + const cid = block.cid const index = expectedCids.indexOf(cid.toString()) assert.ok(index >= 0, 'got expected block') - assert.strictEqual(expectedBlocks[index].encode().toString('hex'), block.encode().toString('hex'), 'got expected block content') + assert.strictEqual(expectedBlocks[index].binary.toString('hex'), block.binary.toString('hex'), 'got expected block content') expectedBlocks.splice(index, 1) expectedCids.splice(index, 1) } @@ -83,7 +85,7 @@ describe('Raw', () => { }) it('errors', async () => { - await assert.rejects(indexer(), { + await assert.rejects(indexer(multiformats), { name: 'TypeError', message: 'indexer() requires a file path or a ReadableStream' }) diff --git a/test/test-readbuffer.js b/test/test-readbuffer.js index 13ff1e2..34a7ab3 100644 --- a/test/test-readbuffer.js +++ b/test/test-readbuffer.js @@ -3,6 +3,9 @@ const assert = require('assert') const { readBuffer } = require('../') const { acid, car, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58')) if (!assert.rejects) { // browser polyfill is incomplete @@ -25,24 +28,24 @@ describe('Read Buffer', () => { }) it('read existing', async () => { - const carDs = await readBuffer(car) + const carDs = await readBuffer(multiformats, car) await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) - await assert.rejects(carDs.get(await rawBlocks[3].cid())) // doesn't exist + 
await assert.rejects(carDs.get(rawBlocks[3].cid)) // doesn't exist await carDs.close() }) it('verify only roots', async () => { // tests deferred open for getRoots() - const carDs = await readBuffer(car) + const carDs = await readBuffer(multiformats, car) await verifyRoots(carDs) await carDs.close() }) // when we instantiate from a Buffer, CarDatastore should be immutable it('immutable', async () => { - const carDs = await readBuffer(car) + const carDs = await readBuffer(multiformats, car) await assert.rejects(carDs.put(acid, Buffer.from('blip'))) await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) await assert.rejects(carDs.setRoots(acid)) diff --git a/test/test-readfile-writestream.js b/test/test-readfile-writestream.js index e238970..eee2490 100644 --- a/test/test-readfile-writestream.js +++ b/test/test-readfile-writestream.js @@ -5,6 +5,9 @@ const fs = require('fs') const unlink = require('util').promisify(require('fs').unlink) const { writeStream, readFileComplete } = require('../') const { makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58')) let rawBlocks let pbBlocks @@ -21,17 +24,17 @@ describe('Read File & Write Stream', () => { }) it('writeStream', async () => { - const carDs = await writeStream(fs.createWriteStream('./test.car')) - await carDs.setRoots([await cborBlocks[0].cid(), await cborBlocks[1].cid()]) + const carDs = await writeStream(multiformats, fs.createWriteStream('./test.car')) + await carDs.setRoots([cborBlocks[0].cid, cborBlocks[1].cid]) for (const block of rawBlocks.slice(0, 3).concat(pbBlocks).concat(cborBlocks)) { // add all but raw zzzz - await carDs.put(await block.cid(), block.encode()) + await carDs.put(block.cid, block.binary) } await carDs.close() }) it('readFileComplete', async () => { - const carDs = await 
readFileComplete('./test.car') + const carDs = await readFileComplete(multiformats, './test.car') await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) @@ -39,13 +42,13 @@ describe('Read File & Write Stream', () => { }) it('writeStream no await', async () => { - const roots = [await cborBlocks[0].cid(), await cborBlocks[1].cid()] + const roots = [cborBlocks[0].cid, cborBlocks[1].cid] const blocks = [] for (const block of rawBlocks.slice(0, 3).concat(pbBlocks).concat(cborBlocks)) { - blocks.push([await block.cid(), block.encode()]) + blocks.push([block.cid, block.binary]) } - const carDs = await writeStream(fs.createWriteStream('./test.car')) + const carDs = await writeStream(multiformats, fs.createWriteStream('./test.car')) carDs.setRoots(roots) for (const [cid, encoded] of blocks) { carDs.put(cid, encoded) @@ -54,7 +57,7 @@ describe('Read File & Write Stream', () => { }) it('readFileComplete post no-await write', async () => { - const carDs = await readFileComplete('./test.car') + const carDs = await readFileComplete(multiformats, './test.car') await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) @@ -62,9 +65,9 @@ describe('Read File & Write Stream', () => { }) it('writeStream errors', async () => { - const carDs = await writeStream(fs.createWriteStream('./test.car')) - await carDs.put(await cborBlocks[0].cid(), await cborBlocks[0].encode()) - await assert.rejects(carDs.delete(await cborBlocks[0].cid())) + const carDs = await writeStream(multiformats, fs.createWriteStream('./test.car')) + await carDs.put(cborBlocks[0].cid, cborBlocks[0].binary) + await assert.rejects(carDs.delete(cborBlocks[0].cid)) await carDs.close() await assert.rejects(carDs.close()) }) diff --git a/test/test-readfile.js b/test/test-readfile.js index 6e9ad23..eb92c3d 100644 --- a/test/test-readfile.js +++ b/test/test-readfile.js @@ -4,6 +4,9 @@ const path = require('path') const assert = require('assert') const { readFileComplete } = require('../car') 
const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58')) let rawBlocks @@ -14,24 +17,24 @@ describe('Read File', () => { }) it('read existing', async () => { - const carDs = await readFileComplete(path.join(__dirname, 'go.car')) + const carDs = await readFileComplete(multiformats, path.join(__dirname, 'go.car')) await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) - await assert.rejects(carDs.get(await rawBlocks[3].cid())) // doesn't exist + await assert.rejects(carDs.get(rawBlocks[3].cid)) // doesn't exist await carDs.close() }) it('verify only roots', async () => { // tests deferred open for getRoots() - const carDs = await readFileComplete(path.join(__dirname, 'go.car')) + const carDs = await readFileComplete(multiformats, path.join(__dirname, 'go.car')) await verifyRoots(carDs) await carDs.close() }) // when we instantiate from a File, CarDatastore should be immutable it('immutable', async () => { - const carDs = await readFileComplete(path.join(__dirname, 'go.car')) + const carDs = await readFileComplete(multiformats, path.join(__dirname, 'go.car')) await assert.rejects(carDs.put(acid, Buffer.from('blip'))) await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) await assert.rejects(carDs.setRoots(acid)) diff --git a/test/test-readstream.js b/test/test-readstream.js index 26d1c17..dc86071 100644 --- a/test/test-readstream.js +++ b/test/test-readstream.js @@ -5,6 +5,9 @@ const path = require('path') const assert = require('assert') const { readStreaming } = require('../car') const { acid, makeData, compareBlockData, verifyRoots } = require('./fixture-data') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58')) describe('Read 
Stream', () => { let allBlocks @@ -17,10 +20,10 @@ describe('Read Stream', () => { const blocks_ = allBlocks.slice() const cids = [] for (const block of blocks_) { - cids.push((await block.cid()).toString()) + cids.push(block.cid.toString()) } - const carDs = await readStreaming(fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreaming(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) let i = 0 for await (const entry of carDs.query()) { @@ -29,7 +32,7 @@ describe('Read Stream', () => { if (foundIndex < 0) { assert.fail(`Unexpected CID/key found: ${entry.key}`) } - compareBlockData(entry.value, blocks_[foundIndex].encode(), `#${i++}`) + compareBlockData(entry.value, blocks_[foundIndex].binary, `#${i++}`) cids.splice(foundIndex, 1) blocks_.splice(foundIndex, 1) } @@ -45,10 +48,10 @@ describe('Read Stream', () => { const blocks_ = allBlocks.slice() const cids = [] for (const block of blocks_) { - cids.push((await block.cid()).toString()) + cids.push(await block.cid.toString()) } - const carDs = await readStreaming(fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreaming(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) // test before await verifyRoots(carDs) @@ -68,15 +71,15 @@ describe('Read Stream', () => { }) it('verify only roots', async () => { - const carDs = await readStreaming(fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreaming(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) await verifyRoots(carDs) await carDs.close() }) it('errors & immutability', async () => { - const carDs = await readStreaming(fs.createReadStream(path.join(__dirname, 'go.car'))) - await assert.rejects(carDs.has(await allBlocks[0].cid())) - await assert.rejects(carDs.get(await allBlocks[0].cid())) + const carDs = await readStreaming(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) + await 
assert.rejects(carDs.has(allBlocks[0].cid)) + await assert.rejects(carDs.get(allBlocks[0].cid)) // when we instantiate from a Stream, CarDatastore should be immutable await assert.rejects(carDs.put(acid, Buffer.from('blip'))) diff --git a/test/test-readstreamcomplete.js b/test/test-readstreamcomplete.js index c0a4d9f..f57322c 100644 --- a/test/test-readstreamcomplete.js +++ b/test/test-readstreamcomplete.js @@ -5,6 +5,9 @@ const path = require('path') const assert = require('assert') const { readStreamComplete } = require('../car') const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58')) let rawBlocks @@ -15,24 +18,24 @@ describe('Read Stream', () => { }) it('read existing', async () => { - const carDs = await readStreamComplete(fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreamComplete(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) - await assert.rejects(carDs.get(await rawBlocks[3].cid())) // doesn't exist + await assert.rejects(carDs.get(rawBlocks[3].cid)) // doesn't exist await carDs.close() }) it('verify only roots', async () => { // tests deferred open for getRoots() - const carDs = await readStreamComplete(fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreamComplete(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) await verifyRoots(carDs) await carDs.close() }) // when we instantiate from a Stream, CarDatastore should be immutable it('immutable', async () => { - const carDs = await readStreamComplete(fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreamComplete(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) await 
assert.rejects(carDs.put(acid, Buffer.from('blip'))) await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) await assert.rejects(carDs.setRoots(acid)) From e13b5e3ea2fb54765a977acf873d8f152cc5c445 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Thu, 14 May 2020 20:01:43 +1000 Subject: [PATCH 04/23] export wrapper, remove more old stuff --- car.js | 27 +++++++++++++++++++-------- lib/util.js | 4 ++-- package.json | 3 +-- test/fixture-data.js | 13 ++++++++----- test/test-decode.js | 4 ++-- test/test-encode.js | 4 ++-- test/test-errors.js | 25 +++++++++++++------------ test/test-large.js | 19 +++++++++---------- test/test-query.js | 8 ++++---- test/test-raw.js | 15 +++++++++------ test/test-readbuffer.js | 10 +++++----- test/test-readfile-writestream.js | 14 +++++++------- test/test-readfile.js | 10 +++++----- test/test-readstream.js | 12 ++++++------ test/test-readstreamcomplete.js | 10 +++++----- 15 files changed, 97 insertions(+), 81 deletions(-) diff --git a/car.js b/car.js index 2bd45e0..09c8550 100644 --- a/car.js +++ b/car.js @@ -135,6 +135,7 @@ async function writeStream (multiformats, stream) { return new CarDatastore(multiformats, reader, writer) } +<<<<<<< HEAD async function traverseBlock (block, get, car, concurrency = 1, seen = new Set()) { const cid = await block.cid() await car.put(cid, block.encodeUnsafe()) @@ -184,11 +185,21 @@ async function completeGraph (root, get, car, concurrency) { await car.close() } -module.exports.readBuffer = readBuffer -module.exports.readFileComplete = readFileComplete -module.exports.readStreamComplete = readStreamComplete -module.exports.readStreaming = readStreaming -module.exports.writeStream = writeStream -module.exports.indexer = indexer -module.exports.readRaw = readRaw -module.exports.completeGraph = completeGraph +module.exports = (multiformats) => { + function wrap (fn) { + return function (...args) { + return fn(multiformats, ...args) + } + } + + return { + readBuffer: wrap(readBuffer), + readFileComplete: 
wrap(readFileComplete), + readStreamComplete: wrap(readStreamComplete), + readStreaming: wrap(readStreaming), + writeStream: wrap(writeStream), + indexer: wrap(indexer), + readRaw, + completeGraph + } +} diff --git a/lib/util.js b/lib/util.js index 63aecf3..03f5d53 100644 --- a/lib/util.js +++ b/lib/util.js @@ -8,7 +8,8 @@ function isCID (key) { isBuffer(key.buffer) && isBuffer(key.multihash) && (key.version === 0 || key.version === 1) && - (typeof key.code === 'number' || typeof key.codec === 'string') + typeof key.code === 'number' + // don't handle old style CIDs } function isBuffer (b) { @@ -22,7 +23,6 @@ function toKey (multiformats, key, method) { try { key = new multiformats.CID(key.toString()) } catch (e) { - console.error(e) throw new TypeError(`${method}() only accepts CIDs or CID strings`) } } diff --git a/package.json b/package.json index 5833445..e211ec4 100644 --- a/package.json +++ b/package.json @@ -45,8 +45,7 @@ }, "dependencies": { "interface-datastore": "^1.0.2", - "ipld-dag-cbor": "^0.15.2", - "multiformats": "0.0.2", + "multiformats": "github:rvagg/js-multiformats#rvagg/non-enumerable-basecache", "varint": "^5.0.0" } } diff --git a/test/fixture-data.js b/test/fixture-data.js index c6ec104..f7c7b7e 100644 --- a/test/fixture-data.js +++ b/test/fixture-data.js @@ -1,11 +1,10 @@ const assert = require('assert') const { DAGNode, DAGLink, util: pbUtil } = require('ipld-dag-pb') // TODO: remove this -const { serialize: dagCborSerialize, cid: dagCborCid } = require('ipld-dag-cbor').util -// TODO: remove this const CID = require('cids') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58')) let rawBlocks const pbBlocks = [] @@ -26,8 +25,9 @@ async function toPbBlock (pnd) { } async function toCborBlock (cb) { - const binary = dagCborSerialize(cb) - const cid = new multiformats.CID((await dagCborCid(binary)).toString()) + const binary = await 
multiformats.encode(cb, 'dag-cbor') + const mh = await multiformats.multihash.hash(binary, 'sha2-256') + const cid = new multiformats.CID(1, multiformats.get('dag-cbor').code, mh) return { cid, binary } } @@ -97,7 +97,10 @@ async function verifyDecoded (decoded, singleRoot) { const cid = block.cid const index = expectedCids.indexOf(cid.toString()) assert.ok(index >= 0, 'got expected block') - assert.strictEqual(expectedBlocks[index].binary.toString('hex'), block.binary.toString('hex'), 'got expected block content') + assert.strictEqual( + Buffer.from(expectedBlocks[index].binary).toString('hex'), + Buffer.from(block.binary).toString('hex'), + 'got expected block content') expectedBlocks.splice(index, 1) expectedCids.splice(index, 1) } diff --git a/test/test-decode.js b/test/test-decode.js index 154a256..3780f3c 100644 --- a/test/test-decode.js +++ b/test/test-decode.js @@ -5,10 +5,10 @@ const { promisify } = require('util') const path = require('path') const fs = require('fs') fs.readFile = promisify(fs.readFile) -const { makeData, verifyDecoded } = require('./fixture-data') -const coding = require('../lib/coding') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) +const { makeData, verifyDecoded } = require('./fixture-data') +const coding = require('../lib/coding') describe('Decode', () => { before(makeData) diff --git a/test/test-encode.js b/test/test-encode.js index 33c877e..cdd267b 100644 --- a/test/test-encode.js +++ b/test/test-encode.js @@ -6,10 +6,10 @@ const fs = require('fs') const bl = require('bl') fs.readFile = promisify(fs.readFile) fs.unlink = promisify(fs.unlink) -const { verifyDecoded, makeData } = require('./fixture-data') -const coding = require('../lib/coding') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) +const { verifyDecoded, makeData } = require('./fixture-data') +const coding = require('../lib/coding') describe('Encode', () => { let roots, 
allBlocks diff --git a/test/test-errors.js b/test/test-errors.js index 6241be7..4b5f84b 100644 --- a/test/test-errors.js +++ b/test/test-errors.js @@ -3,35 +3,36 @@ const fs = require('fs').promises const path = require('path') const assert = require('assert') -const { readBuffer, readFileComplete, writeStream } = require('../') -const { acid, car } = require('./fixture-data') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) +const { readBuffer, readFileComplete, writeStream } = require('../')(multiformats) +const { acid, car } = require('./fixture-data') describe('Errors', () => { it('unimplemented methods', async () => { - const carDs = await readBuffer(multiformats, car) + const carDs = await readBuffer(car) await assert.rejects(carDs.batch()) await assert.rejects(carDs.batch('foo')) await carDs.close() }) it('bad gets', async () => { - const carDs = await readBuffer(multiformats, car) - await assert.rejects(carDs.get('blip')) // not a CID key + const carDs = await readBuffer(car) + await assert.rejects(carDs.get('blip'), /only accepts CIDs or CID strings/) // not a CID key + await assert.rejects(carDs.get(null)) // not a CID key await assert.doesNotReject(carDs.get(acid)) // sanity check await carDs.close() }) it('bad has\'', async () => { - const carDs = await readBuffer(multiformats, car) - await assert.rejects(carDs.has('blip')) // not a CID key + const carDs = await readBuffer(car) + await assert.rejects(carDs.has('blip'), /only accepts CIDs or CID strings/) // not a CID key await assert.doesNotReject(carDs.has(acid)) // sanity check await carDs.close() }) it('bad queries', async () => { - const carDs = await readBuffer(multiformats, car) + const carDs = await readBuffer(car) assert.throws(() => carDs.query('blip')) assert.throws(() => carDs.query(false)) assert.throws(() => carDs.query(null)) @@ -39,7 +40,7 @@ describe('Errors', () => { }) it('bad root type', async () => { - const carDs = await 
writeStream(multiformats, fs.createWriteStream('test.car')) + const carDs = await writeStream(fs.createWriteStream('test.car')) assert.rejects(carDs.setRoots('blip')) assert.rejects(carDs.setRoots(['blip'])) assert.rejects(carDs.setRoots([acid, false])) @@ -47,16 +48,16 @@ describe('Errors', () => { }) it('bad puts', async () => { - const carDs = await writeStream(multiformats, fs.createWriteStream('test.car')) + const carDs = await writeStream(fs.createWriteStream('test.car')) await assert.rejects(carDs.put(acid, 'blip')) // not a Buffer value - await assert.rejects(carDs.put('blip', Buffer.from('blip'))) // not a CID key + await assert.rejects(carDs.put('blip', Buffer.from('blip')), /only accepts CIDs or CID strings/) // not a CID key await carDs.close() }) it('truncated file', async () => { const data = await fs.readFile(path.join(__dirname, 'go.car')) await fs.writeFile('test.car', data.slice(0, data.length - 5)) - await assert.rejects(readFileComplete(multiformats, 'test.car'), { + await assert.rejects(readFileComplete('test.car'), { name: 'Error', message: 'Unexpected end of file' }) diff --git a/test/test-large.js b/test/test-large.js index 031c324..29ca854 100644 --- a/test/test-large.js +++ b/test/test-large.js @@ -4,24 +4,23 @@ const assert = require('assert') const fs = require('fs') const unlink = require('util').promisify(require('fs').unlink) const garbage = require('garbage') -// TODO: removeme -const { serialize: dagCborSerialize, deserialize: dagCborDeserialize, cid: dagCborCid } = require('ipld-dag-cbor').util -const { writeStream, readFileComplete, readStreaming } = require('../') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) +const { writeStream, readFileComplete, readStreaming } = require('../')(multiformats) describe('Large CAR', () => { const objects = [] const cids = [] it('create, no roots', async () => { - const carDs = await writeStream(multiformats, fs.createWriteStream('./test.car')) + 
const carDs = await writeStream(fs.createWriteStream('./test.car')) for (let i = 0; i < 500; i++) { const obj = garbage.object(1000) objects.push(obj) - const binary = dagCborSerialize(obj) - const cid = new multiformats.CID((await dagCborCid(binary)).toString()) + const binary = await multiformats.encode(obj, 'dag-cbor') + const mh = await multiformats.multihash.hash(binary, 'sha2-256') + const cid = new multiformats.CID(1, multiformats.get('dag-cbor').code, mh) cids.push(cid.toString()) await carDs.put(cid, binary) } @@ -30,11 +29,11 @@ describe('Large CAR', () => { }) it('readFileComplete', async () => { - const carDs = await readFileComplete(multiformats, './test.car') + const carDs = await readFileComplete('./test.car') let i = 0 for await (const { key, value } of carDs.query()) { assert.strictEqual(key, cids[i], `cid #${i} ${key} <> ${cids[i]}`) - const obj = dagCborDeserialize(value) + const obj = await multiformats.decode(value, 'dag-cbor') assert.deepStrictEqual(obj, objects[i], `object #${i}`) i++ } @@ -43,11 +42,11 @@ describe('Large CAR', () => { }) it('readStream', async () => { - const carDs = await readStreaming(multiformats, fs.createReadStream('./test.car')) + const carDs = await readStreaming(fs.createReadStream('./test.car')) let i = 0 for await (const { key, value } of carDs.query()) { assert.strictEqual(key, cids[i], `cid #${i} ${key} <> ${cids[i]}`) - const obj = dagCborDeserialize(value) + const obj = await multiformats.decode(value, 'dag-cbor') assert.deepStrictEqual(obj, objects[i], `object #${i}`) i++ } diff --git a/test/test-query.js b/test/test-query.js index e4b3bef..5a95b89 100644 --- a/test/test-query.js +++ b/test/test-query.js @@ -2,11 +2,11 @@ const assert = require('assert') const path = require('path') -const { readBuffer, readFile } = require('../') -const { car, makeData, compareBlockData } = require('./fixture-data') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) 
multiformats.multibase.add(require('multiformats/bases/base58')) +const { readBuffer, readFile } = require('../')(multiformats) +const { car, makeData, compareBlockData } = require('./fixture-data') if (!assert.rejects) { // browser polyfill is incomplete @@ -20,9 +20,9 @@ if (!assert.rejects) { } } -const factories = [['readBuffer', () => readBuffer(multiformats, car)]] +const factories = [['readBuffer', () => readBuffer(car)]] if (readFile) { // not in browser - factories.push(['readFile', () => readFile(multiformats, path.join(__dirname, 'go.car'))]) + factories.push(['readFile', () => readFile(path.join(__dirname, 'go.car'))]) } for (const [factoryName, factoryFn] of factories) { diff --git a/test/test-raw.js b/test/test-raw.js index c969b58..d5599c3 100644 --- a/test/test-raw.js +++ b/test/test-raw.js @@ -6,11 +6,11 @@ const path = require('path') const fs = require('fs') fs.open = promisify(fs.open) fs.close = promisify(fs.close) -const { indexer, readRaw } = require('../') -const { makeData } = require('./fixture-data') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) multiformats.multibase.add(require('multiformats/bases/base58')) +const { indexer, readRaw } = require('../')(multiformats) +const { makeData } = require('./fixture-data') describe('Raw', () => { const expectedRoots = [ @@ -44,12 +44,12 @@ describe('Raw', () => { } it('index a CAR (stream)', async () => { - const index = await indexer(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) + const index = await indexer(fs.createReadStream(path.join(__dirname, 'go.car'))) await verifyIndex(index) }) it('index a CAR (file)', async () => { - const index = await indexer(multiformats, path.join(__dirname, 'go.car')) + const index = await indexer(path.join(__dirname, 'go.car')) await verifyIndex(index) }) @@ -65,7 +65,10 @@ describe('Raw', () => { const cid = block.cid const index = expectedCids.indexOf(cid.toString()) assert.ok(index >= 0, 
'got expected block') - assert.strictEqual(expectedBlocks[index].binary.toString('hex'), block.binary.toString('hex'), 'got expected block content') + assert.strictEqual( + Buffer.from(expectedBlocks[index].binary).toString('hex'), + Buffer.from(block.binary).toString('hex'), + 'got expected block content') expectedBlocks.splice(index, 1) expectedCids.splice(index, 1) } @@ -85,7 +88,7 @@ describe('Raw', () => { }) it('errors', async () => { - await assert.rejects(indexer(multiformats), { + await assert.rejects(indexer(), { name: 'TypeError', message: 'indexer() requires a file path or a ReadableStream' }) diff --git a/test/test-readbuffer.js b/test/test-readbuffer.js index 34a7ab3..7d48c81 100644 --- a/test/test-readbuffer.js +++ b/test/test-readbuffer.js @@ -1,11 +1,11 @@ /* eslint-env mocha */ const assert = require('assert') -const { readBuffer } = require('../') -const { acid, car, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) multiformats.multibase.add(require('multiformats/bases/base58')) +const { readBuffer } = require('../')(multiformats) +const { acid, car, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') if (!assert.rejects) { // browser polyfill is incomplete @@ -28,7 +28,7 @@ describe('Read Buffer', () => { }) it('read existing', async () => { - const carDs = await readBuffer(multiformats, car) + const carDs = await readBuffer(car) await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) @@ -38,14 +38,14 @@ describe('Read Buffer', () => { it('verify only roots', async () => { // tests deferred open for getRoots() - const carDs = await readBuffer(multiformats, car) + const carDs = await readBuffer(car) await verifyRoots(carDs) await carDs.close() }) // when we instantiate from a Buffer, CarDatastore should be immutable it('immutable', async () => { - const carDs = await 
readBuffer(multiformats, car) + const carDs = await readBuffer(car) await assert.rejects(carDs.put(acid, Buffer.from('blip'))) await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) await assert.rejects(carDs.setRoots(acid)) diff --git a/test/test-readfile-writestream.js b/test/test-readfile-writestream.js index eee2490..27a3b67 100644 --- a/test/test-readfile-writestream.js +++ b/test/test-readfile-writestream.js @@ -3,11 +3,11 @@ const assert = require('assert') const fs = require('fs') const unlink = require('util').promisify(require('fs').unlink) -const { writeStream, readFileComplete } = require('../') -const { makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) multiformats.multibase.add(require('multiformats/bases/base58')) +const { writeStream, readFileComplete } = require('../')(multiformats) +const { makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') let rawBlocks let pbBlocks @@ -24,7 +24,7 @@ describe('Read File & Write Stream', () => { }) it('writeStream', async () => { - const carDs = await writeStream(multiformats, fs.createWriteStream('./test.car')) + const carDs = await writeStream(fs.createWriteStream('./test.car')) await carDs.setRoots([cborBlocks[0].cid, cborBlocks[1].cid]) for (const block of rawBlocks.slice(0, 3).concat(pbBlocks).concat(cborBlocks)) { // add all but raw zzzz @@ -34,7 +34,7 @@ describe('Read File & Write Stream', () => { }) it('readFileComplete', async () => { - const carDs = await readFileComplete(multiformats, './test.car') + const carDs = await readFileComplete('./test.car') await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) @@ -48,7 +48,7 @@ describe('Read File & Write Stream', () => { blocks.push([block.cid, block.binary]) } - const carDs = await writeStream(multiformats, fs.createWriteStream('./test.car')) + const carDs = await 
writeStream(fs.createWriteStream('./test.car')) carDs.setRoots(roots) for (const [cid, encoded] of blocks) { carDs.put(cid, encoded) @@ -57,7 +57,7 @@ describe('Read File & Write Stream', () => { }) it('readFileComplete post no-await write', async () => { - const carDs = await readFileComplete(multiformats, './test.car') + const carDs = await readFileComplete('./test.car') await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) @@ -65,7 +65,7 @@ describe('Read File & Write Stream', () => { }) it('writeStream errors', async () => { - const carDs = await writeStream(multiformats, fs.createWriteStream('./test.car')) + const carDs = await writeStream(fs.createWriteStream('./test.car')) await carDs.put(cborBlocks[0].cid, cborBlocks[0].binary) await assert.rejects(carDs.delete(cborBlocks[0].cid)) await carDs.close() diff --git a/test/test-readfile.js b/test/test-readfile.js index eb92c3d..1d46d6d 100644 --- a/test/test-readfile.js +++ b/test/test-readfile.js @@ -2,11 +2,11 @@ const path = require('path') const assert = require('assert') -const { readFileComplete } = require('../car') -const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) multiformats.multibase.add(require('multiformats/bases/base58')) +const { readFileComplete } = require('../car')(multiformats) +const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') let rawBlocks @@ -17,7 +17,7 @@ describe('Read File', () => { }) it('read existing', async () => { - const carDs = await readFileComplete(multiformats, path.join(__dirname, 'go.car')) + const carDs = await readFileComplete(path.join(__dirname, 'go.car')) await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) @@ -27,14 +27,14 @@ describe('Read File', () => { it('verify only roots', async () => { // tests deferred open for getRoots() - const carDs = await 
readFileComplete(multiformats, path.join(__dirname, 'go.car')) + const carDs = await readFileComplete(path.join(__dirname, 'go.car')) await verifyRoots(carDs) await carDs.close() }) // when we instantiate from a File, CarDatastore should be immutable it('immutable', async () => { - const carDs = await readFileComplete(multiformats, path.join(__dirname, 'go.car')) + const carDs = await readFileComplete(path.join(__dirname, 'go.car')) await assert.rejects(carDs.put(acid, Buffer.from('blip'))) await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) await assert.rejects(carDs.setRoots(acid)) diff --git a/test/test-readstream.js b/test/test-readstream.js index dc86071..3a111f3 100644 --- a/test/test-readstream.js +++ b/test/test-readstream.js @@ -3,11 +3,11 @@ const fs = require('fs') const path = require('path') const assert = require('assert') -const { readStreaming } = require('../car') -const { acid, makeData, compareBlockData, verifyRoots } = require('./fixture-data') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) multiformats.multibase.add(require('multiformats/bases/base58')) +const { readStreaming } = require('../')(multiformats) +const { acid, makeData, compareBlockData, verifyRoots } = require('./fixture-data') describe('Read Stream', () => { let allBlocks @@ -23,7 +23,7 @@ describe('Read Stream', () => { cids.push(block.cid.toString()) } - const carDs = await readStreaming(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreaming(fs.createReadStream(path.join(__dirname, 'go.car'))) let i = 0 for await (const entry of carDs.query()) { @@ -51,7 +51,7 @@ describe('Read Stream', () => { cids.push(await block.cid.toString()) } - const carDs = await readStreaming(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreaming(fs.createReadStream(path.join(__dirname, 'go.car'))) // test before await verifyRoots(carDs) @@ -71,13 
+71,13 @@ describe('Read Stream', () => { }) it('verify only roots', async () => { - const carDs = await readStreaming(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreaming(fs.createReadStream(path.join(__dirname, 'go.car'))) await verifyRoots(carDs) await carDs.close() }) it('errors & immutability', async () => { - const carDs = await readStreaming(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreaming(fs.createReadStream(path.join(__dirname, 'go.car'))) await assert.rejects(carDs.has(allBlocks[0].cid)) await assert.rejects(carDs.get(allBlocks[0].cid)) diff --git a/test/test-readstreamcomplete.js b/test/test-readstreamcomplete.js index f57322c..c6e3a40 100644 --- a/test/test-readstreamcomplete.js +++ b/test/test-readstreamcomplete.js @@ -3,11 +3,11 @@ const fs = require('fs') const path = require('path') const assert = require('assert') -const { readStreamComplete } = require('../car') -const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) multiformats.multibase.add(require('multiformats/bases/base58')) +const { readStreamComplete } = require('../')(multiformats) +const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') let rawBlocks @@ -18,7 +18,7 @@ describe('Read Stream', () => { }) it('read existing', async () => { - const carDs = await readStreamComplete(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreamComplete(fs.createReadStream(path.join(__dirname, 'go.car'))) await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) @@ -28,14 +28,14 @@ describe('Read Stream', () => { it('verify only roots', async () => { // tests deferred open for getRoots() - const carDs = await readStreamComplete(multiformats, 
fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreamComplete(fs.createReadStream(path.join(__dirname, 'go.car'))) await verifyRoots(carDs) await carDs.close() }) // when we instantiate from a Stream, CarDatastore should be immutable it('immutable', async () => { - const carDs = await readStreamComplete(multiformats, fs.createReadStream(path.join(__dirname, 'go.car'))) + const carDs = await readStreamComplete(fs.createReadStream(path.join(__dirname, 'go.car'))) await assert.rejects(carDs.put(acid, Buffer.from('blip'))) await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) await assert.rejects(carDs.setRoots(acid)) From d03c683e40dc2dfb2c9e5cdd8c39797688298701 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Thu, 14 May 2020 22:12:59 +1000 Subject: [PATCH 05/23] cleanup, make work in browser --- car-browser.js | 11 +++++++++++ car.js | 7 +++---- lib/coding-browser.js | 4 ++-- package.json | 1 + test/fixture-data.js | 7 ++++--- test/test-errors.js | 2 +- test/test-readfile.js | 2 +- test/test-readstream.js | 2 +- test/test-readstreamcomplete.js | 2 +- 9 files changed, 25 insertions(+), 13 deletions(-) diff --git a/car-browser.js b/car-browser.js index 3771ab9..dc404de 100644 --- a/car-browser.js +++ b/car-browser.js @@ -29,4 +29,15 @@ async function readBuffer (multiformats, buffer) { return new CarDatastore(multiformats, reader, writer) } +module.exports = (multiformats) => { + function wrap (fn) { + return function (...args) { + return fn(multiformats, ...args) + } + } + + return { + readBuffer: wrap(readBuffer) + } +} module.exports.readBuffer = readBuffer diff --git a/car.js b/car.js index 09c8550..cae657f 100644 --- a/car.js +++ b/car.js @@ -2,7 +2,7 @@ const { Reader, NoWriter } = require('./lib/reader-writer-iface') const { createStreamCompleteReader, createStreamingReader, createFileReader } = require('./lib/reader') const createStreamWriter = require('./lib/writer-stream') const CarDatastore = require('./datastore') 
-const { readBuffer } = require('./car-browser') +const browser = require('./car-browser') const { indexer, readRaw } = require('./lib/raw') /** @@ -192,8 +192,7 @@ module.exports = (multiformats) => { } } - return { - readBuffer: wrap(readBuffer), + return Object.assign(browser(multiformats), { readFileComplete: wrap(readFileComplete), readStreamComplete: wrap(readStreamComplete), readStreaming: wrap(readStreaming), @@ -201,5 +200,5 @@ module.exports = (multiformats) => { indexer: wrap(indexer), readRaw, completeGraph - } + }) } diff --git a/lib/coding-browser.js b/lib/coding-browser.js index f89dbac..25a7dc9 100644 --- a/lib/coding-browser.js +++ b/lib/coding-browser.js @@ -64,7 +64,7 @@ async function readCid (multiformats, reader) { // cidv0 32-byte sha2-256 const bytes = await reader.exactly(34) reader.seek(34) - return new multiformats.CID(0, 0x70, new Uint8Array(bytes)) + return new multiformats.CID(0, 0x70, Uint8Array.from(bytes)) } const version = await readVarint(reader) @@ -73,7 +73,7 @@ async function readCid (multiformats, reader) { } const codec = await readVarint(reader) const multihash = await readMultihash(reader) - return new multiformats.CID(version, codec, new Uint8Array(multihash)) + return new multiformats.CID(version, codec, Uint8Array.from(multihash)) } async function readBlockHead (multiformats, reader) { diff --git a/package.json b/package.json index e211ec4..3932ef6 100644 --- a/package.json +++ b/package.json @@ -32,6 +32,7 @@ "homepage": "https://github.com/rvagg/js-datastore-car#readme", "devDependencies": { "@ipld/dag-cbor": "github:ipld/js-dag-cbor#multiformats", + "assert": "^2.0.0", "bl": "^4.0.2", "cids": "^0.8.0", "garbage": "0.0.0", diff --git a/test/fixture-data.js b/test/fixture-data.js index f7c7b7e..949e224 100644 --- a/test/fixture-data.js +++ b/test/fixture-data.js @@ -174,9 +174,10 @@ async function verifyBlocks (carDs, modified) { } async function verifyRoots (carDs, modified) { - // const expected = cborBlocks[modified 
? 1 : 2].cid - const expected = [cborBlocks[0].cid, cborBlocks[1].cid] - assert.deepStrictEqual(await carDs.getRoots(), expected) + // using toString() for now, backing buffers in Uint8Arrays are getting in the way + // in the browser + const expected = [cborBlocks[0].cid.toString(), cborBlocks[1].cid.toString()] + assert.deepStrictEqual((await carDs.getRoots()).map((c) => c.toString()), expected) } module.exports.makeData = makeData diff --git a/test/test-errors.js b/test/test-errors.js index 4b5f84b..05b1ff1 100644 --- a/test/test-errors.js +++ b/test/test-errors.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ +const assert = require('assert') const fs = require('fs').promises const path = require('path') -const assert = require('assert') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) const { readBuffer, readFileComplete, writeStream } = require('../')(multiformats) diff --git a/test/test-readfile.js b/test/test-readfile.js index 1d46d6d..687f535 100644 --- a/test/test-readfile.js +++ b/test/test-readfile.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ -const path = require('path') const assert = require('assert') +const path = require('path') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) multiformats.multibase.add(require('multiformats/bases/base58')) diff --git a/test/test-readstream.js b/test/test-readstream.js index 3a111f3..9ae22af 100644 --- a/test/test-readstream.js +++ b/test/test-readstream.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ +const assert = require('assert') const fs = require('fs') const path = require('path') -const assert = require('assert') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) multiformats.multibase.add(require('multiformats/bases/base58')) diff --git a/test/test-readstreamcomplete.js b/test/test-readstreamcomplete.js index c6e3a40..a77859e 100644 --- a/test/test-readstreamcomplete.js +++ 
b/test/test-readstreamcomplete.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ +const assert = require('assert') const fs = require('fs') const path = require('path') -const assert = require('assert') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) multiformats.multibase.add(require('multiformats/bases/base58')) From 3d359557e1d051815a62434e3a1de3bda1660e4e Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Fri, 15 May 2020 13:30:42 +1000 Subject: [PATCH 06/23] docs & cleanup example --- README.md | 23 +++++++++++++---------- datastore.js | 4 ++-- example.js | 13 ++++++++----- lib/coding-browser.js | 2 +- 4 files changed, 24 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index 8f63a72..1fa978b 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# datastore-car (js-datastore-car) [![Build Status](https://github.com/rvagg/js-datastore-car/workflows/CI/badge.svg)](https://github.com/rvagg/js-datastore-car/actions?workflow=CI) +# datastore-car (js-datastore-car) [![Build Status](https://github.com/ipld/js-datastore-car/workflows/CI/badge.svg)](https://github.com/ipld/js-datastore-car/actions?workflow=CI) [![NPM](https://nodei.co/npm/datastore-car.svg)](https://nodei.co/npm/datastore-car/) @@ -10,12 +10,15 @@ The interface wraps a [Datastore](https://github.com/ipfs/interface-datastore), ```js const fs = require('fs') -const CarDatastore = require('datastore-car') -const Block = require('@ipld/block') +const multiformats = require('multiformats/basics') +// dag-cbor is required for the CAR root block +multiformats.add(require('@ipld/dag-cbor')) +const CarDatastore = require('datastore-car')(multiformats) async function example () { - const block = Block.encoder(Buffer.from('random meaningless bytes'), 'raw') - const cid = await block.cid() + const binary = Buffer.from('random meaningless bytes') + const mh = await multiformats.multihash.hash(binary, 'sha2-256') + const cid = new multiformats.CID(1, 
multiformats.get('raw').code, mh) const outStream = fs.createWriteStream('example.car') const writeDs = await CarDatastore.writeStream(outStream) @@ -23,7 +26,7 @@ async function example () { // set the header with a single root await writeDs.setRoots(cid) // store a new block, creates a new file entry in the CAR archive - await writeDs.put(cid, await block.encode()) + await writeDs.put(cid, binary) await writeDs.close() const inStream = fs.createReadStream('example.car') @@ -34,7 +37,7 @@ async function example () { // read the list of roots from the header const roots = await readDs.getRoots() - // retrieve a block, as a UInt8Array, reading from the CAR archive + // retrieve a block, as a UInt8Array, reading from the CAR archive const got = await readDs.get(roots[0]) // also possible: for await (const {key, data} = readDs.query()) { ... } @@ -238,8 +241,8 @@ a CAR file. ### `class CarDatastore` CarDatastore is a class to manage reading from, and writing to a CAR archives -using [CID](https://github.com/multiformats/js-cid)s as keys and file names -in the CAR and binary block data as the file contents. +using [CID](https://github.com/multiformats/js-multiformats)s as keys and +file names in the CAR and binary block data as the file contents. ### `async CarDatastore#get(key)` @@ -450,7 +453,7 @@ Read a block directly from a CAR file given an block index provided by * **`blockIndex`** _(`Object`)_: an index object of the style provided by `CarDatastore.indexer()` (`{ cid, offset, length }`). -**Return value** _(`Block`)_: an IPLD [Block](https://ghub.io/@ipld/block) object. +**Return value** _(`object`)_: an IPLD block of the form `{ cid, binary }`. 
## License and Copyright diff --git a/datastore.js b/datastore.js index c4b6ce8..9825b9c 100644 --- a/datastore.js +++ b/datastore.js @@ -3,8 +3,8 @@ const { toKey } = require('./lib/util') /** * CarDatastore is a class to manage reading from, and writing to a CAR archives - * using [CID](https://github.com/multiformats/js-cid)s as keys and file names - * in the CAR and binary block data as the file contents. + * using [CID](https://github.com/multiformats/js-multiformats)s as keys and + * file names in the CAR and binary block data as the file contents. * * @class */ diff --git a/example.js b/example.js index 59ba82b..ac131c6 100644 --- a/example.js +++ b/example.js @@ -1,10 +1,13 @@ const fs = require('fs') -const CarDatastore = require('./') -const Block = require('@ipld/block') +const multiformats = require('multiformats/basics') +// dag-cbor is required for the CAR root block +multiformats.add(require('@ipld/dag-cbor')) +const CarDatastore = require('./')(multiformats) async function example () { - const block = Block.encoder(Buffer.from('random meaningless bytes'), 'raw') - const cid = await block.cid() + const binary = Buffer.from('random meaningless bytes') + const mh = await multiformats.multihash.hash(binary, 'sha2-256') + const cid = new multiformats.CID(1, multiformats.get('raw').code, mh) const outStream = fs.createWriteStream('example.car') const writeDs = await CarDatastore.writeStream(outStream) @@ -12,7 +15,7 @@ async function example () { // set the header with a single root await writeDs.setRoots(cid) // store a new block, creates a new file entry in the CAR archive - await writeDs.put(cid, await block.encode()) + await writeDs.put(cid, binary) await writeDs.close() const inStream = fs.createReadStream('example.car') diff --git a/lib/coding-browser.js b/lib/coding-browser.js index 25a7dc9..11daf1f 100644 --- a/lib/coding-browser.js +++ b/lib/coding-browser.js @@ -77,7 +77,7 @@ async function readCid (multiformats, reader) { } async function 
readBlockHead (multiformats, reader) { - // length includes a CID + Block, where CID has a variable length + // length includes a CID + Binary, where CID has a variable length // we have to deal with const totalLength = await readVarint(reader) const start = reader.pos From 1bbb212e2ca53f4726527c69a388229bc67a3845 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Fri, 15 May 2020 15:03:44 +1000 Subject: [PATCH 07/23] doc updates, change indexer() and readRaw() to match go-car-util output (BREAKING) --- README.md | 49 ++++++++++++++++------------------ lib/coding-browser.js | 18 +++++++------ lib/raw.js | 61 ++++++++++++++++++++----------------------- test/test-raw.js | 20 +++++++------- 4 files changed, 72 insertions(+), 76 deletions(-) diff --git a/README.md b/README.md index 1fa978b..429dc35 100644 --- a/README.md +++ b/README.md @@ -377,15 +377,19 @@ used to read individual blocks directly from the car (using `CarDatastore.readRaw()`). ```js -const { indexer } = require('datastore-car') +// full multiformats omitted, you'll need codecs, bases and hashes that +// appear in your CAR files if you want full information +const multiformats = ... 
+const { indexer } = require('datastore-car')(multiformats) async function run () { + const cidStr = (cid) => `${multiformats.get(cid.code).name}:${cid.toString()}` const index = await indexer('big.car') - index.roots = index.roots.map((cid) => cid.toString()) + index.roots = index.roots.map(cidStr) console.log('roots:', index.roots) for await (const blockIndex of index.iterator) { - blockIndex.cid = blockIndex.cid.toString() - console.log('block:', blockIndex) + blockIndex.cid = cidStr(cid) + console.log(JSON.toString(blockIndex)) } } @@ -399,24 +403,17 @@ Might output something like: ``` roots: [ - 'bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm', - 'bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm' + 'dag-cbor:bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm', + 'dag-cbor:bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm' ] -block: { - cid: 'bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm', - length: 55, - offset: 137 -} -block: { - cid: 'QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d', - length: 97, - offset: 228 -} -block: { - cid: 'bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke', - length: 4, - offset: 362 -} +{"cid":"dag-cbor:bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm","length":92,"blockLength":55,"offset":100,"blockOffset":137} +{"cid":"dag-pb:QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d","length":133,"blockLength":97,"offset":192,"blockOffset":228} +{"cid":"raw:bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke","length":41,"blockLength":4,"offset":325,"blockOffset":362} +{"cid":"dag-pb:QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys","length":130,"blockLength":94,"offset":366,"blockOffset":402} +{"cid":"raw:bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4","length":41,"blockLength":4,"offset":496,"blockOffset":533} +{"cid":"dag-pb:QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT","length":82,"blockLength":47,"offset":537,"blockOffset":572} 
+{"cid":"raw:bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq","length":41,"blockLength":4,"offset":619,"blockOffset":656} +{"cid":"dag-cbor:bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm","length":55,"blockLength":18,"offset":660,"blockOffset":697} ... ``` @@ -435,16 +432,16 @@ read into memory. **Return value** _(`Object., iterator:AsyncIterator>`)_: an object containing a `roots` array of CIDs and an `iterator` AsyncIterator that will yield - Objects of the form `{ cid:CID, offset:number, length:number }` indicating - the CID of the block located at start=`offset` with a length of `number` in - the CAR archive provided. + Objects of the form `{ cid:CID, offset:number, length:number, byteOffset:number, byteLength:number }` + indicating the CID of the block located at `blockOffset` with a length of + `blockLength` in the CAR archive provided. ### `async CarDatastore.readRaw(fd, blockIndex)` Read a block directly from a CAR file given an block index provided by -`CarDatastore.indexer()` (i.e. an object of the form: -`{ cid:CID, offset:number, length:number }`). +`CarDatastore.indexer()` (i.e. an object with the minimal form: +`{ cid:CID, blockOffset:number, blockLength:number }`). 
**Parameters:** diff --git a/lib/coding-browser.js b/lib/coding-browser.js index 11daf1f..b30b98b 100644 --- a/lib/coding-browser.js +++ b/lib/coding-browser.js @@ -79,25 +79,27 @@ async function readCid (multiformats, reader) { async function readBlockHead (multiformats, reader) { // length includes a CID + Binary, where CID has a variable length // we have to deal with - const totalLength = await readVarint(reader) const start = reader.pos + const length = await readVarint(reader) + (reader.pos - start) const cid = await readCid(multiformats, reader) - const length = totalLength - (reader.pos - start) // subtract CID length + const blockLength = length - (reader.pos - start) // subtract CID length - return { cid, length } + return { cid, length, blockLength } } async function readBlock (multiformats, reader) { - const { cid, length } = await readBlockHead(multiformats, reader) - const binary = await reader.exactly(length) - reader.seek(length) + const { cid, blockLength } = await readBlockHead(multiformats, reader) + const binary = await reader.exactly(blockLength) + reader.seek(blockLength) return { cid, binary } } async function readBlockIndex (multiformats, reader) { + const offset = reader.pos const head = await readBlockHead(multiformats, reader) - head.offset = reader.pos - reader.seek(head.length) + head.offset = offset + head.blockOffset = reader.pos + reader.seek(head.blockLength) return head } diff --git a/lib/raw.js b/lib/raw.js index 0667214..b012343 100644 --- a/lib/raw.js +++ b/lib/raw.js @@ -16,15 +16,19 @@ const { StreamDecoder, FileDecoder } = require('./coding') * `CarDatastore.readRaw()`). * * ```js - * const { indexer } = require('datastore-car') + * // full multiformats omitted, you'll need codecs, bases and hashes that + * // appear in your CAR files if you want full information + * const multiformats = ... 
+ * const { indexer } = require('datastore-car')(multiformats) * * async function run () { + * const cidStr = (cid) => `${multiformats.get(cid.code).name}:${cid.toString()}` * const index = await indexer('big.car') - * index.roots = index.roots.map((cid) => cid.toString()) + * index.roots = index.roots.map(cidStr) * console.log('roots:', index.roots) * for await (const blockIndex of index.iterator) { - * blockIndex.cid = blockIndex.cid.toString() - * console.log('block:', blockIndex) + * blockIndex.cid = cidStr(cid) + * console.log(JSON.toString(blockIndex)) * } * } * @@ -38,24 +42,17 @@ const { StreamDecoder, FileDecoder } = require('./coding') * * ``` * roots: [ - * 'bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm', - * 'bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm' + * 'dag-cbor:bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm', + * 'dag-cbor:bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm' * ] - * block: { - * cid: 'bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm', - * length: 55, - * offset: 137 - * } - * block: { - * cid: 'QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d', - * length: 97, - * offset: 228 - * } - * block: { - * cid: 'bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke', - * length: 4, - * offset: 362 - * } + * {"cid":"dag-cbor:bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm","length":92,"blockLength":55,"offset":100,"blockOffset":137} + * {"cid":"dag-pb:QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d","length":133,"blockLength":97,"offset":192,"blockOffset":228} + * {"cid":"raw:bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke","length":41,"blockLength":4,"offset":325,"blockOffset":362} + * {"cid":"dag-pb:QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys","length":130,"blockLength":94,"offset":366,"blockOffset":402} + * {"cid":"raw:bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4","length":41,"blockLength":4,"offset":496,"blockOffset":533} + * 
{"cid":"dag-pb:QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT","length":82,"blockLength":47,"offset":537,"blockOffset":572} + * {"cid":"raw:bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq","length":41,"blockLength":4,"offset":619,"blockOffset":656} + * {"cid":"dag-cbor:bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm","length":55,"blockLength":18,"offset":660,"blockOffset":697} * ... * ``` * @@ -74,9 +71,9 @@ const { StreamDecoder, FileDecoder } = require('./coding') * or a ReadableStream that provides CAR archive data. * @returns {Object} an object containing a * `roots` array of CIDs and an `iterator` AsyncIterator that will yield - * Objects of the form `{ cid:CID, offset:number, length:number }` indicating - * the CID of the block located at start=`offset` with a length of `number` in - * the CAR archive provided. + * Objects of the form `{ cid:CID, offset:number, length:number, byteOffset:number, byteLength:number }` + * indicating the CID of the block located at `blockOffset` with a length of + * `blockLength` in the CAR archive provided. */ async function indexer (multiformats, inp) { let decoder @@ -96,8 +93,8 @@ async function indexer (multiformats, inp) { * @name CarDatastore.readRaw * @description * Read a block directly from a CAR file given an block index provided by - * `CarDatastore.indexer()` (i.e. an object of the form: - * `{ cid:CID, offset:number, length:number }`). + * `CarDatastore.indexer()` (i.e. an object with the minimal form: + * `{ cid:CID, blockOffset:number, blockLength:number }`). * @function * @memberof CarDatastore * @static @@ -109,18 +106,18 @@ async function indexer (multiformats, inp) { * @returns {object} an IPLD block of the form `{ cid, binary }`. 
*/ async function readRaw (fd, blockIndex) { - const cid = blockIndex.cid - const binary = Buffer.alloc(blockIndex.length) + const { cid, blockLength, blockOffset } = blockIndex + const binary = Buffer.alloc(blockLength) let read if (typeof fd === 'number') { - read = (await fs.read(fd, binary, 0, blockIndex.length, blockIndex.offset)).bytesRead + read = (await fs.read(fd, binary, 0, blockLength, blockOffset)).bytesRead } else if (typeof fd === 'object' && typeof fd.read === 'function') { - read = (await fd.read(binary, 0, blockIndex.length, blockIndex.offset)).bytesRead + read = (await fd.read(binary, 0, blockLength, blockOffset)).bytesRead } else { throw new TypeError('Bad fd') } - if (read !== blockIndex.length) { - throw new Error(`Failed to read entire block (${read} instead of ${blockIndex.length})`) + if (read !== blockLength) { + throw new Error(`Failed to read entire block (${read} instead of ${blockLength})`) } return { cid, binary } } diff --git a/test/test-raw.js b/test/test-raw.js index d5599c3..177af5b 100644 --- a/test/test-raw.js +++ b/test/test-raw.js @@ -18,14 +18,14 @@ describe('Raw', () => { new multiformats.CID('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm') ] const expectedIndex = [ - { cid: new multiformats.CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), length: 55, offset: 137 }, - { cid: new multiformats.CID('QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d'), length: 97, offset: 228 }, - { cid: new multiformats.CID('bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke'), length: 4, offset: 362 }, - { cid: new multiformats.CID('QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys'), length: 94, offset: 402 }, - { cid: new multiformats.CID('bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4'), length: 4, offset: 533 }, - { cid: new multiformats.CID('QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT'), length: 47, offset: 572 }, - { cid: new 
multiformats.CID('bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq'), length: 4, offset: 656 }, - { cid: new multiformats.CID('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm'), length: 18, offset: 697 } + { cid: new multiformats.CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), offset: 100, length: 92, blockOffset: 137, blockLength: 55 }, + { cid: new multiformats.CID('QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d'), offset: 192, length: 133, blockOffset: 228, blockLength: 97 }, + { cid: new multiformats.CID('bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke'), offset: 325, length: 41, blockOffset: 362, blockLength: 4 }, + { cid: new multiformats.CID('QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys'), offset: 366, length: 130, blockOffset: 402, blockLength: 94 }, + { cid: new multiformats.CID('bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4'), offset: 496, length: 41, blockOffset: 533, blockLength: 4 }, + { cid: new multiformats.CID('QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT'), offset: 537, length: 82, blockOffset: 572, blockLength: 47 }, + { cid: new multiformats.CID('bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq'), offset: 619, length: 41, blockOffset: 656, blockLength: 4 }, + { cid: new multiformats.CID('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm'), offset: 660, length: 55, blockOffset: 697, blockLength: 18 } ] let allBlocksFlattened @@ -99,11 +99,11 @@ describe('Raw', () => { }) const badBlock = Object.assign({}, expectedIndex[expectedIndex.length - 1]) - badBlock.length += 10 + badBlock.blockLength += 10 const fd = await fs.open(path.join(__dirname, 'go.car')) await assert.rejects(readRaw(fd, badBlock), { name: 'Error', - message: `Failed to read entire block (${badBlock.length - 10} instead of ${badBlock.length})` + message: `Failed to read entire block (${badBlock.blockLength - 10} instead of ${badBlock.blockLength})` }) await fs.close(fd) }) From 
e66118a4b00043bcb8e1afd15892824b2b092e99 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Fri, 15 May 2020 15:42:58 +1000 Subject: [PATCH 08/23] fix indexer() skipping chunks on a stream --- lib/coding.js | 9 ++++++++- test/test-large.js | 37 +++++++++++++++++++++++++++++++++++-- 2 files changed, 43 insertions(+), 3 deletions(-) diff --git a/lib/coding.js b/lib/coding.js index 51f12f2..6648b27 100644 --- a/lib/coding.js +++ b/lib/coding.js @@ -22,8 +22,15 @@ function chunkReader (readChunk, closer) { if (chunk.length === 0) { break } + if (have < 0) { // because of a seek() + // istanbul ignore next toohard to test the else + if (chunk.length > have) { + bufa.push(chunk.slice(-have)) + } // else discard + } else { + bufa.push(chunk) + } have += chunk.length - bufa.push(chunk) } currentChunk = Buffer.concat(bufa) offset = 0 diff --git a/test/test-large.js b/test/test-large.js index 29ca854..63405ad 100644 --- a/test/test-large.js +++ b/test/test-large.js @@ -4,17 +4,23 @@ const assert = require('assert') const fs = require('fs') const unlink = require('util').promisify(require('fs').unlink) const garbage = require('garbage') +const varint = require('varint') const multiformats = require('multiformats/basics') multiformats.add(require('@ipld/dag-cbor')) -const { writeStream, readFileComplete, readStreaming } = require('../')(multiformats) +const { writeStream, readFileComplete, readStreaming, indexer } = require('../')(multiformats) describe('Large CAR', () => { const objects = [] const cids = [] + const expectedIndex = [] it('create, no roots', async () => { const carDs = await writeStream(fs.createWriteStream('./test.car')) + // offset starts at header length + let offset = multiformats.encode({ version: 1, roots: [] }, 'dag-cbor').length + offset += varint.encode(offset.length).length + for (let i = 0; i < 500; i++) { const obj = garbage.object(1000) objects.push(obj) @@ -22,10 +28,37 @@ describe('Large CAR', () => { const mh = await 
multiformats.multihash.hash(binary, 'sha2-256') const cid = new multiformats.CID(1, multiformats.get('dag-cbor').code, mh) cids.push(cid.toString()) + const blockLength = binary.length + let length = cid.buffer.length + blockLength + const lengthLength = varint.encode(length).length + length += lengthLength + const blockOffset = offset + lengthLength + cid.buffer.length + expectedIndex.push({ cid, offset, length, blockOffset, blockLength }) + offset += length await carDs.put(cid, binary) } - return carDs.close() + await carDs.close() + }) + + it('indexer stream', async () => { + const index = await indexer(fs.createReadStream('./test.car')) + assert.deepStrictEqual(index.roots, []) + let i = 0 + for await (const blockIndex of index.iterator) { + assert.deepStrictEqual(blockIndex, expectedIndex[i]) + i++ + } + }) + + it('indexer file', async () => { + const index = await indexer('./test.car') + assert.deepStrictEqual(index.roots, []) + let i = 0 + for await (const blockIndex of index.iterator) { + assert.deepStrictEqual(blockIndex, expectedIndex[i]) + i++ + } }) it('readFileComplete', async () => { From e09d42dd166b22131331b9c01e544f4080bf5c62 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Sat, 16 May 2020 14:53:51 +1000 Subject: [PATCH 09/23] turn cidv0 magic numbers into constants --- lib/coding-browser.js | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/lib/coding-browser.js b/lib/coding-browser.js index b30b98b..738286b 100644 --- a/lib/coding-browser.js +++ b/lib/coding-browser.js @@ -1,5 +1,10 @@ const varint = require('varint') const { isBlock, isCID } = require('./util') +const CIDV0_BYTES = { + SHA2_256: 0x12, + LENGTH: 0x20, + DAG_PB: 0x70 +} function bufferReader (buf) { let pos = 0 @@ -60,11 +65,11 @@ async function readMultihash (reader) { async function readCid (multiformats, reader) { const first = await reader.exactly(2) - if (first[0] === 0x12 && first[1] === 0x20) { + if (first[0] === CIDV0_BYTES.SHA2_256 && first[1] === 
CIDV0_BYTES.LENGTH) { // cidv0 32-byte sha2-256 const bytes = await reader.exactly(34) reader.seek(34) - return new multiformats.CID(0, 0x70, Uint8Array.from(bytes)) + return new multiformats.CID(0, CIDV0_BYTES.DAG_PB, Uint8Array.from(bytes)) } const version = await readVarint(reader) From bf95c2d91258d5433c33ab4eae6f337e650b55d1 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Fri, 22 May 2020 15:48:27 +1000 Subject: [PATCH 10/23] feat: add readFileIndexed for large files, random access, pre-indexed --- README.md | 38 +++++++++++++-- car.js | 43 +++++++++++++++-- lib/raw.js | 2 +- lib/reader.js | 71 ++++++++++++++++++++++++++- test/test-readindexed.js | 102 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 248 insertions(+), 8 deletions(-) create mode 100644 test/test-readindexed.js diff --git a/README.md b/README.md index 429dc35..ea48fd9 100644 --- a/README.md +++ b/README.md @@ -66,6 +66,7 @@ In this example, the `writeStream()` create-mode is used to generate the CAR fil * **[`CarDatastore.readFileComplete(file)`](#CarDatastore__readFileComplete)**: read a CAR archive directly from a file. Does not support mutation operations, only reads. This mode is not efficient for large data sets but does support `get()` and `has()` operations since it caches the entire archive in memory. This mode is _not available_ in a browser environment. * **[`CarDatastore.readStreamComplete(stream)`](#CarDatastore__readStreamComplete)**: read a CAR archive directly from a stream. Does not support mutation operations, only reads. This mode is not efficient for large data sets but does support `get()` and `has()` operations since it caches the entire archive in memory. This mode is _not available_ in a browser environment. * **[`CarDatastore.readStreaming(stream)`](#CarDatastore__readStreaming)**: read a CAR archive directly from a stream. Does not support mutation operations, and only supports iterative reads via `query()` (i.e. no `get()` and `has()`). 
This mode is very efficient for large data sets. This mode is _not available_ in a browser environment. +* **[`async CarDatastore.readFileIndexed(stream)`](#CarDatastore__readFileIndexed)**: read a CAR archive from a local file, index its contents and use that index to support random access reads (`has()`, `get()` and `query()`) without fitting the entire contents in memory as `readFileComplete()` does. Uses more memory than `readStreaming()` and less than `readFileComplete()`. Will be slower to initialize than `readStreaming()` but suitable where random access reads are required from a large file. * **[`CarDatastore.writeStream(stream)`](#CarDatastore__writeStream)**: write a CAR archive to a stream (e.g. `fs.createWriteStream(file)`). Does not support read operations, only writes, and the writes are append-only (i.e. no `delete()`). However, this mode is very efficient for dumping large data sets, with no caching and streaming writes. This mode is _not available_ in a browser environment. Other create-modes may be supported in the future, such as writing to a Buffer (although this is already possible if you couple `writeStream()` with a [`BufferListStream`](https://ghub.io/bl)) or a read/write mode such as datastore-zipcar makes available. @@ -78,6 +79,7 @@ Other create-modes may be supported in the future, such as writing to a Buffer ( * [`async CarDatastore.readFileComplete(file)`](#CarDatastore__readFileComplete) * [`async CarDatastore.readStreamComplete(stream)`](#CarDatastore__readStreamComplete) * [`async CarDatastore.readStreaming(stream)`](#CarDatastore__readStreaming) + * [`async CarDatastore.readFileIndexed(stream)`](#CarDatastore__readFileIndexed) * [`async CarDatastore.writeStream(stream)`](#CarDatastore__writeStream) * [`async CarDatastore.completeGraph(root, get, car[, concurrency])`](#CarDatastore__completeGraph) * [`class CarDatastore`](#CarDatastore) @@ -118,7 +120,7 @@ environment. Read a CAR archive from a file and return a CarDatastore. 
The CarDatastore returned will _only_ support read operations: `getRoots()`, `get()`, `has()` -and `query()`. Caching makes `get()` and `has()` possible as the entire +and `query()`. Caching makes `get()` and `has()`. This is possible as the entire file is read and decoded before the CarDatastore is returned. mutation operations (`put()`, `delete()` and `setRoots()`) are not available as there is no ability to modify the archive. @@ -148,7 +150,7 @@ This create-mode is not available in the browser environment. Read a CAR archive as a CarDataStore from a ReadableStream. The CarDatastore returned will _only_ support read operations: `getRoots()`, `get()`, `has()` -and `query()`. Caching makes `get()` and `has()` possible as the entire +and `query()`. Caching makes `get()` and `has()`. This is possible as the entire stream is read and decoded before the CarDatastore is returned. Mutation operations (`put()`, `delete()` and `setRoots()`) are not available as there is no ability to modify the archive. @@ -194,6 +196,36 @@ This create-mode is not available in the browser environment. **Return value** _(`CarDatastore`)_: a read-only CarDatastore. + +### `async CarDatastore.readFileIndexed(stream)` + +Read a CAR archive as a CarDataStore from a local file. The CarDatastore +returned will _only_ support read operations: `getRoots()`, `get()`, `has()` +and `query()`. Caching makes `get()` and `has()`. This is possible as the entire +stream is read and _indexed_ before the CarDatastore is returned. Mutation +operations (`put()`, `delete()` and `setRoots()`) are not available as there +is no ability to modify the archive. + +The indexing operation uses [`indexer`](#indexer) to catalogue the contents of the +CAR and store a mapping of CID to byte locations for each entry. This method +of parsing is not as memory intensive as [`readStreamComplete`](#readStreamComplete) as only +the index is stored in memory. 
When blocks are read, the index tells the +reader where to fetch the block from within the CAR file. + +This mode is suitable for large files where random-access operations are +required. Where a full sequential read is only required, use +[`createReadStreaming`](#createReadStreaming) which consumes the file in a single pass with no +memory used for indexing. + +This create-mode is not available in the browser environment. + +**Parameters:** + +* **`stream`** _(`ReadableStream`)_: a ReadableStream that provides an entire CAR + archive as a binary stream. + +**Return value** _(`CarDatastore`)_: a read-only CarDatastore. + ### `async CarDatastore.writeStream(stream)` @@ -388,7 +420,7 @@ async function run () { index.roots = index.roots.map(cidStr) console.log('roots:', index.roots) for await (const blockIndex of index.iterator) { - blockIndex.cid = cidStr(cid) + blockIndex.cid = cidStr(blockIndex.cid) console.log(JSON.toString(blockIndex)) } } diff --git a/car.js b/car.js index cae657f..1c19cf3 100644 --- a/car.js +++ b/car.js @@ -1,5 +1,5 @@ const { Reader, NoWriter } = require('./lib/reader-writer-iface') -const { createStreamCompleteReader, createStreamingReader, createFileReader } = require('./lib/reader') +const { createStreamCompleteReader, createStreamingReader, createFileReader, createFileIndexedReader } = require('./lib/reader') const createStreamWriter = require('./lib/writer-stream') const CarDatastore = require('./datastore') const browser = require('./car-browser') @@ -10,7 +10,7 @@ const { indexer, readRaw } = require('./lib/raw') * @description * Read a CAR archive from a file and return a CarDatastore. The CarDatastore * returned will _only_ support read operations: `getRoots()`, `get()`, `has()` - * and `query()`. Caching makes `get()` and `has()` possible as the entire + * and `query()`. Caching makes `get()` and `has()`. This is possible as the entire * file is read and decoded before the CarDatastore is returned. 
mutation * operations (`put()`, `delete()` and `setRoots()`) are not available as there * is no ability to modify the archive. @@ -46,7 +46,7 @@ async function readFileComplete (multiformats, file) { * @description * Read a CAR archive as a CarDataStore from a ReadableStream. The CarDatastore * returned will _only_ support read operations: `getRoots()`, `get()`, `has()` - * and `query()`. Caching makes `get()` and `has()` possible as the entire + * and `query()`. Caching makes `get()` and `has()`. This is possible as the entire * stream is read and decoded before the CarDatastore is returned. Mutation * operations (`put()`, `delete()` and `setRoots()`) are not available as there * is no ability to modify the archive. @@ -104,6 +104,42 @@ async function readStreaming (multiformats, stream) { return new CarDatastore(multiformats, reader, writer) } +/** + * @name CarDatastore.readFileIndexed + * @description + * Read a CAR archive as a CarDataStore from a local file. The CarDatastore + * returned will _only_ support read operations: `getRoots()`, `get()`, `has()` + * and `query()`. Caching makes `get()` and `has()`. This is possible as the entire + * stream is read and _indexed_ before the CarDatastore is returned. Mutation + * operations (`put()`, `delete()` and `setRoots()`) are not available as there + * is no ability to modify the archive. + * + * The indexing operation uses {@link indexer} to catalogue the contents of the + * CAR and store a mapping of CID to byte locations for each entry. This method + * of parsing is not as memory intensive as {@link readStreamComplete} as only + * the index is stored in memory. When blocks are read, the index tells the + * reader where to fetch the block from within the CAR file. + * + * This mode is suitable for large files where random-access operations are + * required. 
Where a full sequential read is only required, use + * {@link createReadStreaming} which consumes the file in a single pass with no + * memory used for indexing. + * + * This create-mode is not available in the browser environment. + * @function + * @memberof CarDatastore + * @static + * @async + * @param {ReadableStream} stream a ReadableStream that provides an entire CAR + * archive as a binary stream. + * @returns {CarDatastore} a read-only CarDatastore. + */ +async function readFileIndexed (multiformats, filePath) { + const reader = await createFileIndexedReader(multiformats, filePath) + const writer = new NoWriter() + return new CarDatastore(multiformats, reader, writer) +} + /** * @name CarDatastore.writeStream * @description @@ -196,6 +232,7 @@ module.exports = (multiformats) => { readFileComplete: wrap(readFileComplete), readStreamComplete: wrap(readStreamComplete), readStreaming: wrap(readStreaming), + readFileIndexed: wrap(readFileIndexed), writeStream: wrap(writeStream), indexer: wrap(indexer), readRaw, diff --git a/lib/raw.js b/lib/raw.js index b012343..4b4cbbe 100644 --- a/lib/raw.js +++ b/lib/raw.js @@ -27,7 +27,7 @@ const { StreamDecoder, FileDecoder } = require('./coding') * index.roots = index.roots.map(cidStr) * console.log('roots:', index.roots) * for await (const blockIndex of index.iterator) { - * blockIndex.cid = cidStr(cid) + * blockIndex.cid = cidStr(blockIndex.cid) * console.log(JSON.toString(blockIndex)) * } * } diff --git a/lib/reader.js b/lib/reader.js index e9b438e..00cb501 100644 --- a/lib/reader.js +++ b/lib/reader.js @@ -1,5 +1,8 @@ +const fs = require('fs').promises +const { Errors } = require('interface-datastore') const coding = require('./coding') -const { createFromDecoded, StreamingReader } = require('./reader-writer-iface') +const { createFromDecoded, StreamingReader, Reader } = require('./reader-writer-iface') +const { indexer, readRaw } = require('./raw') async function createStreamCompleteReader (multiformats, stream) { 
const decoded = await coding.decodeStream(multiformats, stream) @@ -16,6 +19,72 @@ async function createFileReader (multiformats, data) { return createFromDecoded(decoded) } +async function createFileIndexedReader (multiformats, path) { + const { roots, iterator } = await indexer(multiformats, path) + const index = new Map() + const order = [] + for await (const blockIndex of iterator) { + const cidStr = blockIndex.cid.toString() + index.set(cidStr, blockIndex) + order.push(cidStr) + } + return new IndexedReader(path, roots, index, order) +} + +class IndexedReader extends Reader { + constructor (path, roots, index, order) { + super() + this._path = path + this._roots = roots + this._index = index + this._order = order + this._fd = null + } + + has (key) { + return this._index.has(key.toString()) + } + + async get (key) { + const blockIndex = this._index.get(key.toString()) + if (!blockIndex) { + throw Errors.notFoundError() + } + if (!this._fd) { + this._fd = await fs.open(this._path) + } + return (await readRaw(this._fd, blockIndex)).binary + } + + async * iterator (keysOnly) { + if (keysOnly) { + for (const key of this._order) { + yield { key } + } + } else { + if (!this._fd) { + this._fd = await fs.open(this._path) + } + for (const cidStr of this._order) { + const blockIndex = this._index.get(cidStr) + const { binary } = await readRaw(this._fd, blockIndex) + yield { key: cidStr, value: binary } + } + } + } + + get roots () { + return this._roots + } + + async close () { + if (this._fd) { + return this._fd.close() + } + } +} + module.exports.createStreamCompleteReader = createStreamCompleteReader module.exports.createStreamingReader = createStreamingReader module.exports.createFileReader = createFileReader +module.exports.createFileIndexedReader = createFileIndexedReader diff --git a/test/test-readindexed.js b/test/test-readindexed.js new file mode 100644 index 0000000..4747074 --- /dev/null +++ b/test/test-readindexed.js @@ -0,0 +1,102 @@ +/* eslint-env mocha */ 
+ +const assert = require('assert') +const path = require('path') +const multiformats = require('multiformats/basics') +multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58')) +const { readFileIndexed } = require('../')(multiformats) +const { acid, makeData, compareBlockData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') + +let rawBlocks, allBlocks + +describe('Read Indexed', () => { + before(async () => { + const data = await makeData() + rawBlocks = data.rawBlocks + allBlocks = data.allBlocksFlattened + }) + + it('read existing', async () => { + const carDs = await readFileIndexed(path.join(__dirname, 'go.car')) + await verifyHas(carDs) + await verifyBlocks(carDs) + await verifyRoots(carDs) + await assert.rejects(carDs.get(rawBlocks[3].cid)) // doesn't exist + await carDs.close() + }) + + it('verify only roots', async () => { + // tests deferred open for getRoots() + const carDs = await readFileIndexed(path.join(__dirname, 'go.car')) + await verifyRoots(carDs) + await carDs.close() + }) + + // when we instantiate from a Stream, CarDatastore should be immutable + it('immutable', async () => { + const carDs = await readFileIndexed(path.join(__dirname, 'go.car')) + await assert.rejects(carDs.put(acid, Buffer.from('blip'))) + await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) + await assert.rejects(carDs.setRoots(acid)) + await assert.rejects(carDs.setRoots([acid])) + }) + + it('read existing (query())', async () => { + const carDs = await readFileIndexed(path.join(__dirname, 'go.car')) + + async function verify () { + const blocks_ = allBlocks.slice() + const cids = [] + for (const block of blocks_) { + cids.push(block.cid.toString()) + } + + let i = 0 + for await (const entry of carDs.query()) { + assert.deepStrictEqual(Object.keys(entry), ['key', 'value']) + const foundIndex = cids.findIndex((cid) => cid === entry.key) + if (foundIndex < 0) { + assert.fail(`Unexpected CID/key 
found: ${entry.key}`) + } + compareBlockData(entry.value, blocks_[foundIndex].binary, `#${i++}`) + cids.splice(foundIndex, 1) + blocks_.splice(foundIndex, 1) + } + assert.strictEqual(cids.length, 0, 'found all expected CIDs') + // test after + await verifyRoots(carDs) + } + + await verify() + await verify() // second pass, file should be open now + + await carDs.close() + }) + + it('read existing (query({keysOnly}))', async () => { + const blocks_ = allBlocks.slice() + const cids = [] + for (const block of blocks_) { + cids.push(await block.cid.toString()) + } + + const carDs = await readFileIndexed(path.join(__dirname, 'go.car')) + + // test before + await verifyRoots(carDs) + + for await (const entry of carDs.query({ keysOnly: true })) { + assert.deepStrictEqual(Object.keys(entry), ['key']) + const foundIndex = cids.findIndex((cid) => cid === entry.key) + if (foundIndex < 0) { + assert.fail(`Unexpected CID/key found: ${entry.key}`) + } + assert.strictEqual(entry.value, undefined, 'no `value`') + cids.splice(foundIndex, 1) + blocks_.splice(foundIndex, 1) + } + assert.strictEqual(cids.length, 0, 'found all expected CIDs') + await carDs.close() + }) +}) From b1668bcf47f94bd83fb3e1f950da585750007f01 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Mon, 29 Jun 2020 15:54:11 +1000 Subject: [PATCH 11/23] latest multiformats, with ESM+CJS exports --- lib/reader.js | 6 +++--- package.json | 4 ++-- test/fixture-data.js | 4 ++-- test/test-decode.js | 2 +- test/test-encode.js | 2 +- test/test-errors.js | 2 +- test/test-large.js | 2 +- test/test-query.js | 4 ++-- test/test-raw.js | 4 ++-- test/test-readbuffer.js | 4 ++-- test/test-readfile-writestream.js | 4 ++-- test/test-readfile.js | 4 ++-- test/test-readindexed.js | 4 ++-- test/test-readstream.js | 4 ++-- test/test-readstreamcomplete.js | 4 ++-- 15 files changed, 27 insertions(+), 27 deletions(-) diff --git a/lib/reader.js b/lib/reader.js index 00cb501..29a9f67 100644 --- a/lib/reader.js +++ b/lib/reader.js @@ -23,9 +23,9 @@ 
async function createFileIndexedReader (multiformats, path) { const { roots, iterator } = await indexer(multiformats, path) const index = new Map() const order = [] - for await (const blockIndex of iterator) { - const cidStr = blockIndex.cid.toString() - index.set(cidStr, blockIndex) + for await (const { cid, blockLength, blockOffset } of iterator) { + const cidStr = cid.toString() + index.set(cidStr, { cid, blockLength, blockOffset }) order.push(cidStr) } return new IndexedReader(path, roots, index, order) diff --git a/package.json b/package.json index 3932ef6..8350b52 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,7 @@ }, "homepage": "https://github.com/rvagg/js-datastore-car#readme", "devDependencies": { - "@ipld/dag-cbor": "github:ipld/js-dag-cbor#multiformats", + "@ipld/dag-cbor": "^1.1.3", "assert": "^2.0.0", "bl": "^4.0.2", "cids": "^0.8.0", @@ -46,7 +46,7 @@ }, "dependencies": { "interface-datastore": "^1.0.2", - "multiformats": "github:rvagg/js-multiformats#rvagg/non-enumerable-basecache", + "multiformats": "^1.1.11", "varint": "^5.0.0" } } diff --git a/test/fixture-data.js b/test/fixture-data.js index 949e224..dc5dd6d 100644 --- a/test/fixture-data.js +++ b/test/fixture-data.js @@ -2,9 +2,9 @@ const assert = require('assert') const { DAGNode, DAGLink, util: pbUtil } = require('ipld-dag-pb') // TODO: remove this const CID = require('cids') -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) let rawBlocks const pbBlocks = [] diff --git a/test/test-decode.js b/test/test-decode.js index 3780f3c..4dc9a71 100644 --- a/test/test-decode.js +++ b/test/test-decode.js @@ -5,7 +5,7 @@ const { promisify } = require('util') const path = require('path') const fs = require('fs') fs.readFile = promisify(fs.readFile) -const 
multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) const { makeData, verifyDecoded } = require('./fixture-data') const coding = require('../lib/coding') diff --git a/test/test-encode.js b/test/test-encode.js index cdd267b..62b4634 100644 --- a/test/test-encode.js +++ b/test/test-encode.js @@ -6,7 +6,7 @@ const fs = require('fs') const bl = require('bl') fs.readFile = promisify(fs.readFile) fs.unlink = promisify(fs.unlink) -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) const { verifyDecoded, makeData } = require('./fixture-data') const coding = require('../lib/coding') diff --git a/test/test-errors.js b/test/test-errors.js index 05b1ff1..b75124c 100644 --- a/test/test-errors.js +++ b/test/test-errors.js @@ -3,7 +3,7 @@ const assert = require('assert') const fs = require('fs').promises const path = require('path') -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) const { readBuffer, readFileComplete, writeStream } = require('../')(multiformats) const { acid, car } = require('./fixture-data') diff --git a/test/test-large.js b/test/test-large.js index 63405ad..b871b63 100644 --- a/test/test-large.js +++ b/test/test-large.js @@ -5,7 +5,7 @@ const fs = require('fs') const unlink = require('util').promisify(require('fs').unlink) const garbage = require('garbage') const varint = require('varint') -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) const { writeStream, readFileComplete, readStreaming, indexer } = require('../')(multiformats) diff --git a/test/test-query.js b/test/test-query.js index 5a95b89..e745ba3 100644 --- a/test/test-query.js +++ b/test/test-query.js @@ 
-2,9 +2,9 @@ const assert = require('assert') const path = require('path') -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) const { readBuffer, readFile } = require('../')(multiformats) const { car, makeData, compareBlockData } = require('./fixture-data') diff --git a/test/test-raw.js b/test/test-raw.js index 177af5b..7add2eb 100644 --- a/test/test-raw.js +++ b/test/test-raw.js @@ -6,9 +6,9 @@ const path = require('path') const fs = require('fs') fs.open = promisify(fs.open) fs.close = promisify(fs.close) -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) const { indexer, readRaw } = require('../')(multiformats) const { makeData } = require('./fixture-data') diff --git a/test/test-readbuffer.js b/test/test-readbuffer.js index 7d48c81..43d3b4e 100644 --- a/test/test-readbuffer.js +++ b/test/test-readbuffer.js @@ -1,9 +1,9 @@ /* eslint-env mocha */ const assert = require('assert') -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) const { readBuffer } = require('../')(multiformats) const { acid, car, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') diff --git a/test/test-readfile-writestream.js b/test/test-readfile-writestream.js index 27a3b67..50c1165 100644 --- a/test/test-readfile-writestream.js +++ b/test/test-readfile-writestream.js @@ 
-3,9 +3,9 @@ const assert = require('assert') const fs = require('fs') const unlink = require('util').promisify(require('fs').unlink) -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) const { writeStream, readFileComplete } = require('../')(multiformats) const { makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') diff --git a/test/test-readfile.js b/test/test-readfile.js index 687f535..0151073 100644 --- a/test/test-readfile.js +++ b/test/test-readfile.js @@ -2,9 +2,9 @@ const assert = require('assert') const path = require('path') -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) const { readFileComplete } = require('../car')(multiformats) const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') diff --git a/test/test-readindexed.js b/test/test-readindexed.js index 4747074..c7d9eb9 100644 --- a/test/test-readindexed.js +++ b/test/test-readindexed.js @@ -2,9 +2,9 @@ const assert = require('assert') const path = require('path') -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) const { readFileIndexed } = require('../')(multiformats) const { acid, makeData, compareBlockData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') diff --git a/test/test-readstream.js b/test/test-readstream.js index 
9ae22af..d9b674f 100644 --- a/test/test-readstream.js +++ b/test/test-readstream.js @@ -3,9 +3,9 @@ const assert = require('assert') const fs = require('fs') const path = require('path') -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) const { readStreaming } = require('../')(multiformats) const { acid, makeData, compareBlockData, verifyRoots } = require('./fixture-data') diff --git a/test/test-readstreamcomplete.js b/test/test-readstreamcomplete.js index a77859e..86af16f 100644 --- a/test/test-readstreamcomplete.js +++ b/test/test-readstreamcomplete.js @@ -3,9 +3,9 @@ const assert = require('assert') const fs = require('fs') const path = require('path') -const multiformats = require('multiformats/basics') +const multiformats = require('multiformats/basics.js') multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) const { readStreamComplete } = require('../')(multiformats) const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') From a2cd900e176c37ba508d2726e08b4e0196e4e5f1 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Tue, 30 Jun 2020 15:10:25 +1000 Subject: [PATCH 12/23] FIXME fix rebase --- car.js | 1 - package.json | 1 - 2 files changed, 2 deletions(-) diff --git a/car.js b/car.js index 1c19cf3..50c817a 100644 --- a/car.js +++ b/car.js @@ -171,7 +171,6 @@ async function writeStream (multiformats, stream) { return new CarDatastore(multiformats, reader, writer) } -<<<<<<< HEAD async function traverseBlock (block, get, car, concurrency = 1, seen = new Set()) { const cid = await block.cid() await car.put(cid, block.encodeUnsafe()) diff --git a/package.json b/package.json 
index 8350b52..dc0ee54 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,6 @@ "bl": "^4.0.2", "cids": "^0.8.0", "garbage": "0.0.0", - "hundreds": "0.0.7", "hundreds": "0.0.2", "ipld-dag-pb": "^0.18.5", "jsdoc4readme": "^1.3.0", From d225246d145977112d0ef8905f3cf978139b31ec Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Wed, 1 Jul 2020 15:05:05 +1000 Subject: [PATCH 13/23] make completeGraph work with multiformats --- car.js | 6 +++--- package.json | 5 +++-- test/test-complete-graph.js | 9 ++++++--- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/car.js b/car.js index 50c817a..9224bdd 100644 --- a/car.js +++ b/car.js @@ -175,7 +175,7 @@ async function traverseBlock (block, get, car, concurrency = 1, seen = new Set() const cid = await block.cid() await car.put(cid, block.encodeUnsafe()) seen.add(cid.toString('base58btc')) - if (cid.codec === 'raw') { + if (cid.code === 0x55) { // raw return } const reader = block.reader() @@ -204,8 +204,8 @@ async function traverseBlock (block, get, car, concurrency = 1, seen = new Set() * @memberof CarDatastore * @static * @async - * @param {Block} root the root of the graph to start at, this block will be - * included in the CAR and its CID will be set as the single root. + * @param {CID} root the CID of the root of the graph to start at, this block + * will be included in the CAR and the CID will be set as the single root. * @param {AsyncFunction} get an `async` function that takes a CID and returns * a `Block`. Can be used to attach to an arbitrary data store. 
* @param {CarDatastore} car a writable `CarDatastore` that has not yet been diff --git a/package.json b/package.json index dc0ee54..f31960f 100644 --- a/package.json +++ b/package.json @@ -31,12 +31,13 @@ }, "homepage": "https://github.com/rvagg/js-datastore-car#readme", "devDependencies": { + "@ipld/block": "^5.1.1", "@ipld/dag-cbor": "^1.1.3", "assert": "^2.0.0", "bl": "^4.0.2", - "cids": "^0.8.0", + "cids": "^0.8.3", "garbage": "0.0.0", - "hundreds": "0.0.2", + "hundreds": "0.0.7", "ipld-dag-pb": "^0.18.5", "jsdoc4readme": "^1.3.0", "mocha": "^7.2.0", diff --git a/test/test-complete-graph.js b/test/test-complete-graph.js index 30e4cc9..7ac934e 100644 --- a/test/test-complete-graph.js +++ b/test/test-complete-graph.js @@ -1,8 +1,11 @@ /* eslint-env mocha */ const assert = require('assert') -const { writeStream, readBuffer, completeGraph } = require('../') -const Block = require('@ipld/block') +const multiformats = require('multiformats/basics.js') +multiformats.add(require('@ipld/dag-cbor')) +multiformats.multibase.add(require('multiformats/bases/base58.js')) +const { writeStream, readBuffer, completeGraph } = require('../car')(multiformats) +const Block = require('@ipld/block')(multiformats) const { PassThrough } = require('stream') const same = assert.deepStrictEqual @@ -15,7 +18,7 @@ function all (car) { const block = Block.create(encoded, link) yield block const cid = await block.cid() - if (cid.codec === 'raw') { + if (cid.code === 0x55) { return } From 9dd5d1aeae93329efa50ce11bf32070ddbf1073e Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Wed, 1 Jul 2020 15:06:59 +1000 Subject: [PATCH 14/23] remove browser support --- README.md | 4 ++-- package.json | 5 +---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index ea48fd9..e6a206d 100644 --- a/README.md +++ b/README.md @@ -259,8 +259,8 @@ a CAR file. 
**Parameters:** -* **`root`** _(`Block`)_: the root of the graph to start at, this block will be - included in the CAR and its CID will be set as the single root. +* **`root`** _(`CID`)_: the CID of the root of the graph to start at, this block + will be included in the CAR and the CID will be set as the single root. * **`get`** _(`AsyncFunction`)_: an `async` function that takes a CID and returns a `Block`. Can be used to attach to an arbitrary data store. * **`car`** _(`CarDatastore`)_: a writable `CarDatastore` that has not yet been diff --git a/package.json b/package.json index f31960f..66dd8ad 100644 --- a/package.json +++ b/package.json @@ -9,9 +9,7 @@ }, "scripts": { "lint": "standard", - "test:browser": "polendina --cleanup test/test-readbuffer.js test/test-query.js", - "test:node": "hundreds mocha test/test-*.js", - "test": "npm run lint && npm run test:node && npm run test:browser", + "test": "npm run lint && hundreds mocha test/test-*.js", "docs": "jsdoc4readme --readme *.js lib/raw.js", "coverage": "c8 --reporter=html --reporter=text mocha test/test-*.js && npx st -d coverage -p 8888" }, @@ -41,7 +39,6 @@ "ipld-dag-pb": "^0.18.5", "jsdoc4readme": "^1.3.0", "mocha": "^7.2.0", - "polendina": "^1.0.0", "standard": "^14.3.4" }, "dependencies": { From 8c21332c34e38a8cddbe0b6d5d1a204850d7184a Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Sat, 1 Aug 2020 09:04:48 +1000 Subject: [PATCH 15/23] update deps --- package.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/package.json b/package.json index 66dd8ad..a02132a 100644 --- a/package.json +++ b/package.json @@ -29,21 +29,21 @@ }, "homepage": "https://github.com/rvagg/js-datastore-car#readme", "devDependencies": { - "@ipld/block": "^5.1.1", - "@ipld/dag-cbor": "^1.1.3", + "@ipld/block": "^6.0.1", + "@ipld/dag-cbor": "^1.1.4", "assert": "^2.0.0", "bl": "^4.0.2", "cids": "^0.8.3", "garbage": "0.0.0", "hundreds": "0.0.7", - "ipld-dag-pb": "^0.18.5", + "ipld-dag-pb": "^0.19.0", 
"jsdoc4readme": "^1.3.0", - "mocha": "^7.2.0", + "mocha": "^8.1.0", "standard": "^14.3.4" }, "dependencies": { - "interface-datastore": "^1.0.2", - "multiformats": "^1.1.11", + "interface-datastore": "^2.0.0", + "multiformats": "^2.3.0", "varint": "^5.0.0" } } From 9a9c226fc101f49947415d9ba17c3abb8419e5cb Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Fri, 14 Aug 2020 17:52:30 +1000 Subject: [PATCH 16/23] convert to ESM --- car-browser.js | 11 +++++----- car.js | 16 +++++++------- datastore.js | 8 ++++--- example.js | 8 ++++--- lib/coding-browser.js | 12 ++++------- lib/coding.js | 35 +++++++++++++++++-------------- lib/raw.js | 9 ++++---- lib/reader-browser.js | 6 +++--- lib/reader-writer-iface.js | 12 +++++------ lib/reader.js | 17 +++++++-------- lib/util.js | 5 +---- lib/writer-stream.js | 6 +++--- package.json | 12 ++++++++--- test/fixture-data.js | 31 +++++++++++++-------------- test/test-complete-graph.js | 20 +++++++++++------- test/test-decode.js | 22 ++++++++++++------- test/test-encode.js | 18 +++++++++------- test/test-errors.js | 21 ++++++++++++------- test/test-large.js | 21 ++++++++++++------- test/test-query.js | 18 +++++++++------- test/test-raw.js | 27 ++++++++++++++++-------- test/test-readbuffer.js | 16 ++++++++------ test/test-readfile-writestream.js | 22 ++++++++++++------- test/test-readfile.js | 22 ++++++++++++------- test/test-readindexed.js | 22 ++++++++++++------- test/test-readstream.js | 24 ++++++++++++++------- test/test-readstreamcomplete.js | 24 ++++++++++++++------- 27 files changed, 274 insertions(+), 191 deletions(-) diff --git a/car-browser.js b/car-browser.js index dc404de..239ee1c 100644 --- a/car-browser.js +++ b/car-browser.js @@ -1,6 +1,6 @@ -const { NoWriter } = require('./lib/reader-writer-iface') -const { createBufferReader } = require('./lib/reader-browser') -const CarDatastore = require('./datastore') +import { NoWriter } from './lib/reader-writer-iface.js' +import { createBufferReader } from './lib/reader-browser.js' 
+import CarDatastore from './datastore.js' /** * @name CarDatastore.readBuffer @@ -29,7 +29,7 @@ async function readBuffer (multiformats, buffer) { return new CarDatastore(multiformats, reader, writer) } -module.exports = (multiformats) => { +export default (multiformats) => { function wrap (fn) { return function (...args) { return fn(multiformats, ...args) @@ -40,4 +40,5 @@ module.exports = (multiformats) => { readBuffer: wrap(readBuffer) } } -module.exports.readBuffer = readBuffer + +export { readBuffer } diff --git a/car.js b/car.js index 9224bdd..9c12d19 100644 --- a/car.js +++ b/car.js @@ -1,9 +1,9 @@ -const { Reader, NoWriter } = require('./lib/reader-writer-iface') -const { createStreamCompleteReader, createStreamingReader, createFileReader, createFileIndexedReader } = require('./lib/reader') -const createStreamWriter = require('./lib/writer-stream') -const CarDatastore = require('./datastore') -const browser = require('./car-browser') -const { indexer, readRaw } = require('./lib/raw') +import { Reader, NoWriter } from './lib/reader-writer-iface.js' +import { createStreamCompleteReader, createStreamingReader, createFileReader, createFileIndexedReader } from './lib/reader.js' +import createStreamWriter from './lib/writer-stream.js' +import CarDatastore from './datastore.js' +import browser from './car-browser.js' +import { indexer, readRaw } from './lib/raw.js' /** * @name CarDatastore.readFileComplete @@ -220,7 +220,7 @@ async function completeGraph (root, get, car, concurrency) { await car.close() } -module.exports = (multiformats) => { +function create (multiformats) { function wrap (fn) { return function (...args) { return fn(multiformats, ...args) @@ -238,3 +238,5 @@ module.exports = (multiformats) => { completeGraph }) } + +export default create diff --git a/datastore.js b/datastore.js index 9825b9c..b79b946 100644 --- a/datastore.js +++ b/datastore.js @@ -1,5 +1,7 @@ -const { filter, map } = require('interface-datastore').utils -const { toKey } = 
require('./lib/util') +import interfaceDatastore from 'interface-datastore' +import { toKey } from './lib/util.js' + +const { filter, map } = interfaceDatastore.utils /** * CarDatastore is a class to manage reading from, and writing to a CAR archives @@ -225,4 +227,4 @@ class CarDatastore { } } -module.exports = CarDatastore +export default CarDatastore diff --git a/example.js b/example.js index ac131c6..bdd010f 100644 --- a/example.js +++ b/example.js @@ -1,8 +1,10 @@ -const fs = require('fs') -const multiformats = require('multiformats/basics') +import fs from 'fs' +import multiformats from 'multiformats/basics' +import car from './' + // dag-cbor is required for the CAR root block multiformats.add(require('@ipld/dag-cbor')) -const CarDatastore = require('./')(multiformats) +const CarDatastore = car(multiformats) async function example () { const binary = Buffer.from('random meaningless bytes') diff --git a/lib/coding-browser.js b/lib/coding-browser.js index 738286b..1767bb5 100644 --- a/lib/coding-browser.js +++ b/lib/coding-browser.js @@ -1,5 +1,6 @@ -const varint = require('varint') -const { isBlock, isCID } = require('./util') +import varint from 'varint' +import { isBlock, isCID } from './util.js' + const CIDV0_BYTES = { SHA2_256: 0x12, LENGTH: 0x20, @@ -242,9 +243,4 @@ async function encodeBuffer (multiformats, roots, blocks) { return Buffer.concat(bl) } -module.exports.encode = encode -module.exports.Encoder = Encoder -module.exports.decode = decode -module.exports.Decoder = Decoder -module.exports.decodeBuffer = decodeBuffer -module.exports.encodeBuffer = encodeBuffer +export { encode, Encoder, decode, Decoder, decodeBuffer, encodeBuffer } diff --git a/lib/coding.js b/lib/coding.js index 6648b27..1aa3d5d 100644 --- a/lib/coding.js +++ b/lib/coding.js @@ -1,9 +1,10 @@ -const fs = require('fs').promises -fs.createWriteStream = require('fs').createWriteStream -const { promisify } = require('util') -const { PassThrough, pipeline } = require('stream') +import 
{ promises as fs, createWriteStream } from 'fs' +import { promisify } from 'util' +import { PassThrough, pipeline } from 'stream' +import { Encoder, Decoder, encode, decode, encodeBuffer, decodeBuffer } from './coding-browser.js' + const pipelineAsync = promisify(pipeline) -const { Encoder, Decoder, encode, decode, encodeBuffer, decodeBuffer } = require('./coding-browser') +fs.createWriteStream = createWriteStream // reusable reader for streams and files, we just need a way to read an // additional chunk (of some undetermined size) and a way to close the @@ -245,14 +246,16 @@ function createStreamWriter (stream) { } } -module.exports.Encoder = Encoder -module.exports.Decoder = Decoder -module.exports.decodeFile = decodeFile -module.exports.decodeStream = decodeStream -module.exports.StreamDecoder = StreamDecoder -module.exports.FileDecoder = FileDecoder -module.exports.decodeBuffer = decodeBuffer -module.exports.encodeFile = encodeFile -module.exports.encodeStream = encodeStream -module.exports.encodeBuffer = encodeBuffer -module.exports.createStreamWriter = createStreamWriter +export { + Encoder, + Decoder, + decodeFile, + decodeStream, + StreamDecoder, + FileDecoder, + decodeBuffer, + encodeFile, + encodeStream, + encodeBuffer, + createStreamWriter +} diff --git a/lib/raw.js b/lib/raw.js index 4b4cbbe..31fbbb5 100644 --- a/lib/raw.js +++ b/lib/raw.js @@ -1,7 +1,7 @@ -const { promisify } = require('util') -const fs = require('fs') +import { promisify } from 'util' +import fs from 'fs' +import { StreamDecoder, FileDecoder } from './coding.js' fs.read = promisify(fs.read) -const { StreamDecoder, FileDecoder } = require('./coding') /** * @name CarDatastore.indexer @@ -122,5 +122,4 @@ async function readRaw (fd, blockIndex) { return { cid, binary } } -module.exports.indexer = indexer -module.exports.readRaw = readRaw +export { indexer, readRaw } diff --git a/lib/reader-browser.js b/lib/reader-browser.js index f5b25b3..d1a76ba 100644 --- a/lib/reader-browser.js +++ 
b/lib/reader-browser.js @@ -1,9 +1,9 @@ -const coding = require('./coding-browser') -const { createFromDecoded } = require('./reader-writer-iface') +import * as coding from './coding-browser.js' +import { createFromDecoded } from './reader-writer-iface.js' async function createBufferReader (multiformats, data) { const decoded = await coding.decodeBuffer(multiformats, data) return createFromDecoded(decoded) } -module.exports.createBufferReader = createBufferReader +export { createBufferReader } diff --git a/lib/reader-writer-iface.js b/lib/reader-writer-iface.js index 3a630f9..255e3e8 100644 --- a/lib/reader-writer-iface.js +++ b/lib/reader-writer-iface.js @@ -1,5 +1,7 @@ -const { Errors } = require('interface-datastore') -const { verifyRoots } = require('./util') +import interfaceDatastore from 'interface-datastore' +import { verifyRoots } from './util.js' + +const { Errors } = interfaceDatastore class Reader { /* c8 ignore next 3 */ @@ -110,8 +112,4 @@ async function createFromDecoded (decoded) { return new DecodedReader(decoded) } -module.exports.createFromDecoded = createFromDecoded -module.exports.Reader = Reader -module.exports.Writer = Writer -module.exports.StreamingReader = StreamingReader -module.exports.NoWriter = NoWriter +export { createFromDecoded, Reader, Writer, StreamingReader, NoWriter } diff --git a/lib/reader.js b/lib/reader.js index 29a9f67..2338cf4 100644 --- a/lib/reader.js +++ b/lib/reader.js @@ -1,8 +1,10 @@ -const fs = require('fs').promises -const { Errors } = require('interface-datastore') -const coding = require('./coding') -const { createFromDecoded, StreamingReader, Reader } = require('./reader-writer-iface') -const { indexer, readRaw } = require('./raw') +import { promises as fs } from 'fs' +import interfaceDatastore from 'interface-datastore' +import * as coding from './coding.js' +import { createFromDecoded, StreamingReader, Reader } from './reader-writer-iface.js' +import { indexer, readRaw } from './raw.js' + +const { Errors } = 
interfaceDatastore async function createStreamCompleteReader (multiformats, stream) { const decoded = await coding.decodeStream(multiformats, stream) @@ -84,7 +86,4 @@ class IndexedReader extends Reader { } } -module.exports.createStreamCompleteReader = createStreamCompleteReader -module.exports.createStreamingReader = createStreamingReader -module.exports.createFileReader = createFileReader -module.exports.createFileIndexedReader = createFileIndexedReader +export { createStreamCompleteReader, createStreamingReader, createFileReader, createFileIndexedReader } diff --git a/lib/util.js b/lib/util.js index 03f5d53..e5e5196 100644 --- a/lib/util.js +++ b/lib/util.js @@ -42,7 +42,4 @@ function verifyRoots (roots) { return roots } -module.exports.isBlock = isBlock -module.exports.isCID = isCID -module.exports.toKey = toKey -module.exports.verifyRoots = verifyRoots +export { isBlock, isCID, toKey, verifyRoots } diff --git a/lib/writer-stream.js b/lib/writer-stream.js index c9ba26e..52deac4 100644 --- a/lib/writer-stream.js +++ b/lib/writer-stream.js @@ -1,5 +1,5 @@ -const { Writer } = require('./reader-writer-iface') -const { Encoder, createStreamWriter } = require('./coding') +import { Writer } from './reader-writer-iface.js' +import { Encoder, createStreamWriter } from './coding.js' class StreamWriter extends Writer { constructor (multiformats, outStream) { @@ -51,4 +51,4 @@ function create (multiformats, stream) { return new StreamWriter(multiformats, stream) } -module.exports = create +export default create diff --git a/package.json b/package.json index a02132a..874abef 100644 --- a/package.json +++ b/package.json @@ -3,15 +3,21 @@ "version": "1.2.0", "description": "Content ARchive format reader and writer", "main": "car.js", + "type": "module", "browser": "car-browser.js", "directories": { "test": "test" }, "scripts": { + "build": "npx @mikeal/limbo test/test-*.js", + "prepublishOnly": "npm run build", "lint": "standard", - "test": "npm run lint && hundreds mocha 
test/test-*.js", - "docs": "jsdoc4readme --readme *.js lib/raw.js", - "coverage": "c8 --reporter=html --reporter=text mocha test/test-*.js && npx st -d coverage -p 8888" + "test:cjs": "rm -rf dist && npm run build && mocha dist/cjs-node/test/test-*.cjs && npm run test:cjs:browser", + "test:node": "hundreds mocha test/test-*.js", + "test:cjs:browser": "polendina --cleanup dist/cjs-browser/test/test-*.js", + "test": "npm run lint && npm run test:node && npm run test:cjs", + "coverage": "c8 --reporter=html --reporter=text mocha test/test-*.js && npx st -d coverage -p 8888", + "docs": "jsdoc4readme --readme *.js lib/raw.js" }, "repository": { "type": "git", diff --git a/test/fixture-data.js b/test/fixture-data.js index dc5dd6d..d0e36fb 100644 --- a/test/fixture-data.js +++ b/test/fixture-data.js @@ -1,10 +1,15 @@ -const assert = require('assert') -const { DAGNode, DAGLink, util: pbUtil } = require('ipld-dag-pb') -// TODO: remove this -const CID = require('cids') -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) +import assert from 'assert' +import IpldDagPb from 'ipld-dag-pb' +import CID from 'cids' +import multiformats from 'multiformats/basics.js' + +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' + +const { DAGNode, DAGLink, util: pbUtil } = IpldDagPb + +multiformats.add(dagCbor) +multiformats.multibase.add(base58) let rawBlocks const pbBlocks = [] @@ -180,14 +185,6 @@ async function verifyRoots (carDs, modified) { assert.deepStrictEqual((await carDs.getRoots()).map((c) => c.toString()), expected) } -module.exports.makeData = makeData -module.exports.verifyBlocks = verifyBlocks -module.exports.verifyHas = verifyHas -module.exports.verifyRoots = verifyRoots -module.exports.acid = acid -module.exports.compareBlockData = compareBlockData - -// TODO: delete? 
-module.exports.verifyDecoded = verifyDecoded +const car = Buffer.from('63a265726f6f747382d82a58250001711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8bd82a5825000171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365b6776657273696f6e01280155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b461616161280155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e646877626262622801551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a2745163636363511220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d0122d0a240155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4120363617418048001122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a122d0a240155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e6468771203646f671804122d0a221220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d01205666972737418338301122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de122e0a2401551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a274511204626561721804122f0a22122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a12067365636f6e641895015b01711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8ba2646c696e6bd82a582300122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de646e616d6564626c6970360171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365ba2646c696e6bf6646e616d65656c696d626f', 'hex') -module.exports.car = 
Buffer.from('63a265726f6f747382d82a58250001711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8bd82a5825000171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365b6776657273696f6e01280155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b461616161280155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e646877626262622801551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a2745163636363511220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d0122d0a240155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4120363617418048001122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a122d0a240155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e6468771203646f671804122d0a221220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d01205666972737418338301122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de122e0a2401551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a274511204626561721804122f0a22122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a12067365636f6e641895015b01711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8ba2646c696e6bd82a582300122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de646e616d6564626c6970360171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365ba2646c696e6bf6646e616d65656c696d626f', 'hex') +export { makeData, verifyBlocks, verifyHas, verifyRoots, acid, compareBlockData, verifyDecoded, car } diff --git a/test/test-complete-graph.js b/test/test-complete-graph.js index 7ac934e..a5a9ec0 100644 --- a/test/test-complete-graph.js +++ b/test/test-complete-graph.js @@ -1,12 +1,18 @@ /* eslint-env mocha */ -const assert = require('assert') -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) -const { writeStream, 
readBuffer, completeGraph } = require('../car')(multiformats) -const Block = require('@ipld/block')(multiformats) -const { PassThrough } = require('stream') +import assert from 'assert' +import multiformats from 'multiformats/basics.js' +import { PassThrough } from 'stream' + +import IpldBlock from '@ipld/block' +import Car from '../car.js' +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' + +multiformats.add(dagCbor) +multiformats.multibase.add(base58) +const { writeStream, readBuffer, completeGraph } = Car(multiformats) +const Block = IpldBlock(multiformats) const same = assert.deepStrictEqual diff --git a/test/test-decode.js b/test/test-decode.js index 4dc9a71..e06e2f3 100644 --- a/test/test-decode.js +++ b/test/test-decode.js @@ -1,14 +1,20 @@ /* eslint-env mocha */ -const assert = require('assert') -const { promisify } = require('util') -const path = require('path') -const fs = require('fs') +import assert from 'assert' +import { promisify } from 'util' +import path from 'path' +import fs from 'fs' +import multiformats from 'multiformats/basics.js' +import { makeData, verifyDecoded } from './fixture-data.js' +import * as coding from '../lib/coding.js' +import dagCbor from '@ipld/dag-cbor' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + fs.readFile = promisify(fs.readFile) -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -const { makeData, verifyDecoded } = require('./fixture-data') -const coding = require('../lib/coding') +multiformats.add(dagCbor) describe('Decode', () => { before(makeData) diff --git a/test/test-encode.js b/test/test-encode.js index 62b4634..80455d9 100644 --- a/test/test-encode.js +++ b/test/test-encode.js @@ -1,15 +1,17 @@ /* eslint-env mocha */ -const assert = require('assert') -const { promisify } = require('util') -const fs = require('fs') -const bl = 
require('bl') +import assert from 'assert' +import { promisify } from 'util' +import fs from 'fs' +import bl from 'bl' +import multiformats from 'multiformats/basics.js' +import { verifyDecoded, makeData } from './fixture-data.js' +import * as coding from '../lib/coding.js' +import dagCbor from '@ipld/dag-cbor' + fs.readFile = promisify(fs.readFile) fs.unlink = promisify(fs.unlink) -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -const { verifyDecoded, makeData } = require('./fixture-data') -const coding = require('../lib/coding') +multiformats.add(dagCbor) describe('Encode', () => { let roots, allBlocks diff --git a/test/test-errors.js b/test/test-errors.js index b75124c..8e38f1b 100644 --- a/test/test-errors.js +++ b/test/test-errors.js @@ -1,12 +1,19 @@ /* eslint-env mocha */ -const assert = require('assert') -const fs = require('fs').promises -const path = require('path') -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -const { readBuffer, readFileComplete, writeStream } = require('../')(multiformats) -const { acid, car } = require('./fixture-data') +import assert from 'assert' +import { promises as fs } from 'fs' +import path from 'path' +import multiformats from 'multiformats/basics.js' +import { acid, car } from './fixture-data.js' +import dagCbor from '@ipld/dag-cbor' +import Car from '../car.js' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + +multiformats.add(dagCbor) +const { readBuffer, readFileComplete, writeStream } = Car(multiformats) describe('Errors', () => { it('unimplemented methods', async () => { diff --git a/test/test-large.js b/test/test-large.js index b871b63..756f19c 100644 --- a/test/test-large.js +++ b/test/test-large.js @@ -1,13 +1,18 @@ /* eslint-env mocha */ -const assert = require('assert') -const fs = require('fs') -const unlink = 
require('util').promisify(require('fs').unlink) -const garbage = require('garbage') -const varint = require('varint') -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -const { writeStream, readFileComplete, readStreaming, indexer } = require('../')(multiformats) +import assert from 'assert' +import fs from 'fs' +import garbage from 'garbage' +import varint from 'varint' +import multiformats from 'multiformats/basics.js' +import dagCbor from '@ipld/dag-cbor' +import Car from '../car.js' +import { promisify } from 'util' + +const unlink = promisify(fs.unlink) + +multiformats.add(dagCbor) +const { writeStream, readFileComplete, readStreaming, indexer } = Car(multiformats) describe('Large CAR', () => { const objects = [] diff --git a/test/test-query.js b/test/test-query.js index e745ba3..cb45f5f 100644 --- a/test/test-query.js +++ b/test/test-query.js @@ -1,12 +1,16 @@ /* eslint-env mocha */ -const assert = require('assert') -const path = require('path') -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) -const { readBuffer, readFile } = require('../')(multiformats) -const { car, makeData, compareBlockData } = require('./fixture-data') +import assert from 'assert' +import path from 'path' +import multiformats from 'multiformats/basics.js' +import { car, makeData, compareBlockData } from './fixture-data.js' +import Car from '../car.js' +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' + +multiformats.add(dagCbor) +multiformats.multibase.add(base58) +const { readBuffer, readFile } = Car(multiformats) if (!assert.rejects) { // browser polyfill is incomplete diff --git a/test/test-raw.js b/test/test-raw.js index 7add2eb..2421d1f 100644 --- a/test/test-raw.js +++ b/test/test-raw.js @@ -1,16 +1,25 @@ /* eslint-env mocha */ -const assert = require('assert') -const { 
promisify } = require('util') -const path = require('path') -const fs = require('fs') +import assert from 'assert' +import { promisify } from 'util' +import path from 'path' +import fs from 'fs' +import multiformats from 'multiformats/basics.js' +import { makeData } from './fixture-data.js' +import Car from '../car.js' +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + fs.open = promisify(fs.open) fs.close = promisify(fs.close) -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) -const { indexer, readRaw } = require('../')(multiformats) -const { makeData } = require('./fixture-data') + +multiformats.add(dagCbor) +multiformats.multibase.add(base58) +const { indexer, readRaw } = Car(multiformats) describe('Raw', () => { const expectedRoots = [ diff --git a/test/test-readbuffer.js b/test/test-readbuffer.js index 43d3b4e..b444025 100644 --- a/test/test-readbuffer.js +++ b/test/test-readbuffer.js @@ -1,11 +1,15 @@ /* eslint-env mocha */ -const assert = require('assert') -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) -const { readBuffer } = require('../')(multiformats) -const { acid, car, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') +import assert from 'assert' +import multiformats from 'multiformats/basics.js' +import { acid, car, makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' +import Car from '../car.js' + +multiformats.add(dagCbor) +multiformats.multibase.add(base58) +const { readBuffer } = Car(multiformats) if 
(!assert.rejects) { // browser polyfill is incomplete diff --git a/test/test-readfile-writestream.js b/test/test-readfile-writestream.js index 50c1165..f927990 100644 --- a/test/test-readfile-writestream.js +++ b/test/test-readfile-writestream.js @@ -1,13 +1,19 @@ /* eslint-env mocha */ -const assert = require('assert') -const fs = require('fs') -const unlink = require('util').promisify(require('fs').unlink) -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) -const { writeStream, readFileComplete } = require('../')(multiformats) -const { makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') +import assert from 'assert' +import fs from 'fs' +import multiformats from 'multiformats/basics.js' +import { makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' +import { promisify } from 'util' +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' +import Car from '../car.js' + +const unlink = promisify(fs.unlink) + +multiformats.add(dagCbor) +multiformats.multibase.add(base58) +const { writeStream, readFileComplete } = Car(multiformats) let rawBlocks let pbBlocks diff --git a/test/test-readfile.js b/test/test-readfile.js index 0151073..a5640f4 100644 --- a/test/test-readfile.js +++ b/test/test-readfile.js @@ -1,12 +1,20 @@ /* eslint-env mocha */ -const assert = require('assert') -const path = require('path') -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) -const { readFileComplete } = require('../car')(multiformats) -const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') +import assert from 'assert' +import path from 'path' +import multiformats from 'multiformats/basics.js' +import { acid, makeData, verifyBlocks, verifyHas, 
verifyRoots } from './fixture-data.js' +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' +import Car from '../car.js' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + +multiformats.add(dagCbor) +multiformats.multibase.add(base58) +const { readFileComplete } = Car(multiformats) let rawBlocks diff --git a/test/test-readindexed.js b/test/test-readindexed.js index c7d9eb9..c1b2e90 100644 --- a/test/test-readindexed.js +++ b/test/test-readindexed.js @@ -1,12 +1,20 @@ /* eslint-env mocha */ -const assert = require('assert') -const path = require('path') -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) -const { readFileIndexed } = require('../')(multiformats) -const { acid, makeData, compareBlockData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') +import assert from 'assert' +import path from 'path' +import multiformats from 'multiformats/basics.js' +import { acid, makeData, compareBlockData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' +import Car from '../car.js' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + +multiformats.add(dagCbor) +multiformats.multibase.add(base58) +const { readFileIndexed } = Car(multiformats) let rawBlocks, allBlocks diff --git a/test/test-readstream.js b/test/test-readstream.js index d9b674f..db981d8 100644 --- a/test/test-readstream.js +++ b/test/test-readstream.js @@ -1,13 +1,21 @@ /* eslint-env mocha */ -const assert = require('assert') -const fs = require('fs') -const path = require('path') -const multiformats = require('multiformats/basics.js') 
-multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) -const { readStreaming } = require('../')(multiformats) -const { acid, makeData, compareBlockData, verifyRoots } = require('./fixture-data') +import assert from 'assert' +import fs from 'fs' +import path from 'path' +import multiformats from 'multiformats/basics.js' +import { acid, makeData, compareBlockData, verifyRoots } from './fixture-data.js' +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' +import Car from '../car.js' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + +multiformats.add(dagCbor) +multiformats.multibase.add(base58) +const { readStreaming } = Car(multiformats) describe('Read Stream', () => { let allBlocks diff --git a/test/test-readstreamcomplete.js b/test/test-readstreamcomplete.js index 86af16f..e9d549b 100644 --- a/test/test-readstreamcomplete.js +++ b/test/test-readstreamcomplete.js @@ -1,13 +1,21 @@ /* eslint-env mocha */ -const assert = require('assert') -const fs = require('fs') -const path = require('path') -const multiformats = require('multiformats/basics.js') -multiformats.add(require('@ipld/dag-cbor')) -multiformats.multibase.add(require('multiformats/bases/base58.js')) -const { readStreamComplete } = require('../')(multiformats) -const { acid, makeData, verifyBlocks, verifyHas, verifyRoots } = require('./fixture-data') +import assert from 'assert' +import fs from 'fs' +import path from 'path' +import multiformats from 'multiformats/basics.js' +import { acid, makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' +import dagCbor from '@ipld/dag-cbor' +import base58 from 'multiformats/bases/base58.js' +import Car from '../car.js' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + 
+multiformats.add(dagCbor) +multiformats.multibase.add(base58) +const { readStreamComplete } = Car(multiformats) let rawBlocks From 25c35bd925b12ceb65e1da9ccefaa74c97928775 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Fri, 14 Aug 2020 17:54:15 +1000 Subject: [PATCH 17/23] update deps --- package.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index 874abef..caa3148 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,7 @@ "version": "1.2.0", "description": "Content ARchive format reader and writer", "main": "car.js", - "type": "module", + "type": "module", "browser": "car-browser.js", "directories": { "test": "test" @@ -39,12 +39,12 @@ "@ipld/dag-cbor": "^1.1.4", "assert": "^2.0.0", "bl": "^4.0.2", - "cids": "^0.8.3", + "cids": "^1.0.0", "garbage": "0.0.0", "hundreds": "0.0.7", - "ipld-dag-pb": "^0.19.0", + "ipld-dag-pb": "^0.20.0", "jsdoc4readme": "^1.3.0", - "mocha": "^8.1.0", + "mocha": "^8.1.1", "standard": "^14.3.4" }, "dependencies": { From d032b874f20947eb5903c34dc0cefaef61fa16e8 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Fri, 14 Aug 2020 20:43:55 +1000 Subject: [PATCH 18/23] limbo local --- .gitignore | 2 ++ package.json | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index ec572e1..c220d89 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,5 @@ node_modules/ .nyc_output/ coverage/ example.car +build/ +dist/ diff --git a/package.json b/package.json index caa3148..5a80dec 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "test": "test" }, "scripts": { - "build": "npx @mikeal/limbo test/test-*.js", + "build": "limbo test/test-*.js && cp test/go.car dist/cjs-browser/test/ && cp test/go.car dist/cjs-node/test/", "prepublishOnly": "npm run build", "lint": "standard", "test:cjs": "rm -rf dist && npm run build && mocha dist/cjs-node/test/test-*.cjs && npm run test:cjs:browser", @@ -37,6 +37,7 @@ "devDependencies": { "@ipld/block": "^6.0.1", 
"@ipld/dag-cbor": "^1.1.4", + "@mikeal/limbo": "0.0.1", "assert": "^2.0.0", "bl": "^4.0.2", "cids": "^1.0.0", From db777dffdaa5f11b6046ce0444d8c567e805cde4 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Fri, 14 Aug 2020 20:48:51 +1000 Subject: [PATCH 19/23] export maps --- package.json | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 5a80dec..6a8d851 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "Content ARchive format reader and writer", "main": "car.js", "type": "module", - "browser": "car-browser.js", + "browser": "./dist/cjs-browser/car-browser.js", "directories": { "test": "test" }, @@ -52,5 +52,19 @@ "interface-datastore": "^2.0.0", "multiformats": "^2.3.0", "varint": "^5.0.0" + }, + "exports": { + ".": { + "import": "./car.js", + "require": "./dist/cjs-node/car.cjs" + }, + "./car.js": { + "import": "./car.js", + "require": "./dist/cjs-node/car.cjs" + }, + "./car-browser.js": { + "import": "./car-browser.js", + "require": "./dist/cjs-node/car-browser.cjs" + } } } From c58509cceeddc627a04e170a58ae6d68394159a4 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Thu, 27 Aug 2020 19:11:43 +1000 Subject: [PATCH 20/23] ipjs, full esm + browser, no Buffer, verify-car.js executable --- README.md | 31 ++++++++------- car-browser.js | 4 +- datastore.js | 4 +- example.js | 13 ++++--- lib/coding-browser.js | 30 +++++++++------ lib/coding.js | 36 +++++++++++++----- lib/raw.js | 11 +++++- lib/util.js | 8 ++-- package.json | 34 +++++++---------- test/fixture-data.js | 34 ++++++++++------- test/test-complete-graph.js | 20 ++++++---- test/test-decode.js | 6 +-- test/test-encode.js | 2 +- test/test-errors.js | 8 ++-- test/test-large.js | 10 ++--- test/test-query.js | 8 ++-- test/test-raw.js | 32 ++++++++-------- test/test-readbuffer.js | 12 +++--- test/test-readfile-writestream.js | 6 +-- test/test-readfile.js | 10 ++--- test/test-readindexed.js | 10 ++--- test/test-readstream.js | 10 
++--- test/test-readstreamcomplete.js | 10 ++--- verify-car.js | 63 +++++++++++++++++++++++++++++++ 24 files changed, 258 insertions(+), 154 deletions(-) create mode 100755 verify-car.js diff --git a/README.md b/README.md index e6a206d..42f362e 100644 --- a/README.md +++ b/README.md @@ -9,16 +9,19 @@ The interface wraps a [Datastore](https://github.com/ipfs/interface-datastore), ## Example ```js -const fs = require('fs') -const multiformats = require('multiformats/basics') +import fs from 'fs' +import multiformats from 'multiformats/basics' +import car from 'datastore-car' +import dagCbor from '@ipld/dag-cbor' + // dag-cbor is required for the CAR root block -multiformats.add(require('@ipld/dag-cbor')) -const CarDatastore = require('datastore-car')(multiformats) +multiformats.add(dagCbor) +const CarDatastore = car(multiformats) async function example () { - const binary = Buffer.from('random meaningless bytes') + const binary = new TextEncoder().encode('random meaningless bytes') const mh = await multiformats.multihash.hash(binary, 'sha2-256') - const cid = new multiformats.CID(1, multiformats.get('raw').code, mh) + const cid = multiformats.CID.create(1, multiformats.get('raw').code, mh) const outStream = fs.createWriteStream('example.car') const writeDs = await CarDatastore.writeStream(outStream) @@ -39,10 +42,10 @@ async function example () { const roots = await readDs.getRoots() // retrieve a block, as a UInt8Array, reading from the ZIP archive const got = await readDs.get(roots[0]) - // also possible: for await (const {key, data} = readDs.query()) { ... } + // also possible: for await (const { key, value } of readDs.query()) { ... 
} console.log('Retrieved [%s] from example.car with CID [%s]', - Buffer.from(got).toString(), + new TextDecoder().decode(got), roots[0].toString()) await readDs.close() @@ -62,14 +65,14 @@ Retrieved [random meaningless bytes] from example.car with CID [bafkreihwkf6mtnj In this example, the `writeStream()` create-mode is used to generate the CAR file, this allows for an iterative write process where first the roots are set (`setRoots()`) and then all of the blocks are written (`put()`). After it is created, we use the `readStreamComplete()` create-mode to read the contents. Other create-modes are useful where the environment, data and needs demand: -* **[`CarDatastore.readBuffer(buffer)`](#CarDatastore__readBuffer)**: read a CAR archive from a `Buffer` or `Uint8Array`. Does not support mutation operations, only reads. This mode is not efficient for large data sets but does support `get()` and `has()` operations since it caches the entire archive in memory. This mode is the only mode _available_ in a browser environment +* **[`CarDatastore.readBuffer(buffer)`](#CarDatastore__readBuffer)**: read a CAR archive from a `Uint8Array`. Does not support mutation operations, only reads. This mode is not efficient for large data sets but does support `get()` and `has()` operations since it caches the entire archive in memory. This mode is the only mode _available_ in a browser environment * **[`CarDatastore.readFileComplete(file)`](#CarDatastore__readFileComplete)**: read a CAR archive directly from a file. Does not support mutation operations, only reads. This mode is not efficient for large data sets but does support `get()` and `has()` operations since it caches the entire archive in memory. This mode is _not available_ in a browser environment. * **[`CarDatastore.readStreamComplete(stream)`](#CarDatastore__readStreamComplete)**: read a CAR archive directly from a stream. Does not support mutation operations, only reads. 
This mode is not efficient for large data sets but does support `get()` and `has()` operations since it caches the entire archive in memory. This mode is _not available_ in a browser environment. * **[`CarDatastore.readStreaming(stream)`](#CarDatastore__readStreaming)**: read a CAR archive directly from a stream. Does not support mutation operations, and only supports iterative reads via `query()` (i.e. no `get()` and `has()`). This mode is very efficient for large data sets. This mode is _not available_ in a browser environment. * **[`async CarDatastore.readFileIndexed(stream)`](#CarDatastore__readFileIndexed)**: read a CAR archive from a local file, index its contents and use that index to support random access reads (`has()`, `get()` and `query()`) without fitting the entire contents in memory as `readFileComplete()` does. Uses more memory than `readStreaming()` and less than `readFileComplete()`. Will be slower to initialize than `readStreaming()` but suitable where random access reads are required from a large file. * **[`CarDatastore.writeStream(stream)`](#CarDatastore__writeStream)**: write a CAR archive to a stream (e.g. `fs.createWriteStream(file)`). Does not support read operations, only writes, and the writes are append-only (i.e. no `delete()`). However, this mode is very efficient for dumping large data sets, with no caching and streaming writes. This mode is _not available_ in a browser environment. -Other create-modes may be supported in the future, such as writing to a Buffer (although this is already possible if you couple `writeStream()` with a [`BufferListStream`](https://ghub.io/bl)) or a read/write mode such as datastore-zipcar makes available. +Other create-modes may be supported in the future, such as writing to a Uint8Array (although this is already possible if you couple `writeStream()` with a [`BufferListStream`](https://ghub.io/bl)) or a read/write mode such as datastore-zipcar makes available. 
## API @@ -97,7 +100,7 @@ Other create-modes may be supported in the future, such as writing to a Buffer ( ### `async CarDatastore.readBuffer(buffer)` -Read a CarDatastore from a Buffer containing the contents of an existing +Read a CarDatastore from a Uint8Array containing the contents of an existing CAR archive. Mutation operations (`put()`, `delete()` and `setRoots()`) are not available. @@ -111,7 +114,7 @@ environment. **Parameters:** -* **`buffer`** _(`Buffer|Uint8Array`)_: the byte contents of a CAR archive +* **`buffer`** _(`Uint8Array`)_: the byte contents of a CAR archive **Return value** _(`CarDatastore`)_: a read-only CarDatastore. @@ -291,7 +294,7 @@ may throw an error if unsupported. * **`key`** _(`string|Key|CID`)_: a `CID` or `CID`-convertable object to identify the block. -**Return value** _(`Buffer`)_: the IPLD block data referenced by the CID. +**Return value** _(`Uint8Array`)_: the IPLD block data referenced by the CID. ### `async CarDatastore#has(key)` @@ -325,7 +328,7 @@ and an Error will be thrown when it is called. * **`key`** _(`string|Key|CID`)_: a `CID` or `CID`-convertable object to identify the `value`. -* **`value`** _(`Buffer|Uint8Array`)_: an IPLD block matching the given `key` +* **`value`** _(`Uint8Array`)_: an IPLD block matching the given `key` `CID`. diff --git a/car-browser.js b/car-browser.js index 239ee1c..af91c99 100644 --- a/car-browser.js +++ b/car-browser.js @@ -5,7 +5,7 @@ import CarDatastore from './datastore.js' /** * @name CarDatastore.readBuffer * @description - * Read a CarDatastore from a Buffer containing the contents of an existing + * Read a CarDatastore from a Uint8Array containing the contents of an existing * CAR archive. Mutation operations (`put()`, `delete()` and `setRoots()`) are * not available. 
* @@ -20,7 +20,7 @@ import CarDatastore from './datastore.js' * @memberof CarDatastore * @static * @async - * @param {Buffer|Uint8Array} buffer the byte contents of a CAR archive + * @param {Uint8Array} buffer the byte contents of a CAR archive * @returns {CarDatastore} a read-only CarDatastore. */ async function readBuffer (multiformats, buffer) { diff --git a/datastore.js b/datastore.js index b79b946..bceddc9 100644 --- a/datastore.js +++ b/datastore.js @@ -31,7 +31,7 @@ class CarDatastore { * @memberof CarDatastore * @param {string|Key|CID} key a `CID` or `CID`-convertable object to identify * the block. - * @return {Buffer} the IPLD block data referenced by the CID. + * @return {Uint8Array} the IPLD block data referenced by the CID. */ async get (key) { key = toKey(this._multiformats, key, 'get') @@ -74,7 +74,7 @@ class CarDatastore { * @memberof CarDatastore * @param {string|Key|CID} key a `CID` or `CID`-convertable object to identify * the `value`. - * @param {Buffer|Uint8Array} value an IPLD block matching the given `key` + * @param {Uint8Array} value an IPLD block matching the given `key` * `CID`. 
*/ async put (key, value) { diff --git a/example.js b/example.js index bdd010f..20eda4f 100644 --- a/example.js +++ b/example.js @@ -1,15 +1,16 @@ import fs from 'fs' import multiformats from 'multiformats/basics' -import car from './' +import car from 'datastore-car' +import dagCbor from '@ipld/dag-cbor' // dag-cbor is required for the CAR root block -multiformats.add(require('@ipld/dag-cbor')) +multiformats.add(dagCbor) const CarDatastore = car(multiformats) async function example () { - const binary = Buffer.from('random meaningless bytes') + const binary = new TextEncoder().encode('random meaningless bytes') const mh = await multiformats.multihash.hash(binary, 'sha2-256') - const cid = new multiformats.CID(1, multiformats.get('raw').code, mh) + const cid = multiformats.CID.create(1, multiformats.get('raw').code, mh) const outStream = fs.createWriteStream('example.car') const writeDs = await CarDatastore.writeStream(outStream) @@ -30,10 +31,10 @@ async function example () { const roots = await readDs.getRoots() // retrieve a block, as a UInt8Array, reading from the ZIP archive const got = await readDs.get(roots[0]) - // also possible: for await (const {key, data} = readDs.query()) { ... } + // also possible: for await (const { key, value } of readDs.query()) { ... 
} console.log('Retrieved [%s] from example.car with CID [%s]', - Buffer.from(got).toString(), + new TextDecoder().decode(got), roots[0].toString()) await readDs.close() diff --git a/lib/coding-browser.js b/lib/coding-browser.js index 1767bb5..575de77 100644 --- a/lib/coding-browser.js +++ b/lib/coding-browser.js @@ -17,7 +17,7 @@ function bufferReader (buf) { async exactly (length) { if (length > buf.length - pos) { - throw new Error('Unexpected end of Buffer') + throw new Error('Unexpected end of data') } return buf.slice(pos, pos + length) }, @@ -70,7 +70,7 @@ async function readCid (multiformats, reader) { // cidv0 32-byte sha2-256 const bytes = await reader.exactly(34) reader.seek(34) - return new multiformats.CID(0, CIDV0_BYTES.DAG_PB, Uint8Array.from(bytes)) + return multiformats.CID.create(0, CIDV0_BYTES.DAG_PB, Uint8Array.from(bytes)) } const version = await readVarint(reader) @@ -79,7 +79,7 @@ async function readCid (multiformats, reader) { } const codec = await readVarint(reader) const multihash = await readMultihash(reader) - return new multiformats.CID(version, codec, Uint8Array.from(multihash)) + return multiformats.CID.create(version, codec, Uint8Array.from(multihash)) } async function readBlockHead (multiformats, reader) { @@ -159,7 +159,7 @@ function Encoder (multiformats, writer) { } const header = await multiformats.encode({ version: 1, roots }, 'dag-cbor') - await writer(Buffer.from(varint.encode(header.length))) + await writer(new Uint8Array(varint.encode(header.length))) await writer(header) }, @@ -167,8 +167,8 @@ function Encoder (multiformats, writer) { if (!isBlock(block)) { throw new TypeError('Block list must be of type { cid, binary }') } - await writer(Buffer.from(varint.encode(block.cid.buffer.length + block.binary.length))) - await writer(block.cid.buffer) + await writer(new Uint8Array(varint.encode(block.cid.bytes.length + block.binary.length))) + await writer(block.cid.bytes) await writer(block.binary) } } @@ -185,8 +185,8 @@ async 
function encode (multiformats, writer, roots, blocks) { /** * @name Car.decodeBuffer * @description - * Decode a `Buffer` representation of a Content ARchive (CAR) into an in-memory - * representation: + * Decode a `Uint8Array` representation of a Content ARchive (CAR) into an + * in-memory representation: * * `{ version, roots[], blocks[] }`. * @@ -202,7 +202,7 @@ async function encode (multiformats, writer, roots, blocks) { * @memberof Car * @static * @async - * @param {Buffer} buf the contents of a CAR + * @param {Uint8Array} buf the contents of a CAR * @returns {Car} an in-memory representation of a CAR file: * `{ version, roots[], blocks[] }`. */ @@ -223,7 +223,7 @@ function BufferDecoder (buf) { * @name Car.encodeBuffer * @description * Encode a set of IPLD blocks of the form `{ cid, binary }` in CAR format, - * returning it as a single `Buffer`. + * returning it as a single `Uint8Array` * @function * @memberof Car * @static @@ -233,14 +233,20 @@ function BufferDecoder (buf) { * blocks. * @param {object[]} blocks an array of IPLD blocks of the form `{ cid, binary }` * to append to the archive. - * @returns {Buffer} a `Buffer` representing the created archive. + * @returns {Uint8Array} a binary representation of the created archive. 
*/ async function encodeBuffer (multiformats, roots, blocks) { const bl = [] const writer = (buf) => { bl.push(buf) } await encode(multiformats, writer, roots, blocks) - return Buffer.concat(bl) + const ret = new Uint8Array(bl.reduce((p, c) => p + c.length, 0)) + let off = 0 + for (const b of bl) { + ret.set(b, off) + off += b.length + } + return ret } export { encode, Encoder, decode, Decoder, decodeBuffer, encodeBuffer } diff --git a/lib/coding.js b/lib/coding.js index 1aa3d5d..334d6fa 100644 --- a/lib/coding.js +++ b/lib/coding.js @@ -1,10 +1,15 @@ -import { promises as fs, createWriteStream } from 'fs' +import fsCb from 'fs' import { promisify } from 'util' -import { PassThrough, pipeline } from 'stream' +import stream from 'readable-stream' import { Encoder, Decoder, encode, decode, encodeBuffer, decodeBuffer } from './coding-browser.js' +const { PassThrough, pipeline } = stream const pipelineAsync = promisify(pipeline) -fs.createWriteStream = createWriteStream +let fs +if (!process.browser) { + fs = fsCb.promises + fs.createWriteStream = fsCb.createWriteStream +} // reusable reader for streams and files, we just need a way to read an // additional chunk (of some undetermined size) and a way to close the @@ -13,7 +18,7 @@ function chunkReader (readChunk, closer) { let pos = 0 let have = 0 let offset = 0 - let currentChunk = Buffer.alloc(0) + let currentChunk = new Uint8Array(0) const read = async (length) => { have = currentChunk.length - offset @@ -33,7 +38,12 @@ function chunkReader (readChunk, closer) { } have += chunk.length } - currentChunk = Buffer.concat(bufa) + currentChunk = new Uint8Array(bufa.reduce((p, c) => p + c.length, 0)) + let off = 0 + for (const b of bufa) { + currentChunk.set(b, off) + off += b.length + } offset = 0 } @@ -77,7 +87,7 @@ function streamReader (stream) { async function readChunk () { const next = await iterator.next() if (next.done) { - return Buffer.alloc(0) + return new Uint8Array(0) } return next.value } @@ -86,14 +96,18 @@ 
function streamReader (stream) { } async function fileReader (file, options) { + /* c8 ignore next 3 */ + if (!fs) { + throw new Error('fileReader() not supported in a browser environment') + } const fd = await fs.open(file, 'r') const bufferSize = typeof options === 'object' && typeof options.bufferSize === 'number' ? options.bufferSize : 1024 - const readerChunk = Buffer.alloc(bufferSize) + const readerChunk = new Uint8Array(bufferSize) async function readChunk () { const { bytesRead } = await fd.read(readerChunk, 0, readerChunk.length) if (!bytesRead) { - return Buffer.alloc(0) + return new Uint8Array(0) } return Uint8Array.prototype.slice.call(readerChunk, 0, bytesRead) } @@ -190,7 +204,11 @@ function StreamDecoder (multiformats, stream) { * to append to the archive. */ -function encodeFile (multiformats, file, roots, blocks) { +async function encodeFile (multiformats, file, roots, blocks) { + /* c8 ignore next 3 */ + if (!fs) { + throw new Error('encodeFile() not supported in a browser environment') + } return pipelineAsync(encodeStream(multiformats, roots, blocks), fs.createWriteStream(file)) } diff --git a/lib/raw.js b/lib/raw.js index 31fbbb5..e997558 100644 --- a/lib/raw.js +++ b/lib/raw.js @@ -1,7 +1,10 @@ import { promisify } from 'util' import fs from 'fs' import { StreamDecoder, FileDecoder } from './coding.js' -fs.read = promisify(fs.read) + +if (!process.browser) { + fs.read = promisify(fs.read) +} /** * @name CarDatastore.indexer @@ -106,8 +109,12 @@ async function indexer (multiformats, inp) { * @returns {object} an IPLD block of the form `{ cid, binary }`. 
*/ async function readRaw (fd, blockIndex) { + /* c8 ignore next 3 */ + if (process.browser) { + throw new Error('readRaw() not supported in a browser environment') + } const { cid, blockLength, blockOffset } = blockIndex - const binary = Buffer.alloc(blockLength) + const binary = new Uint8Array(blockLength) let read if (typeof fd === 'number') { read = (await fs.read(fd, binary, 0, blockLength, blockOffset)).bytesRead diff --git a/lib/util.js b/lib/util.js index e5e5196..44d93bd 100644 --- a/lib/util.js +++ b/lib/util.js @@ -5,7 +5,7 @@ function isBlock (block) { function isCID (key) { return typeof key === 'object' && - isBuffer(key.buffer) && + isBuffer(key.bytes) && isBuffer(key.multihash) && (key.version === 0 || key.version === 1) && typeof key.code === 'number' @@ -13,15 +13,13 @@ function isCID (key) { } function isBuffer (b) { - return (Buffer.isBuffer(b) || ( - b instanceof Uint8Array && - b.constructor.name === 'Uint8Array')) + return b instanceof ArrayBuffer || ArrayBuffer.isView(b) } function toKey (multiformats, key, method) { if (!isCID(key)) { try { - key = new multiformats.CID(key.toString()) + key = multiformats.CID.from(key.toString()) } catch (e) { throw new TypeError(`${method}() only accepts CIDs or CID strings`) } diff --git a/package.json b/package.json index 6a8d851..3351877 100644 --- a/package.json +++ b/package.json @@ -4,17 +4,17 @@ "description": "Content ARchive format reader and writer", "main": "car.js", "type": "module", - "browser": "./dist/cjs-browser/car-browser.js", + "browser": "./dist/cjs/car-browser.js", "directories": { "test": "test" }, "scripts": { - "build": "limbo test/test-*.js && cp test/go.car dist/cjs-browser/test/ && cp test/go.car dist/cjs-node/test/", + "build": "npm_config_yes=true ipjs build --tests", "prepublishOnly": "npm run build", "lint": "standard", - "test:cjs": "rm -rf dist && npm run build && mocha dist/cjs-node/test/test-*.cjs && npm run test:cjs:browser", + "test:cjs": "rm -rf dist && npm run build 
&& cp test/go.car dist/cjs/node-test/ && mocha dist/cjs/node-test/test-*.js && npm run test:cjs:browser", "test:node": "hundreds mocha test/test-*.js", - "test:cjs:browser": "polendina --cleanup dist/cjs-browser/test/test-*.js", + "test:cjs:browser": "polendina --cleanup dist/cjs/browser-test/test-readbuffer.js dist/cjs/browser-test/test-query.js", "test": "npm run lint && npm run test:node && npm run test:cjs", "coverage": "c8 --reporter=html --reporter=text mocha test/test-*.js && npx st -d coverage -p 8888", "docs": "jsdoc4readme --readme *.js lib/raw.js" @@ -35,36 +35,30 @@ }, "homepage": "https://github.com/rvagg/js-datastore-car#readme", "devDependencies": { - "@ipld/block": "^6.0.1", - "@ipld/dag-cbor": "^1.1.4", - "@mikeal/limbo": "0.0.1", + "@ipld/block": "^6.0.3", + "@ipld/dag-cbor": "^1.1.10", "assert": "^2.0.0", - "bl": "^4.0.2", + "bl": "^4.0.3", "cids": "^1.0.0", "garbage": "0.0.0", - "hundreds": "0.0.7", + "hundreds": "0.0.8", + "ipjs": "^3.1.11", "ipld-dag-pb": "^0.20.0", "jsdoc4readme": "^1.3.0", - "mocha": "^8.1.1", + "mocha": "^8.1.2", + "multiformats": "^3.0.3", + "polendina": "^1.1.0", "standard": "^14.3.4" }, "dependencies": { "interface-datastore": "^2.0.0", - "multiformats": "^2.3.0", + "readable-stream": "^3.6.0", "varint": "^5.0.0" }, "exports": { ".": { "import": "./car.js", - "require": "./dist/cjs-node/car.cjs" - }, - "./car.js": { - "import": "./car.js", - "require": "./dist/cjs-node/car.cjs" - }, - "./car-browser.js": { - "import": "./car-browser.js", - "require": "./dist/cjs-node/car-browser.cjs" + "browser": "./car-browser.js" } } } diff --git a/test/fixture-data.js b/test/fixture-data.js index d0e36fb..1d8bbba 100644 --- a/test/fixture-data.js +++ b/test/fixture-data.js @@ -1,10 +1,10 @@ import assert from 'assert' import IpldDagPb from 'ipld-dag-pb' import CID from 'cids' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import dagCbor from '@ipld/dag-cbor' -import base58 from 
'multiformats/bases/base58.js' +import base58 from 'multiformats/bases/base58' const { DAGNode, DAGLink, util: pbUtil } = IpldDagPb @@ -17,7 +17,7 @@ const cborBlocks = [] let allBlocks let allBlocksFlattened -const acid = new multiformats.CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm') +const acid = multiformats.CID.from('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm') function toCBORStruct (name, link) { return { name, link } @@ -25,26 +25,26 @@ function toCBORStruct (name, link) { async function toPbBlock (pnd) { const binary = pbUtil.serialize(pnd) - const cid = new multiformats.CID((await pbUtil.cid(binary, { cidVersion: 0 })).toString()) + const cid = multiformats.CID.from((await pbUtil.cid(binary, { cidVersion: 0 })).toString()) return { cid, binary } } async function toCborBlock (cb) { const binary = await multiformats.encode(cb, 'dag-cbor') const mh = await multiformats.multihash.hash(binary, 'sha2-256') - const cid = new multiformats.CID(1, multiformats.get('dag-cbor').code, mh) + const cid = multiformats.CID.create(1, multiformats.get('dag-cbor').code, mh) return { cid, binary } } async function toRawBlock (binary) { const mh = await multiformats.multihash.hash(binary, 'sha2-256') - const cid = new multiformats.CID(1, multiformats.get('raw').code, mh) + const cid = multiformats.CID.create(1, multiformats.get('raw').code, mh) return { cid, binary } } async function makeData () { if (!rawBlocks) { - rawBlocks = await Promise.all('aaaa bbbb cccc zzzz'.split(' ').map((s) => toRawBlock(Buffer.from(s)))) + rawBlocks = await Promise.all('aaaa bbbb cccc zzzz'.split(' ').map((s) => toRawBlock(new TextEncoder().encode(s)))) const pnd1 = new DAGNode(null, [ new DAGLink('cat', rawBlocks[0].binary.byteLength, new CID(rawBlocks[0].cid.toString())) @@ -103,8 +103,8 @@ async function verifyDecoded (decoded, singleRoot) { const index = expectedCids.indexOf(cid.toString()) assert.ok(index >= 0, 'got expected block') assert.strictEqual( - 
Buffer.from(expectedBlocks[index].binary).toString('hex'), - Buffer.from(block.binary).toString('hex'), + toHex(expectedBlocks[index].binary), + toHex(block.binary), 'got expected block content') expectedBlocks.splice(index, 1) expectedCids.splice(index, 1) @@ -138,11 +138,11 @@ async function verifyHas (carDs, modified) { } // not a block we have - await verifyHasnt((await toRawBlock(Buffer.from('dddd'))).cid, 'dddd') + await verifyHasnt((await toRawBlock(new TextEncoder().encode('dddd'))).cid, 'dddd') } function compareBlockData (actual, expected, id) { - assert.strictEqual(Buffer.from(actual).toString('hex'), Buffer.from(expected).toString('hex'), `comparing block as hex ${id}`) + assert.strictEqual(toHex(actual), toHex(expected), `comparing block as hex ${id}`) } async function verifyBlocks (carDs, modified) { @@ -185,6 +185,14 @@ async function verifyRoots (carDs, modified) { assert.deepStrictEqual((await carDs.getRoots()).map((c) => c.toString()), expected) } -const car = Buffer.from('63a265726f6f747382d82a58250001711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8bd82a5825000171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365b6776657273696f6e01280155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b461616161280155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e646877626262622801551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a2745163636363511220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d0122d0a240155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4120363617418048001122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a122d0a240155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e6468771203646f671804122d0a221220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d01205666972737418338301122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de122e0a2401551220b6fbd675f98e2abd22d4ed29fdc83150fedc
48597e92dd1a7a24381d44a274511204626561721804122f0a22122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a12067365636f6e641895015b01711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8ba2646c696e6bd82a582300122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de646e616d6564626c6970360171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365ba2646c696e6bf6646e616d65656c696d626f', 'hex') +function toHex (b) { + return b.reduce((hex, byte) => hex + byte.toString(16).padStart(2, '0'), '') +} + +function fromHex (str) { + return new Uint8Array(str.match(/../g).map(b => parseInt(b, 16))) +} + +const car = fromHex('63a265726f6f747382d82a58250001711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8bd82a5825000171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365b6776657273696f6e01280155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b461616161280155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e646877626262622801551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a2745163636363511220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d0122d0a240155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4120363617418048001122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a122d0a240155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e6468771203646f671804122d0a221220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d01205666972737418338301122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de122e0a2401551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a274511204626561721804122f0a22122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a12067365636f6e641895015b01711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8ba2646c696e6bd82a582300122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de646e616d6564626c69703601711
22069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365ba2646c696e6bf6646e616d65656c696d626f') -export { makeData, verifyBlocks, verifyHas, verifyRoots, acid, compareBlockData, verifyDecoded, car } +export { makeData, verifyBlocks, verifyHas, verifyRoots, acid, compareBlockData, verifyDecoded, toHex, fromHex, car } diff --git a/test/test-complete-graph.js b/test/test-complete-graph.js index a5a9ec0..feffe33 100644 --- a/test/test-complete-graph.js +++ b/test/test-complete-graph.js @@ -1,20 +1,20 @@ /* eslint-env mocha */ import assert from 'assert' -import multiformats from 'multiformats/basics.js' -import { PassThrough } from 'stream' +import multiformats from 'multiformats/basics' +import stream from 'readable-stream' import IpldBlock from '@ipld/block' -import Car from '../car.js' +import Car from 'datastore-car' import dagCbor from '@ipld/dag-cbor' -import base58 from 'multiformats/bases/base58.js' +import base58 from 'multiformats/bases/base58' multiformats.add(dagCbor) multiformats.multibase.add(base58) const { writeStream, readBuffer, completeGraph } = Car(multiformats) const Block = IpldBlock(multiformats) - const same = assert.deepStrictEqual +const { PassThrough } = stream function all (car) { const _traverse = async function * (link, seen = new Set()) { @@ -52,14 +52,20 @@ async function concat (stream) { for await (const buffer of stream) { buffers.push(buffer) } - return Buffer.concat(buffers) + const ret = new Uint8Array(buffers.reduce((p, c) => p + c.length, 0)) + let off = 0 + for (const b of buffers) { + ret.set(b, off) + off += b.length + } + return ret } describe('Create car for full graph', () => { it('small graph', async () => { const leaf1 = Block.encoder({ hello: 'world' }, 'dag-cbor') const leaf2 = Block.encoder({ test: 1 }, 'dag-cbor') - const raw = Block.encoder(Buffer.from('test'), 'raw') + const raw = Block.encoder(new TextEncoder().encode('test'), 'raw') const root = Block.encoder( { one: await leaf1.cid(), diff --git 
a/test/test-decode.js b/test/test-decode.js index e06e2f3..de79d2d 100644 --- a/test/test-decode.js +++ b/test/test-decode.js @@ -4,7 +4,7 @@ import assert from 'assert' import { promisify } from 'util' import path from 'path' import fs from 'fs' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import { makeData, verifyDecoded } from './fixture-data.js' import * as coding from '../lib/coding.js' import dagCbor from '@ipld/dag-cbor' @@ -44,11 +44,11 @@ describe('Decode', () => { // truncated await assert.rejects(coding.decodeBuffer(multiformats, buf.slice(0, buf.length - 10)), { name: 'Error', - message: 'Unexpected end of Buffer' + message: 'Unexpected end of data' }) // cid v0 - const buf2 = Buffer.alloc(buf.length) + const buf2 = new Uint8Array(buf.length) buf.copy(buf2) buf2[101] = 0 // first block's CID await assert.rejects(coding.decodeBuffer(multiformats, buf2), { diff --git a/test/test-encode.js b/test/test-encode.js index 80455d9..eb2e3c7 100644 --- a/test/test-encode.js +++ b/test/test-encode.js @@ -4,7 +4,7 @@ import assert from 'assert' import { promisify } from 'util' import fs from 'fs' import bl from 'bl' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import { verifyDecoded, makeData } from './fixture-data.js' import * as coding from '../lib/coding.js' import dagCbor from '@ipld/dag-cbor' diff --git a/test/test-errors.js b/test/test-errors.js index 8e38f1b..3e02044 100644 --- a/test/test-errors.js +++ b/test/test-errors.js @@ -3,10 +3,10 @@ import assert from 'assert' import { promises as fs } from 'fs' import path from 'path' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import { acid, car } from './fixture-data.js' import dagCbor from '@ipld/dag-cbor' -import Car from '../car.js' +import Car from 'datastore-car' import { fileURLToPath } from 'url' const __filename = fileURLToPath(import.meta.url) 
@@ -56,8 +56,8 @@ describe('Errors', () => { it('bad puts', async () => { const carDs = await writeStream(fs.createWriteStream('test.car')) - await assert.rejects(carDs.put(acid, 'blip')) // not a Buffer value - await assert.rejects(carDs.put('blip', Buffer.from('blip')), /only accepts CIDs or CID strings/) // not a CID key + await assert.rejects(carDs.put(acid, 'blip')) // not a Uint8Array value + await assert.rejects(carDs.put('blip', new TextEncoder().encode('blip')), /only accepts CIDs or CID strings/) // not a CID key await carDs.close() }) diff --git a/test/test-large.js b/test/test-large.js index 756f19c..6f2a7bb 100644 --- a/test/test-large.js +++ b/test/test-large.js @@ -4,9 +4,9 @@ import assert from 'assert' import fs from 'fs' import garbage from 'garbage' import varint from 'varint' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import dagCbor from '@ipld/dag-cbor' -import Car from '../car.js' +import Car from 'datastore-car' import { promisify } from 'util' const unlink = promisify(fs.unlink) @@ -31,13 +31,13 @@ describe('Large CAR', () => { objects.push(obj) const binary = await multiformats.encode(obj, 'dag-cbor') const mh = await multiformats.multihash.hash(binary, 'sha2-256') - const cid = new multiformats.CID(1, multiformats.get('dag-cbor').code, mh) + const cid = multiformats.CID.create(1, multiformats.get('dag-cbor').code, mh) cids.push(cid.toString()) const blockLength = binary.length - let length = cid.buffer.length + blockLength + let length = cid.bytes.length + blockLength const lengthLength = varint.encode(length).length length += lengthLength - const blockOffset = offset + lengthLength + cid.buffer.length + const blockOffset = offset + lengthLength + cid.bytes.length expectedIndex.push({ cid, offset, length, blockOffset, blockLength }) offset += length await carDs.put(cid, binary) diff --git a/test/test-query.js b/test/test-query.js index cb45f5f..4ea7856 100644 --- a/test/test-query.js 
+++ b/test/test-query.js @@ -2,11 +2,11 @@ import assert from 'assert' import path from 'path' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import { car, makeData, compareBlockData } from './fixture-data.js' -import Car from '../car.js' +import Car from 'datastore-car' import dagCbor from '@ipld/dag-cbor' -import base58 from 'multiformats/bases/base58.js' +import base58 from 'multiformats/bases/base58' multiformats.add(dagCbor) multiformats.multibase.add(base58) @@ -84,7 +84,7 @@ for (const [factoryName, factoryFn] of factories) { const blocks_ = [] const cids = [] for (const block of blocks) { - const cid = new multiformats.CID(block.cid.toString()) + const cid = multiformats.CID.from(block.cid.toString()) if (cid.code === multiformats.get('dag-cbor').code) { cids.push(cid.toString()) blocks_.push(block) diff --git a/test/test-raw.js b/test/test-raw.js index 2421d1f..6eb0edd 100644 --- a/test/test-raw.js +++ b/test/test-raw.js @@ -4,11 +4,11 @@ import assert from 'assert' import { promisify } from 'util' import path from 'path' import fs from 'fs' -import multiformats from 'multiformats/basics.js' -import { makeData } from './fixture-data.js' -import Car from '../car.js' +import multiformats from 'multiformats/basics' +import { makeData, toHex } from './fixture-data.js' +import Car from 'datastore-car' import dagCbor from '@ipld/dag-cbor' -import base58 from 'multiformats/bases/base58.js' +import base58 from 'multiformats/bases/base58' import { fileURLToPath } from 'url' const __filename = fileURLToPath(import.meta.url) @@ -23,18 +23,18 @@ const { indexer, readRaw } = Car(multiformats) describe('Raw', () => { const expectedRoots = [ - new multiformats.CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), - new multiformats.CID('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm') + multiformats.CID.from('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), + 
multiformats.CID.from('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm') ] const expectedIndex = [ - { cid: new multiformats.CID('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), offset: 100, length: 92, blockOffset: 137, blockLength: 55 }, - { cid: new multiformats.CID('QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d'), offset: 192, length: 133, blockOffset: 228, blockLength: 97 }, - { cid: new multiformats.CID('bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke'), offset: 325, length: 41, blockOffset: 362, blockLength: 4 }, - { cid: new multiformats.CID('QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys'), offset: 366, length: 130, blockOffset: 402, blockLength: 94 }, - { cid: new multiformats.CID('bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4'), offset: 496, length: 41, blockOffset: 533, blockLength: 4 }, - { cid: new multiformats.CID('QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT'), offset: 537, length: 82, blockOffset: 572, blockLength: 47 }, - { cid: new multiformats.CID('bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq'), offset: 619, length: 41, blockOffset: 656, blockLength: 4 }, - { cid: new multiformats.CID('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm'), offset: 660, length: 55, blockOffset: 697, blockLength: 18 } + { cid: multiformats.CID.from('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), offset: 100, length: 92, blockOffset: 137, blockLength: 55 }, + { cid: multiformats.CID.from('QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d'), offset: 192, length: 133, blockOffset: 228, blockLength: 97 }, + { cid: multiformats.CID.from('bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke'), offset: 325, length: 41, blockOffset: 362, blockLength: 4 }, + { cid: multiformats.CID.from('QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys'), offset: 366, length: 130, blockOffset: 402, blockLength: 94 }, + { cid: 
multiformats.CID.from('bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4'), offset: 496, length: 41, blockOffset: 533, blockLength: 4 }, + { cid: multiformats.CID.from('QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT'), offset: 537, length: 82, blockOffset: 572, blockLength: 47 }, + { cid: multiformats.CID.from('bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq'), offset: 619, length: 41, blockOffset: 656, blockLength: 4 }, + { cid: multiformats.CID.from('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm'), offset: 660, length: 55, blockOffset: 697, blockLength: 18 } ] let allBlocksFlattened @@ -75,8 +75,8 @@ describe('Raw', () => { const index = expectedCids.indexOf(cid.toString()) assert.ok(index >= 0, 'got expected block') assert.strictEqual( - Buffer.from(expectedBlocks[index].binary).toString('hex'), - Buffer.from(block.binary).toString('hex'), + toHex(expectedBlocks[index].binary), + toHex(block.binary), 'got expected block content') expectedBlocks.splice(index, 1) expectedCids.splice(index, 1) diff --git a/test/test-readbuffer.js b/test/test-readbuffer.js index b444025..2a07c36 100644 --- a/test/test-readbuffer.js +++ b/test/test-readbuffer.js @@ -1,11 +1,11 @@ /* eslint-env mocha */ import assert from 'assert' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import { acid, car, makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' import dagCbor from '@ipld/dag-cbor' -import base58 from 'multiformats/bases/base58.js' -import Car from '../car.js' +import base58 from 'multiformats/bases/base58' +import Car from 'datastore-car' multiformats.add(dagCbor) multiformats.multibase.add(base58) @@ -47,11 +47,11 @@ describe('Read Buffer', () => { await carDs.close() }) - // when we instantiate from a Buffer, CarDatastore should be immutable + // when we instantiate from a Uint8Array, CarDatastore should be immutable it('immutable', async () => { const carDs = await 
readBuffer(car) - await assert.rejects(carDs.put(acid, Buffer.from('blip'))) - await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) + await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) await assert.rejects(carDs.setRoots(acid)) await assert.rejects(carDs.setRoots([acid])) }) diff --git a/test/test-readfile-writestream.js b/test/test-readfile-writestream.js index f927990..48f2b55 100644 --- a/test/test-readfile-writestream.js +++ b/test/test-readfile-writestream.js @@ -2,12 +2,12 @@ import assert from 'assert' import fs from 'fs' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import { makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' import { promisify } from 'util' import dagCbor from '@ipld/dag-cbor' -import base58 from 'multiformats/bases/base58.js' -import Car from '../car.js' +import base58 from 'multiformats/bases/base58' +import Car from 'datastore-car' const unlink = promisify(fs.unlink) diff --git a/test/test-readfile.js b/test/test-readfile.js index a5640f4..5cb906b 100644 --- a/test/test-readfile.js +++ b/test/test-readfile.js @@ -2,11 +2,11 @@ import assert from 'assert' import path from 'path' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import { acid, makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' import dagCbor from '@ipld/dag-cbor' -import base58 from 'multiformats/bases/base58.js' -import Car from '../car.js' +import base58 from 'multiformats/bases/base58' +import Car from 'datastore-car' import { fileURLToPath } from 'url' const __filename = fileURLToPath(import.meta.url) @@ -43,8 +43,8 @@ describe('Read File', () => { // when we instantiate from a File, CarDatastore should be immutable it('immutable', async () => { const carDs = await readFileComplete(path.join(__dirname, 'go.car')) - 
await assert.rejects(carDs.put(acid, Buffer.from('blip'))) - await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) + await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) await assert.rejects(carDs.setRoots(acid)) await assert.rejects(carDs.setRoots([acid])) }) diff --git a/test/test-readindexed.js b/test/test-readindexed.js index c1b2e90..df6202b 100644 --- a/test/test-readindexed.js +++ b/test/test-readindexed.js @@ -2,11 +2,11 @@ import assert from 'assert' import path from 'path' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import { acid, makeData, compareBlockData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' import dagCbor from '@ipld/dag-cbor' -import base58 from 'multiformats/bases/base58.js' -import Car from '../car.js' +import base58 from 'multiformats/bases/base58' +import Car from 'datastore-car' import { fileURLToPath } from 'url' const __filename = fileURLToPath(import.meta.url) @@ -44,8 +44,8 @@ describe('Read Indexed', () => { // when we instantiate from a Stream, CarDatastore should be immutable it('immutable', async () => { const carDs = await readFileIndexed(path.join(__dirname, 'go.car')) - await assert.rejects(carDs.put(acid, Buffer.from('blip'))) - await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) + await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) await assert.rejects(carDs.setRoots(acid)) await assert.rejects(carDs.setRoots([acid])) }) diff --git a/test/test-readstream.js b/test/test-readstream.js index db981d8..a4fe5da 100644 --- a/test/test-readstream.js +++ b/test/test-readstream.js @@ -3,11 +3,11 @@ import assert from 'assert' import fs from 'fs' import path from 'path' -import multiformats from 'multiformats/basics.js' +import multiformats from 
'multiformats/basics' import { acid, makeData, compareBlockData, verifyRoots } from './fixture-data.js' import dagCbor from '@ipld/dag-cbor' -import base58 from 'multiformats/bases/base58.js' -import Car from '../car.js' +import base58 from 'multiformats/bases/base58' +import Car from 'datastore-car' import { fileURLToPath } from 'url' const __filename = fileURLToPath(import.meta.url) @@ -90,8 +90,8 @@ describe('Read Stream', () => { await assert.rejects(carDs.get(allBlocks[0].cid)) // when we instantiate from a Stream, CarDatastore should be immutable - await assert.rejects(carDs.put(acid, Buffer.from('blip'))) - await assert.rejects(carDs.delete(acid, Buffer.from('blip'))) + await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) await assert.rejects(carDs.setRoots(acid)) await assert.rejects(carDs.setRoots([acid])) }) diff --git a/test/test-readstreamcomplete.js b/test/test-readstreamcomplete.js index e9d549b..7ee6285 100644 --- a/test/test-readstreamcomplete.js +++ b/test/test-readstreamcomplete.js @@ -3,11 +3,11 @@ import assert from 'assert' import fs from 'fs' import path from 'path' -import multiformats from 'multiformats/basics.js' +import multiformats from 'multiformats/basics' import { acid, makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' import dagCbor from '@ipld/dag-cbor' -import base58 from 'multiformats/bases/base58.js' -import Car from '../car.js' +import base58 from 'multiformats/bases/base58' +import Car from 'datastore-car' import { fileURLToPath } from 'url' const __filename = fileURLToPath(import.meta.url) @@ -44,8 +44,8 @@ describe('Read Stream', () => { // when we instantiate from a Stream, CarDatastore should be immutable it('immutable', async () => { const carDs = await readStreamComplete(fs.createReadStream(path.join(__dirname, 'go.car'))) - await assert.rejects(carDs.put(acid, Buffer.from('blip'))) - await 
assert.rejects(carDs.delete(acid, Buffer.from('blip'))) + await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) await assert.rejects(carDs.setRoots(acid)) await assert.rejects(carDs.setRoots([acid])) }) diff --git a/verify-car.js b/verify-car.js new file mode 100755 index 0000000..d635f54 --- /dev/null +++ b/verify-car.js @@ -0,0 +1,63 @@ +#!/usr/bin/env node + +// Verify a car file's blocks have round-tripishness +import fs from 'fs' +import multiformats from 'multiformats/basics' +import car from 'datastore-car' +import dagCbor from '@ipld/dag-cbor' +import blake from '../../multiformats/js-multihashing/src/blake.js' + +if (!process.argv[2]) { + console.log('Usage: verify-car.js ') + process.exit(1) +} + +multiformats.add(dagCbor) + +// for filecoin +const blake2b = (() => { + const table = {} + blake.addFuncs(table) + const B = table[0xb220] + return (bin) => B().update(bin).digest() +})() + +multiformats.multihash.add({ + name: 'blake2b-256', + encode: blake2b, + code: 0xb220 +}) + +const CarDatastore = car(multiformats) + +function toHex (b) { + return b.reduce((hex, byte) => hex + byte.toString(16).padStart(2, '0'), '') +} + +async function example () { + const inStream = fs.createReadStream(process.argv[2]) + const readDs = await CarDatastore.readStreaming(inStream) + let count = 0 + for await (const { key, value } of readDs.query()) { + const cid = multiformats.CID.from(key) + const obj = multiformats.decode(value, cid.code) + const reenc = multiformats.encode(obj, cid.code) + const hashFn = multiformats.multihash.decode(cid.multihash).code + const mh = await multiformats.multihash.hash(reenc, hashFn) + const recid = multiformats.CID.create(1, cid.code, mh) + if (!recid.equals(cid)) { + console.log(`\nMismatch: ${cid} <> ${recid}`) + console.log(`Orig:\n${toHex(value)}\nRe-encode:\n${toHex(reenc)}`) + } else { + if (count++ % 100 === 0) { + 
process.stdout.write('.') + } + } + } + await readDs.close() +} + +example().catch((err) => { + console.error(err) + process.exit(1) +}) From 45db3ba800959d97cf2eee66bed1189efb0bdd08 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Thu, 27 Aug 2020 19:25:29 +1000 Subject: [PATCH 21/23] assert -> chai --- package.json | 2 ++ test/fixture-data.js | 8 ++++++-- test/test-complete-graph.js | 4 +++- test/test-decode.js | 10 +++++++--- test/test-encode.js | 10 +++++++--- test/test-errors.js | 32 +++++++++++++++++-------------- test/test-large.js | 4 +++- test/test-query.js | 18 +++++------------ test/test-raw.js | 12 ++++++++---- test/test-readbuffer.js | 28 ++++++++++----------------- test/test-readfile-writestream.js | 10 +++++++--- test/test-readfile.js | 16 ++++++++++------ test/test-readindexed.js | 16 ++++++++++------ test/test-readstream.js | 18 ++++++++++------- test/test-readstreamcomplete.js | 16 ++++++++++------ 15 files changed, 117 insertions(+), 87 deletions(-) diff --git a/package.json b/package.json index 3351877..488adad 100644 --- a/package.json +++ b/package.json @@ -39,6 +39,8 @@ "@ipld/dag-cbor": "^1.1.10", "assert": "^2.0.0", "bl": "^4.0.3", + "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", "cids": "^1.0.0", "garbage": "0.0.0", "hundreds": "0.0.8", diff --git a/test/fixture-data.js b/test/fixture-data.js index 1d8bbba..c532cac 100644 --- a/test/fixture-data.js +++ b/test/fixture-data.js @@ -1,4 +1,5 @@ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import IpldDagPb from 'ipld-dag-pb' import CID from 'cids' import multiformats from 'multiformats/basics' @@ -6,6 +7,9 @@ import multiformats from 'multiformats/basics' import dagCbor from '@ipld/dag-cbor' import base58 from 'multiformats/bases/base58' +chai.use(chaiAsPromised) +const { assert } = chai + const { DAGNode, DAGLink, util: pbUtil } = IpldDagPb multiformats.add(dagCbor) @@ -162,7 +166,7 @@ async function verifyBlocks (carDs, modified) { const 
block = blocks[i] if (modified && i === 1) { - await assert.rejects(carDs.get(block.cid), { + await assert.isRejected(carDs.get(block.cid), { name: 'Error', message: 'Not Found' }) diff --git a/test/test-complete-graph.js b/test/test-complete-graph.js index feffe33..0465ba1 100644 --- a/test/test-complete-graph.js +++ b/test/test-complete-graph.js @@ -1,6 +1,6 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' import multiformats from 'multiformats/basics' import stream from 'readable-stream' @@ -9,6 +9,8 @@ import Car from 'datastore-car' import dagCbor from '@ipld/dag-cbor' import base58 from 'multiformats/bases/base58' +const { assert } = chai + multiformats.add(dagCbor) multiformats.multibase.add(base58) const { writeStream, readBuffer, completeGraph } = Car(multiformats) diff --git a/test/test-decode.js b/test/test-decode.js index de79d2d..d14287f 100644 --- a/test/test-decode.js +++ b/test/test-decode.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import { promisify } from 'util' import path from 'path' import fs from 'fs' @@ -10,6 +11,9 @@ import * as coding from '../lib/coding.js' import dagCbor from '@ipld/dag-cbor' import { fileURLToPath } from 'url' +chai.use(chaiAsPromised) +const { assert } = chai + const __filename = fileURLToPath(import.meta.url) const __dirname = path.dirname(__filename) @@ -42,7 +46,7 @@ describe('Decode', () => { it('decode errors', async () => { const buf = await fs.readFile(path.join(__dirname, 'go.car')) // truncated - await assert.rejects(coding.decodeBuffer(multiformats, buf.slice(0, buf.length - 10)), { + await assert.isRejected(coding.decodeBuffer(multiformats, buf.slice(0, buf.length - 10)), { name: 'Error', message: 'Unexpected end of data' }) @@ -51,7 +55,7 @@ describe('Decode', () => { const buf2 = new Uint8Array(buf.length) buf.copy(buf2) buf2[101] = 0 // first block's CID - await 
assert.rejects(coding.decodeBuffer(multiformats, buf2), { + await assert.isRejected(coding.decodeBuffer(multiformats, buf2), { name: 'Error', message: 'Unexpected CID version (0)' }) diff --git a/test/test-encode.js b/test/test-encode.js index eb2e3c7..41404cd 100644 --- a/test/test-encode.js +++ b/test/test-encode.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import { promisify } from 'util' import fs from 'fs' import bl from 'bl' @@ -9,6 +10,9 @@ import { verifyDecoded, makeData } from './fixture-data.js' import * as coding from '../lib/coding.js' import dagCbor from '@ipld/dag-cbor' +chai.use(chaiAsPromised) +const { assert } = chai + fs.readFile = promisify(fs.readFile) fs.unlink = promisify(fs.unlink) multiformats.add(dagCbor) @@ -65,12 +69,12 @@ describe('Encode', () => { }) it('encode errors', async () => { - await assert.rejects(coding.encodeBuffer(multiformats, ['blip'], allBlocks), { + await assert.isRejected(coding.encodeBuffer(multiformats, ['blip'], allBlocks), { name: 'TypeError', message: 'Roots must be CIDs' }) - await assert.rejects(coding.encodeBuffer(multiformats, roots, ['blip']), { + await assert.isRejected(coding.encodeBuffer(multiformats, roots, ['blip']), { name: 'TypeError', message: 'Block list must be of type { cid, binary }' }) diff --git a/test/test-errors.js b/test/test-errors.js index 3e02044..3f7d17c 100644 --- a/test/test-errors.js +++ b/test/test-errors.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import { promises as fs } from 'fs' import path from 'path' import multiformats from 'multiformats/basics' @@ -9,6 +10,9 @@ import dagCbor from '@ipld/dag-cbor' import Car from 'datastore-car' import { fileURLToPath } from 'url' +chai.use(chaiAsPromised) +const { assert } = chai + const __filename = fileURLToPath(import.meta.url) const __dirname = 
path.dirname(__filename) @@ -18,23 +22,23 @@ const { readBuffer, readFileComplete, writeStream } = Car(multiformats) describe('Errors', () => { it('unimplemented methods', async () => { const carDs = await readBuffer(car) - await assert.rejects(carDs.batch()) - await assert.rejects(carDs.batch('foo')) + await assert.isRejected(carDs.batch()) + await assert.isRejected(carDs.batch('foo')) await carDs.close() }) it('bad gets', async () => { const carDs = await readBuffer(car) - await assert.rejects(carDs.get('blip'), /only accepts CIDs or CID strings/) // not a CID key - await assert.rejects(carDs.get(null)) // not a CID key - await assert.doesNotReject(carDs.get(acid)) // sanity check + await assert.isRejected(carDs.get('blip'), /only accepts CIDs or CID strings/) // not a CID key + await assert.isRejected(carDs.get(null)) // not a CID key + await assert.isFulfilled(carDs.get(acid)) // sanity check await carDs.close() }) it('bad has\'', async () => { const carDs = await readBuffer(car) - await assert.rejects(carDs.has('blip'), /only accepts CIDs or CID strings/) // not a CID key - await assert.doesNotReject(carDs.has(acid)) // sanity check + await assert.isRejected(carDs.has('blip'), /only accepts CIDs or CID strings/) // not a CID key + await assert.isFulfilled(carDs.has(acid)) // sanity check await carDs.close() }) @@ -48,23 +52,23 @@ describe('Errors', () => { it('bad root type', async () => { const carDs = await writeStream(fs.createWriteStream('test.car')) - assert.rejects(carDs.setRoots('blip')) - assert.rejects(carDs.setRoots(['blip'])) - assert.rejects(carDs.setRoots([acid, false])) + assert.isRejected(carDs.setRoots('blip')) + assert.isRejected(carDs.setRoots(['blip'])) + assert.isRejected(carDs.setRoots([acid, false])) await carDs.close() }) it('bad puts', async () => { const carDs = await writeStream(fs.createWriteStream('test.car')) - await assert.rejects(carDs.put(acid, 'blip')) // not a Uint8Array value - await assert.rejects(carDs.put('blip', new 
TextEncoder().encode('blip')), /only accepts CIDs or CID strings/) // not a CID key + await assert.isRejected(carDs.put(acid, 'blip')) // not a Uint8Array value + await assert.isRejected(carDs.put('blip', new TextEncoder().encode('blip')), /only accepts CIDs or CID strings/) // not a CID key await carDs.close() }) it('truncated file', async () => { const data = await fs.readFile(path.join(__dirname, 'go.car')) await fs.writeFile('test.car', data.slice(0, data.length - 5)) - await assert.rejects(readFileComplete('test.car'), { + await assert.isRejected(readFileComplete('test.car'), { name: 'Error', message: 'Unexpected end of file' }) diff --git a/test/test-large.js b/test/test-large.js index 6f2a7bb..ee03764 100644 --- a/test/test-large.js +++ b/test/test-large.js @@ -1,6 +1,6 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' import fs from 'fs' import garbage from 'garbage' import varint from 'varint' @@ -9,6 +9,8 @@ import dagCbor from '@ipld/dag-cbor' import Car from 'datastore-car' import { promisify } from 'util' +const { assert } = chai + const unlink = promisify(fs.unlink) multiformats.add(dagCbor) diff --git a/test/test-query.js b/test/test-query.js index 4ea7856..926c179 100644 --- a/test/test-query.js +++ b/test/test-query.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import path from 'path' import multiformats from 'multiformats/basics' import { car, makeData, compareBlockData } from './fixture-data.js' @@ -8,22 +9,13 @@ import Car from 'datastore-car' import dagCbor from '@ipld/dag-cbor' import base58 from 'multiformats/bases/base58' +chai.use(chaiAsPromised) +const { assert } = chai + multiformats.add(dagCbor) multiformats.multibase.add(base58) const { readBuffer, readFile } = Car(multiformats) -if (!assert.rejects) { - // browser polyfill is incomplete - assert.rejects = async (promise, msg) => { - try { - await promise - } 
catch (err) { - return - } - assert.fail(`Promise did not reject: ${msg}`) - } -} - const factories = [['readBuffer', () => readBuffer(car)]] if (readFile) { // not in browser factories.push(['readFile', () => readFile(path.join(__dirname, 'go.car'))]) diff --git a/test/test-raw.js b/test/test-raw.js index 6eb0edd..30c696c 100644 --- a/test/test-raw.js +++ b/test/test-raw.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import { promisify } from 'util' import path from 'path' import fs from 'fs' @@ -11,6 +12,9 @@ import dagCbor from '@ipld/dag-cbor' import base58 from 'multiformats/bases/base58' import { fileURLToPath } from 'url' +chai.use(chaiAsPromised) +const { assert } = chai + const __filename = fileURLToPath(import.meta.url) const __dirname = path.dirname(__filename) @@ -97,12 +101,12 @@ describe('Raw', () => { }) it('errors', async () => { - await assert.rejects(indexer(), { + await assert.isRejected(indexer(), { name: 'TypeError', message: 'indexer() requires a file path or a ReadableStream' }) - await assert.rejects(readRaw(true, expectedIndex[0]), { + await assert.isRejected(readRaw(true, expectedIndex[0]), { name: 'TypeError', message: 'Bad fd' }) @@ -110,7 +114,7 @@ describe('Raw', () => { const badBlock = Object.assign({}, expectedIndex[expectedIndex.length - 1]) badBlock.blockLength += 10 const fd = await fs.open(path.join(__dirname, 'go.car')) - await assert.rejects(readRaw(fd, badBlock), { + await assert.isRejected(readRaw(fd, badBlock), { name: 'Error', message: `Failed to read entire block (${badBlock.blockLength - 10} instead of ${badBlock.blockLength})` }) diff --git a/test/test-readbuffer.js b/test/test-readbuffer.js index 2a07c36..8f1a23c 100644 --- a/test/test-readbuffer.js +++ b/test/test-readbuffer.js @@ -1,28 +1,20 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import 
multiformats from 'multiformats/basics' import { acid, car, makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' import dagCbor from '@ipld/dag-cbor' import base58 from 'multiformats/bases/base58' import Car from 'datastore-car' +chai.use(chaiAsPromised) +const { assert } = chai + multiformats.add(dagCbor) multiformats.multibase.add(base58) const { readBuffer } = Car(multiformats) -if (!assert.rejects) { - // browser polyfill is incomplete - assert.rejects = async (promise, msg) => { - try { - await promise - } catch (err) { - return - } - assert.fail(`Promise did not reject: ${msg}`) - } -} - let rawBlocks describe('Read Buffer', () => { @@ -36,7 +28,7 @@ describe('Read Buffer', () => { await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) - await assert.rejects(carDs.get(rawBlocks[3].cid)) // doesn't exist + await assert.isRejected(carDs.get(rawBlocks[3].cid)) // doesn't exist await carDs.close() }) @@ -50,9 +42,9 @@ describe('Read Buffer', () => { // when we instantiate from a Uint8Array, CarDatastore should be immutable it('immutable', async () => { const carDs = await readBuffer(car) - await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) - await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) - await assert.rejects(carDs.setRoots(acid)) - await assert.rejects(carDs.setRoots([acid])) + await assert.isRejected(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.delete(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.setRoots(acid)) + await assert.isRejected(carDs.setRoots([acid])) }) }) diff --git a/test/test-readfile-writestream.js b/test/test-readfile-writestream.js index 48f2b55..f517d2f 100644 --- a/test/test-readfile-writestream.js +++ b/test/test-readfile-writestream.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import fs from 
'fs' import multiformats from 'multiformats/basics' import { makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' @@ -9,6 +10,9 @@ import dagCbor from '@ipld/dag-cbor' import base58 from 'multiformats/bases/base58' import Car from 'datastore-car' +chai.use(chaiAsPromised) +const { assert } = chai + const unlink = promisify(fs.unlink) multiformats.add(dagCbor) @@ -73,9 +77,9 @@ describe('Read File & Write Stream', () => { it('writeStream errors', async () => { const carDs = await writeStream(fs.createWriteStream('./test.car')) await carDs.put(cborBlocks[0].cid, cborBlocks[0].binary) - await assert.rejects(carDs.delete(cborBlocks[0].cid)) + await assert.isRejected(carDs.delete(cborBlocks[0].cid)) await carDs.close() - await assert.rejects(carDs.close()) + await assert.isRejected(carDs.close()) }) after(async () => { diff --git a/test/test-readfile.js b/test/test-readfile.js index 5cb906b..43fc806 100644 --- a/test/test-readfile.js +++ b/test/test-readfile.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import path from 'path' import multiformats from 'multiformats/basics' import { acid, makeData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' @@ -9,6 +10,9 @@ import base58 from 'multiformats/bases/base58' import Car from 'datastore-car' import { fileURLToPath } from 'url' +chai.use(chaiAsPromised) +const { assert } = chai + const __filename = fileURLToPath(import.meta.url) const __dirname = path.dirname(__filename) @@ -29,7 +33,7 @@ describe('Read File', () => { await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) - await assert.rejects(carDs.get(rawBlocks[3].cid)) // doesn't exist + await assert.isRejected(carDs.get(rawBlocks[3].cid)) // doesn't exist await carDs.close() }) @@ -43,9 +47,9 @@ describe('Read File', () => { // when we instantiate from a File, CarDatastore should be immutable it('immutable', async () 
=> { const carDs = await readFileComplete(path.join(__dirname, 'go.car')) - await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) - await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) - await assert.rejects(carDs.setRoots(acid)) - await assert.rejects(carDs.setRoots([acid])) + await assert.isRejected(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.delete(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.setRoots(acid)) + await assert.isRejected(carDs.setRoots([acid])) }) }) diff --git a/test/test-readindexed.js b/test/test-readindexed.js index df6202b..198a1da 100644 --- a/test/test-readindexed.js +++ b/test/test-readindexed.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import path from 'path' import multiformats from 'multiformats/basics' import { acid, makeData, compareBlockData, verifyBlocks, verifyHas, verifyRoots } from './fixture-data.js' @@ -9,6 +10,9 @@ import base58 from 'multiformats/bases/base58' import Car from 'datastore-car' import { fileURLToPath } from 'url' +chai.use(chaiAsPromised) +const { assert } = chai + const __filename = fileURLToPath(import.meta.url) const __dirname = path.dirname(__filename) @@ -30,7 +34,7 @@ describe('Read Indexed', () => { await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) - await assert.rejects(carDs.get(rawBlocks[3].cid)) // doesn't exist + await assert.isRejected(carDs.get(rawBlocks[3].cid)) // doesn't exist await carDs.close() }) @@ -44,10 +48,10 @@ describe('Read Indexed', () => { // when we instantiate from a Stream, CarDatastore should be immutable it('immutable', async () => { const carDs = await readFileIndexed(path.join(__dirname, 'go.car')) - await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) - await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) - 
await assert.rejects(carDs.setRoots(acid)) - await assert.rejects(carDs.setRoots([acid])) + await assert.isRejected(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.delete(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.setRoots(acid)) + await assert.isRejected(carDs.setRoots([acid])) }) it('read existing (query())', async () => { diff --git a/test/test-readstream.js b/test/test-readstream.js index a4fe5da..ffe4c0c 100644 --- a/test/test-readstream.js +++ b/test/test-readstream.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import fs from 'fs' import path from 'path' import multiformats from 'multiformats/basics' @@ -10,6 +11,9 @@ import base58 from 'multiformats/bases/base58' import Car from 'datastore-car' import { fileURLToPath } from 'url' +chai.use(chaiAsPromised) +const { assert } = chai + const __filename = fileURLToPath(import.meta.url) const __dirname = path.dirname(__filename) @@ -86,13 +90,13 @@ describe('Read Stream', () => { it('errors & immutability', async () => { const carDs = await readStreaming(fs.createReadStream(path.join(__dirname, 'go.car'))) - await assert.rejects(carDs.has(allBlocks[0].cid)) - await assert.rejects(carDs.get(allBlocks[0].cid)) + await assert.isRejected(carDs.has(allBlocks[0].cid)) + await assert.isRejected(carDs.get(allBlocks[0].cid)) // when we instantiate from a Stream, CarDatastore should be immutable - await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) - await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) - await assert.rejects(carDs.setRoots(acid)) - await assert.rejects(carDs.setRoots([acid])) + await assert.isRejected(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.delete(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.setRoots(acid)) + await 
assert.isRejected(carDs.setRoots([acid])) }) }) diff --git a/test/test-readstreamcomplete.js b/test/test-readstreamcomplete.js index 7ee6285..0810235 100644 --- a/test/test-readstreamcomplete.js +++ b/test/test-readstreamcomplete.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ -import assert from 'assert' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' import fs from 'fs' import path from 'path' import multiformats from 'multiformats/basics' @@ -10,6 +11,9 @@ import base58 from 'multiformats/bases/base58' import Car from 'datastore-car' import { fileURLToPath } from 'url' +chai.use(chaiAsPromised) +const { assert } = chai + const __filename = fileURLToPath(import.meta.url) const __dirname = path.dirname(__filename) @@ -30,7 +34,7 @@ describe('Read Stream', () => { await verifyHas(carDs) await verifyBlocks(carDs) await verifyRoots(carDs) - await assert.rejects(carDs.get(rawBlocks[3].cid)) // doesn't exist + await assert.isRejected(carDs.get(rawBlocks[3].cid)) // doesn't exist await carDs.close() }) @@ -44,9 +48,9 @@ describe('Read Stream', () => { // when we instantiate from a Stream, CarDatastore should be immutable it('immutable', async () => { const carDs = await readStreamComplete(fs.createReadStream(path.join(__dirname, 'go.car'))) - await assert.rejects(carDs.put(acid, new TextEncoder().encode('blip'))) - await assert.rejects(carDs.delete(acid, new TextEncoder().encode('blip'))) - await assert.rejects(carDs.setRoots(acid)) - await assert.rejects(carDs.setRoots([acid])) + await assert.isRejected(carDs.put(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.delete(acid, new TextEncoder().encode('blip'))) + await assert.isRejected(carDs.setRoots(acid)) + await assert.isRejected(carDs.setRoots([acid])) }) }) From 803c7211676a2087434912ed1c7cc02f79552683 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Thu, 27 Aug 2020 19:33:49 +1000 Subject: [PATCH 22/23] refactor complete-graph test --- test/test-complete-graph.js | 37 
+++++++------------------------------ 1 file changed, 7 insertions(+), 30 deletions(-) diff --git a/test/test-complete-graph.js b/test/test-complete-graph.js index 0465ba1..e21f43f 100644 --- a/test/test-complete-graph.js +++ b/test/test-complete-graph.js @@ -15,32 +15,8 @@ multiformats.add(dagCbor) multiformats.multibase.add(base58) const { writeStream, readBuffer, completeGraph } = Car(multiformats) const Block = IpldBlock(multiformats) -const same = assert.deepStrictEqual const { PassThrough } = stream -function all (car) { - const _traverse = async function * (link, seen = new Set()) { - link = await link - seen.add(link.toString('base64')) - const encoded = await car.get(link) - const block = Block.create(encoded, link) - yield block - const cid = await block.cid() - if (cid.code === 0x55) { - return - } - - for (const [, link] of block.reader().links()) { - if (seen.has(link.toString('base64'))) { - continue - } - yield * _traverse(link, seen) - } - } - - return _traverse(car.getRoots().then(([root]) => root)) -} - async function createGet (blocks) { const db = new Map() for (const block of blocks) { @@ -77,20 +53,21 @@ describe('Create car for full graph', () => { }, 'dag-cbor') const expected = [root, leaf1, leaf2, raw] + const get = await createGet(expected) const stream = new PassThrough() const car = await writeStream(stream) await completeGraph(await root.cid(), get, car) const data = await concat(stream) - const reader = await readBuffer(data) - const [readRoot, ...more] = await reader.getRoots() - same(more.length, 0) - assert.ok(readRoot.equals(await root.cid())) + const carDs = await readBuffer(data) + const roots = await carDs.getRoots() + assert.strictEqual(roots.length, 1) + assert.deepStrictEqual(roots[0], await root.cid()) - for await (const block of all(reader)) { + for await (const { key: cid } of carDs.query()) { const expectedBlock = expected.shift() - assert.ok((await expectedBlock.cid()).equals(await block.cid())) + 
assert.strictEqual(cid, (await expectedBlock.cid()).toString()) } }) }) From b428c6a2ef9e0e55b4a4be7e8ef2ed0397812f4e Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Fri, 28 Aug 2020 15:22:50 +1000 Subject: [PATCH 23/23] simplify CID handling and some type checking --- datastore.js | 19 +++++++++++------ lib/coding-browser.js | 13 +++++++----- lib/reader-browser.js | 2 +- lib/reader-writer-iface.js | 29 ++++++++++++++++++------- lib/reader.js | 12 +++++------ lib/util.js | 43 -------------------------------------- lib/writer-stream.js | 2 +- 7 files changed, 50 insertions(+), 70 deletions(-) delete mode 100644 lib/util.js diff --git a/datastore.js b/datastore.js index bceddc9..407e3c4 100644 --- a/datastore.js +++ b/datastore.js @@ -1,5 +1,4 @@ import interfaceDatastore from 'interface-datastore' -import { toKey } from './lib/util.js' const { filter, map } = interfaceDatastore.utils @@ -12,7 +11,7 @@ const { filter, map } = interfaceDatastore.utils */ class CarDatastore { constructor (multiformats, reader, writer) { - this._multiformats = multiformats + this.multiformats = multiformats this._reader = reader this._writer = writer } @@ -34,7 +33,7 @@ class CarDatastore { * @return {Uint8Array} the IPLD block data referenced by the CID. */ async get (key) { - key = toKey(this._multiformats, key, 'get') + key = toKey(this.multiformats, key, 'get') return this._reader.get(key) } @@ -55,7 +54,7 @@ class CarDatastore { * @return {boolean} indicating whether the key exists in this Datastore. */ async has (key) { - key = toKey(this._multiformats, key, 'has') + key = toKey(this.multiformats, key, 'has') return this._reader.has(key) } @@ -78,7 +77,7 @@ class CarDatastore { * `CID`. */ async put (key, value) { - key = toKey(this._multiformats, key, 'put') + key = toKey(this.multiformats, key, 'put') if (!(value instanceof Uint8Array)) { throw new TypeError('put() can only receive Uint8Arrays or Buffers') } @@ -97,7 +96,7 @@ class CarDatastore { * the block. 
*/ async delete (key) { - key = toKey(this._multiformats, key, 'delete') + key = toKey(this.multiformats, key, 'delete') return this._writer.delete(key) } @@ -227,4 +226,12 @@ class CarDatastore { } } +function toKey (multiformats, key, method) { + try { + return multiformats.CID.from(key.toString()) + } catch (e) { + throw new TypeError(`${method}() only accepts CIDs or CID strings`) + } +} + export default CarDatastore diff --git a/lib/coding-browser.js b/lib/coding-browser.js index 575de77..1da311c 100644 --- a/lib/coding-browser.js +++ b/lib/coding-browser.js @@ -1,5 +1,4 @@ import varint from 'varint' -import { isBlock, isCID } from './util.js' const CIDV0_BYTES = { SHA2_256: 0x12, @@ -152,9 +151,10 @@ function Encoder (multiformats, writer) { if (!Array.isArray(roots)) { roots = [roots] } - for (const root of roots) { - if (!isCID(root)) { - throw new TypeError('Roots must be CIDs') + for (let i = 0; i < roots.length; i++) { + roots[i] = multiformats.CID.asCID(roots[i]) + if (!roots[i]) { + throw new TypeError('Roots must be CIDs') // or gently coercable to CIDs } } @@ -164,7 +164,10 @@ function Encoder (multiformats, writer) { }, async writeBlock (block) { - if (!isBlock(block)) { + if (typeof block !== 'object' || + !(block.binary instanceof Uint8Array) || + !block.cid || + !(block.cid.bytes instanceof Uint8Array)) { throw new TypeError('Block list must be of type { cid, binary }') } await writer(new Uint8Array(varint.encode(block.cid.bytes.length + block.binary.length))) diff --git a/lib/reader-browser.js b/lib/reader-browser.js index d1a76ba..82a2839 100644 --- a/lib/reader-browser.js +++ b/lib/reader-browser.js @@ -3,7 +3,7 @@ import { createFromDecoded } from './reader-writer-iface.js' async function createBufferReader (multiformats, data) { const decoded = await coding.decodeBuffer(multiformats, data) - return createFromDecoded(decoded) + return createFromDecoded(multiformats, decoded) } export { createBufferReader } diff --git 
a/lib/reader-writer-iface.js b/lib/reader-writer-iface.js index 255e3e8..44c6b02 100644 --- a/lib/reader-writer-iface.js +++ b/lib/reader-writer-iface.js @@ -1,9 +1,12 @@ import interfaceDatastore from 'interface-datastore' -import { verifyRoots } from './util.js' const { Errors } = interfaceDatastore class Reader { + constructor (multiformats) { + this.multiformats = multiformats + } + /* c8 ignore next 3 */ get () { throw new Error('Unimplemented method') @@ -27,6 +30,10 @@ class Reader { } class Writer { + constructor (multiformats) { + this.multiformats = multiformats + } + put () { throw new Error('Unimplemented method') } @@ -36,7 +43,13 @@ class Writer { } setRoots (roots) { - this.roots = verifyRoots(roots) + for (let i = 0; i < roots.length; i++) { + roots[i] = this.multiformats.CID.asCID(roots[i]) + if (!roots[i]) { + throw new TypeError('Roots must be CIDs') // or gently coercable to CIDs + } + } + this.roots = roots } close () {} @@ -49,8 +62,8 @@ class NoWriter extends Writer { } class DecodedReader extends Reader { - constructor (carData) { - super() + constructor (multiformats, carData) { + super(multiformats) this._carData = carData } @@ -76,8 +89,8 @@ class DecodedReader extends Reader { } class StreamingReader extends Reader { - constructor (decoder) { - super() + constructor (multiformats, decoder) { + super(multiformats) this._decoder = decoder } @@ -106,10 +119,10 @@ class StreamingReader extends Reader { } } -async function createFromDecoded (decoded) { +async function createFromDecoded (multiformats, decoded) { const cids = decoded.blocks.map((b) => b.cid) decoded.keys = cids.map((c) => c.toString()) - return new DecodedReader(decoded) + return new DecodedReader(multiformats, decoded) } export { createFromDecoded, Reader, Writer, StreamingReader, NoWriter } diff --git a/lib/reader.js b/lib/reader.js index 2338cf4..4020989 100644 --- a/lib/reader.js +++ b/lib/reader.js @@ -8,17 +8,17 @@ const { Errors } = interfaceDatastore async function 
createStreamCompleteReader (multiformats, stream) { const decoded = await coding.decodeStream(multiformats, stream) - return createFromDecoded(decoded) + return createFromDecoded(multiformats, decoded) } async function createStreamingReader (multiformats, stream) { const decoder = coding.StreamDecoder(multiformats, stream) - return new StreamingReader(decoder) + return new StreamingReader(multiformats, decoder) } async function createFileReader (multiformats, data) { const decoded = await coding.decodeFile(multiformats, data) - return createFromDecoded(decoded) + return createFromDecoded(multiformats, decoded) } async function createFileIndexedReader (multiformats, path) { @@ -30,12 +30,12 @@ async function createFileIndexedReader (multiformats, path) { index.set(cidStr, { cid, blockLength, blockOffset }) order.push(cidStr) } - return new IndexedReader(path, roots, index, order) + return new IndexedReader(multiformats, path, roots, index, order) } class IndexedReader extends Reader { - constructor (path, roots, index, order) { - super() + constructor (multiformats, path, roots, index, order) { + super(multiformats) this._path = path this._roots = roots this._index = index diff --git a/lib/util.js b/lib/util.js deleted file mode 100644 index 44d93bd..0000000 --- a/lib/util.js +++ /dev/null @@ -1,43 +0,0 @@ -// { cid, binary } -function isBlock (block) { - return typeof block === 'object' && isCID(block.cid) && isBuffer(block.binary) -} - -function isCID (key) { - return typeof key === 'object' && - isBuffer(key.bytes) && - isBuffer(key.multihash) && - (key.version === 0 || key.version === 1) && - typeof key.code === 'number' - // don't handle old style CIDs -} - -function isBuffer (b) { - return b instanceof ArrayBuffer || ArrayBuffer.isView(b) -} - -function toKey (multiformats, key, method) { - if (!isCID(key)) { - try { - key = multiformats.CID.from(key.toString()) - } catch (e) { - throw new TypeError(`${method}() only accepts CIDs or CID strings`) - } - } - - 
return key // cidToKey(key) -} - -function verifyRoots (roots) { - if (!Array.isArray(roots)) { - roots = [roots] - } - for (const root of roots) { - if (!isCID(root)) { - throw new TypeError('Roots may only be a CID or an array of CIDs') - } - } - return roots -} - -export { isBlock, isCID, toKey, verifyRoots } diff --git a/lib/writer-stream.js b/lib/writer-stream.js index 52deac4..0dd7ac3 100644 --- a/lib/writer-stream.js +++ b/lib/writer-stream.js @@ -3,7 +3,7 @@ import { Encoder, createStreamWriter } from './coding.js' class StreamWriter extends Writer { constructor (multiformats, outStream) { - super() + super(multiformats) this._outStream = outStream const writer = createStreamWriter(outStream) this._encoder = Encoder(multiformats, writer)