From d13d15f022a87d04a35f0f7822142f9cb898479c Mon Sep 17 00:00:00 2001 From: Volker Mische Date: Mon, 19 Jul 2021 13:04:24 +0200 Subject: [PATCH] feat: upgrade to the new multiformats (#3556) - Replaces the old [interface-ipld-format](https://github.com/ipld/interface-ipld-format) stack with the new [multiformats](https://github.com/multiformats/js-multiformats) stack. - The Block API takes/returns `Uint8Array`s instead of [ipld-block](https://github.com/ipld/js-ipld-block) objects BREAKING CHANGE: ipld-formats no longer supported, use multiformat BlockCodecs instead Co-authored-by: Rod Vagg Co-authored-by: achingbrain --- .github/workflows/bundlesize.yml | 2 +- .github/workflows/typecheck.yml | 2 +- .gitignore | 1 + README.md | 31 +- docs/MIGRATION-TO-ASYNC-AWAIT.md | 12 +- docs/core-api/BLOCK.md | 7 +- docs/core-api/DAG.md | 2 +- docs/core-api/OBJECT.md | 6 +- docs/core-api/PIN.md | 14 +- examples/browser-add-readable-stream/index.js | 13 +- examples/browser-exchange-files/package.json | 4 +- examples/browser-ipns-publish/index.js | 6 +- examples/browser-ipns-publish/package.json | 8 +- examples/circuit-relaying/package.json | 4 +- examples/custom-ipfs-repo/index.js | 126 +++-- examples/custom-ipfs-repo/package.json | 10 +- examples/custom-ipld-formats/daemon-node.js | 65 +-- .../custom-ipld-formats/in-process-node.js | 51 +- examples/custom-ipld-formats/package.json | 9 +- examples/custom-libp2p/package.json | 4 +- .../explore-ethereum-blockchain/CHANGELOG.md | 16 - .../explore-ethereum-blockchain/README.md | 58 -- .../eth-stuffs/block_302515 | Bin 530 -> 0 bytes .../eth-stuffs/block_302516 | Bin 530 -> 0 bytes .../eth-stuffs/block_302517 | Bin 527 -> 0 bytes .../eth-stuffs/state_000017_302516 | Bin 105 -> 0 bytes .../eth-stuffs/state_00001_302516 | 1 - .../eth-stuffs/state_0000_302516 | Bin 404 -> 0 bytes .../eth-stuffs/state_000_302516 | Bin 532 -> 0 bytes .../eth-stuffs/state_00_302516 | Bin 532 -> 0 bytes .../eth-stuffs/state_0_302516 | Bin 532 -> 0 bytes 
.../eth-stuffs/state_r_302516 | 1 - .../load-eth-stuffs.sh | 13 - .../explore-ethereum-blockchain/package.json | 19 - examples/explore-ethereum-blockchain/test.js | 55 -- .../http-client-browser-pubsub/package.json | 2 +- .../http-client-bundle-webpack/package.json | 2 +- examples/http-client-name-api/package.json | 6 +- .../{ => public}/index.html | 0 .../{ => public}/index.js | 0 examples/ipfs-101/package.json | 2 +- examples/ipfs-client-add-files/package.json | 2 +- examples/test-ipfs-example/package.json | 2 +- examples/traverse-ipld-graphs/README.md | 9 - examples/traverse-ipld-graphs/eth.js | 48 -- .../get-path-accross-formats.js | 10 +- examples/traverse-ipld-graphs/git.js | 66 --- examples/traverse-ipld-graphs/package.json | 6 +- examples/traverse-ipld-graphs/test.js | 9 - examples/traverse-ipld-graphs/tree.js | 40 -- examples/types-use-ipfs-from-ts/src/main.ts | 4 +- .../types-use-ipfs-from-typed-js/src/main.js | 4 +- .../package-list.json => package-list.json | 16 +- package.json | 3 +- packages/interface-ipfs-core/package.json | 36 +- packages/interface-ipfs-core/src/add-all.js | 16 +- packages/interface-ipfs-core/src/add.js | 30 +- .../src/bitswap/transfer.js | 42 +- .../src/bitswap/wantlist-for-peer.js | 3 +- .../src/bitswap/wantlist.js | 8 +- packages/interface-ipfs-core/src/block/get.js | 54 +- packages/interface-ipfs-core/src/block/put.js | 62 +-- packages/interface-ipfs-core/src/block/rm.js | 45 +- .../interface-ipfs-core/src/block/stat.js | 12 +- packages/interface-ipfs-core/src/cat.js | 33 +- packages/interface-ipfs-core/src/dag/get.js | 62 ++- packages/interface-ipfs-core/src/dag/index.js | 3 +- packages/interface-ipfs-core/src/dag/put.js | 60 +-- .../interface-ipfs-core/src/dag/resolve.js | 19 +- packages/interface-ipfs-core/src/dag/tree.js | 96 ---- .../interface-ipfs-core/src/dht/provide.js | 4 +- packages/interface-ipfs-core/src/dht/utils.js | 8 +- packages/interface-ipfs-core/src/files/cp.js | 14 +- 
packages/interface-ipfs-core/src/files/ls.js | 13 +- .../interface-ipfs-core/src/files/mkdir.js | 5 +- packages/interface-ipfs-core/src/files/rm.js | 2 +- .../interface-ipfs-core/src/files/stat.js | 21 +- .../interface-ipfs-core/src/files/write.js | 19 +- packages/interface-ipfs-core/src/get.js | 36 +- packages/interface-ipfs-core/src/ls.js | 6 +- .../src/miscellaneous/id.js | 2 - .../src/miscellaneous/resolve.js | 8 +- .../interface-ipfs-core/src/name/publish.js | 8 +- .../interface-ipfs-core/src/name/resolve.js | 12 +- .../interface-ipfs-core/src/object/data.js | 16 +- .../interface-ipfs-core/src/object/get.js | 80 ++- .../interface-ipfs-core/src/object/links.js | 56 +- .../interface-ipfs-core/src/object/new.js | 4 +- .../src/object/patch/add-link.js | 28 +- .../src/object/patch/rm-link.js | 11 +- .../interface-ipfs-core/src/object/put.js | 90 ++-- .../interface-ipfs-core/src/object/stat.js | 84 +-- .../interface-ipfs-core/src/object/utils.js | 15 - packages/interface-ipfs-core/src/pin/add.js | 19 +- .../interface-ipfs-core/src/pin/remote/ls.js | 10 +- .../src/pin/remote/rm-all.js | 10 +- .../interface-ipfs-core/src/pin/remote/rm.js | 10 +- packages/interface-ipfs-core/src/pin/utils.js | 12 +- .../interface-ipfs-core/src/refs-local.js | 13 +- packages/interface-ipfs-core/src/refs.js | 45 +- packages/interface-ipfs-core/src/repo/gc.js | 97 ++-- .../interface-ipfs-core/src/swarm/addrs.js | 4 +- .../interface-ipfs-core/src/swarm/peers.js | 4 +- .../src/utils/blockstore-adapter.js | 52 ++ .../interface-ipfs-core/src/utils/index.js | 9 +- .../interface-ipfs-core/src/utils/mocha.js | 9 +- .../src/utils/traverse-leaf-nodes.js | 8 +- packages/ipfs-cli/package.json | 36 +- packages/ipfs-cli/src/commands/add.js | 17 +- .../ipfs-cli/src/commands/bitswap/stat.js | 10 +- .../ipfs-cli/src/commands/bitswap/unwant.js | 11 +- .../ipfs-cli/src/commands/bitswap/wantlist.js | 9 +- packages/ipfs-cli/src/commands/block/get.js | 4 +- packages/ipfs-cli/src/commands/block/put.js | 19 +- 
packages/ipfs-cli/src/commands/block/rm.js | 2 +- packages/ipfs-cli/src/commands/block/stat.js | 11 +- packages/ipfs-cli/src/commands/cid.js | 9 +- packages/ipfs-cli/src/commands/cid/base32.js | 36 ++ packages/ipfs-cli/src/commands/cid/bases.js | 40 ++ packages/ipfs-cli/src/commands/cid/codecs.js | 30 ++ packages/ipfs-cli/src/commands/cid/format.js | 226 ++++++++ packages/ipfs-cli/src/commands/cid/hashes.js | 30 ++ packages/ipfs-cli/src/commands/dag/get.js | 22 +- packages/ipfs-cli/src/commands/dag/put.js | 29 +- .../src/commands/dht/find-providers.js | 2 +- packages/ipfs-cli/src/commands/dht/get.js | 2 +- packages/ipfs-cli/src/commands/dht/provide.js | 2 +- packages/ipfs-cli/src/commands/files/chmod.js | 2 +- packages/ipfs-cli/src/commands/files/cp.js | 2 +- packages/ipfs-cli/src/commands/files/flush.js | 13 +- packages/ipfs-cli/src/commands/files/ls.js | 9 +- packages/ipfs-cli/src/commands/files/mkdir.js | 4 +- packages/ipfs-cli/src/commands/files/mv.js | 4 +- packages/ipfs-cli/src/commands/files/stat.js | 43 +- packages/ipfs-cli/src/commands/files/touch.js | 4 +- packages/ipfs-cli/src/commands/files/write.js | 4 +- packages/ipfs-cli/src/commands/init.js | 3 +- packages/ipfs-cli/src/commands/ls.js | 10 +- .../ipfs-cli/src/commands/name/publish.js | 2 +- packages/ipfs-cli/src/commands/object/data.js | 2 +- packages/ipfs-cli/src/commands/object/get.js | 23 +- .../ipfs-cli/src/commands/object/links.js | 11 +- packages/ipfs-cli/src/commands/object/new.js | 9 +- .../src/commands/object/patch/add-link.js | 35 +- .../src/commands/object/patch/append-data.js | 11 +- .../src/commands/object/patch/rm-link.js | 11 +- .../src/commands/object/patch/set-data.js | 12 +- packages/ipfs-cli/src/commands/object/put.js | 31 +- packages/ipfs-cli/src/commands/object/stat.js | 2 +- packages/ipfs-cli/src/commands/pin/add.js | 9 +- packages/ipfs-cli/src/commands/pin/ls.js | 11 +- packages/ipfs-cli/src/commands/pin/rm.js | 9 +- packages/ipfs-cli/src/commands/refs-local.js | 4 +- 
packages/ipfs-cli/src/commands/resolve.js | 5 +- packages/ipfs-cli/src/types.d.ts | 2 +- packages/ipfs-cli/src/utils.js | 15 +- packages/ipfs-cli/test/add.js | 101 ++-- packages/ipfs-cli/test/bitswap.js | 66 ++- packages/ipfs-cli/test/block.js | 68 +-- packages/ipfs-cli/test/cat.js | 10 +- packages/ipfs-cli/test/cid.js | 304 +++++++++++ packages/ipfs-cli/test/commands.js | 2 +- packages/ipfs-cli/test/dag.js | 107 ++-- packages/ipfs-cli/test/dht.js | 16 +- packages/ipfs-cli/test/files/flush.js | 20 +- packages/ipfs-cli/test/files/ls.js | 26 +- packages/ipfs-cli/test/files/stat.js | 30 +- packages/ipfs-cli/test/get.js | 4 +- packages/ipfs-cli/test/ls.js | 71 ++- packages/ipfs-cli/test/object.js | 243 ++++++--- packages/ipfs-cli/test/pin.js | 124 +++-- packages/ipfs-cli/test/refs-local.js | 4 +- packages/ipfs-cli/test/refs.js | 4 +- packages/ipfs-cli/test/repo.js | 4 +- packages/ipfs-cli/test/resolve.js | 6 +- packages/ipfs-cli/tsconfig.json | 6 - packages/ipfs-client/.aegir.js | 2 +- packages/ipfs-client/package.json | 2 +- packages/ipfs-client/src/index.js | 4 +- packages/ipfs-core-types/package.json | 10 +- .../ipfs-core-types/src/bitswap/index.d.ts | 14 +- packages/ipfs-core-types/src/block/index.d.ts | 42 +- .../ipfs-core-types/src/config/index.d.ts | 24 +- packages/ipfs-core-types/src/dag/index.d.ts | 60 +-- packages/ipfs-core-types/src/dht/index.d.ts | 74 +-- packages/ipfs-core-types/src/files/index.d.ts | 32 +- packages/ipfs-core-types/src/index.d.ts | 21 + packages/ipfs-core-types/src/name/index.d.ts | 16 +- .../src/name/pubsub/index.d.ts | 1 - .../ipfs-core-types/src/object/index.d.ts | 20 +- .../src/object/patch/index.d.ts | 4 +- packages/ipfs-core-types/src/pin/index.d.ts | 19 +- .../ipfs-core-types/src/pin/remote/index.d.ts | 2 +- .../src/pin/remote/service/index.d.ts | 1 + .../ipfs-core-types/src/pubsub/index.d.ts | 4 +- packages/ipfs-core-types/src/refs/index.d.ts | 7 +- packages/ipfs-core-types/src/repo/index.d.ts | 12 +- 
packages/ipfs-core-types/src/root.d.ts | 7 +- packages/ipfs-core-types/src/stats/index.d.ts | 13 +- packages/ipfs-core-types/src/swarm/index.d.ts | 3 - packages/ipfs-core-types/src/utils.d.ts | 10 +- packages/ipfs-core-utils/package.json | 14 +- packages/ipfs-core-utils/src/cid.js | 33 -- .../src/files/normalise-input/index.js | 3 +- .../files/normalise-input/normalise-input.js | 2 + packages/ipfs-core-utils/src/multibases.js | 88 +++ packages/ipfs-core-utils/src/multicodecs.js | 86 +++ packages/ipfs-core-utils/src/multihashes.js | 86 +++ .../src/pins/normalise-input.js | 10 +- .../ipfs-core-utils/src/to-cid-and-path.js | 8 +- packages/ipfs-core-utils/src/types.d.ts | 7 + .../test/pins/normalise-input.spec.js | 6 +- packages/ipfs-core-utils/tsconfig.json | 5 + packages/ipfs-core/.aegir.js | 2 +- packages/ipfs-core/package.json | 87 ++- packages/ipfs-core/src/block-storage.js | 150 ++++++ .../ipfs-core/src/components/add-all/index.js | 27 +- .../ipfs-core/src/components/bitswap/index.js | 2 +- .../ipfs-core/src/components/bitswap/stat.js | 2 +- .../src/components/bitswap/unwant.js | 8 - .../components/bitswap/wantlist-for-peer.js | 2 +- .../ipfs-core/src/components/block/get.js | 9 +- .../ipfs-core/src/components/block/index.js | 21 +- .../ipfs-core/src/components/block/put.js | 70 +-- packages/ipfs-core/src/components/block/rm.js | 26 +- .../ipfs-core/src/components/block/stat.js | 8 +- .../ipfs-core/src/components/block/utils.js | 19 +- .../ipfs-core/src/components/bootstrap/add.js | 2 +- .../src/components/bootstrap/clear.js | 2 +- .../src/components/bootstrap/index.js | 2 +- .../src/components/bootstrap/list.js | 2 +- .../src/components/bootstrap/reset.js | 2 +- .../ipfs-core/src/components/bootstrap/rm.js | 2 +- packages/ipfs-core/src/components/cat.js | 10 +- packages/ipfs-core/src/components/config.js | 2 +- packages/ipfs-core/src/components/dag/get.js | 22 +- .../ipfs-core/src/components/dag/index.js | 30 +- packages/ipfs-core/src/components/dag/put.js | 122 
+---- .../ipfs-core/src/components/dag/resolve.js | 52 +- packages/ipfs-core/src/components/dag/tree.js | 33 -- packages/ipfs-core/src/components/dht.js | 17 +- .../ipfs-core/src/components/files/chmod.js | 68 ++- packages/ipfs-core/src/components/files/cp.js | 16 +- .../ipfs-core/src/components/files/index.js | 38 +- packages/ipfs-core/src/components/files/ls.js | 2 +- .../ipfs-core/src/components/files/mkdir.js | 19 +- packages/ipfs-core/src/components/files/mv.js | 5 +- .../ipfs-core/src/components/files/read.js | 2 +- packages/ipfs-core/src/components/files/rm.js | 5 +- .../ipfs-core/src/components/files/stat.js | 9 +- .../ipfs-core/src/components/files/touch.js | 63 ++- .../src/components/files/utils/add-link.js | 155 ++++-- .../src/components/files/utils/create-node.js | 31 +- .../src/components/files/utils/hamt-utils.js | 75 +-- .../src/components/files/utils/remove-link.js | 85 +-- .../src/components/files/utils/to-mfs-path.js | 30 +- .../src/components/files/utils/to-trail.js | 15 +- .../components/files/utils/update-mfs-root.js | 2 +- .../src/components/files/utils/update-tree.js | 17 +- .../components/files/utils/with-mfs-root.js | 21 +- .../ipfs-core/src/components/files/write.js | 34 +- packages/ipfs-core/src/components/gc-lock.js | 25 - packages/ipfs-core/src/components/get.js | 12 +- packages/ipfs-core/src/components/index.js | 103 ++-- packages/ipfs-core/src/components/ipld.js | 15 - packages/ipfs-core/src/components/ipns.js | 4 +- packages/ipfs-core/src/components/libp2p.js | 44 +- packages/ipfs-core/src/components/ls.js | 20 +- .../ipfs-core/src/components/name/index.js | 7 +- .../ipfs-core/src/components/name/publish.js | 7 +- .../ipfs-core/src/components/name/resolve.js | 11 +- .../ipfs-core/src/components/name/utils.js | 9 +- packages/ipfs-core/src/components/network.js | 26 +- .../ipfs-core/src/components/object/data.js | 8 +- .../ipfs-core/src/components/object/get.js | 25 +- .../ipfs-core/src/components/object/index.js | 24 +- 
.../ipfs-core/src/components/object/links.js | 50 +- .../ipfs-core/src/components/object/new.js | 25 +- .../src/components/object/patch/add-link.js | 20 +- .../components/object/patch/append-data.js | 22 +- .../src/components/object/patch/index.js | 18 +- .../src/components/object/patch/rm-link.js | 15 +- .../src/components/object/patch/set-data.js | 20 +- .../ipfs-core/src/components/object/put.js | 105 +--- .../ipfs-core/src/components/object/stat.js | 26 +- .../ipfs-core/src/components/pin/add-all.js | 22 +- packages/ipfs-core/src/components/pin/add.js | 4 +- .../ipfs-core/src/components/pin/index.js | 20 +- packages/ipfs-core/src/components/pin/ls.js | 28 +- .../src/components/pin/pin-manager.js | 351 ------------ .../ipfs-core/src/components/pin/rm-all.js | 19 +- packages/ipfs-core/src/components/ping.js | 2 +- .../ipfs-core/src/components/refs/index.js | 131 +++-- .../ipfs-core/src/components/refs/local.js | 2 +- packages/ipfs-core/src/components/repo/gc.js | 137 +---- .../ipfs-core/src/components/repo/index.js | 20 +- .../ipfs-core/src/components/repo/stat.js | 2 +- .../ipfs-core/src/components/repo/version.js | 2 +- packages/ipfs-core/src/components/resolve.js | 27 +- packages/ipfs-core/src/components/root.js | 12 +- packages/ipfs-core/src/components/start.js | 9 +- packages/ipfs-core/src/components/stats/bw.js | 2 +- .../ipfs-core/src/components/stats/index.js | 2 +- packages/ipfs-core/src/components/stop.js | 6 +- packages/ipfs-core/src/components/storage.js | 9 +- packages/ipfs-core/src/components/version.js | 2 +- packages/ipfs-core/src/index.js | 12 +- packages/ipfs-core/src/ipns/publisher.js | 4 +- packages/ipfs-core/src/ipns/resolver.js | 2 +- packages/ipfs-core/src/ipns/routing/config.js | 2 +- .../src/ipns/routing/offline-datastore.js | 2 +- .../src/ipns/routing/pubsub-datastore.js | 6 +- packages/ipfs-core/src/mfs-preload.js | 5 +- packages/ipfs-core/src/runtime/ipld.js | 50 -- .../runtime/libp2p-pubsub-routers-nodejs.js | 1 - 
.../ipfs-core/src/runtime/repo-browser.js | 18 +- packages/ipfs-core/src/runtime/repo-nodejs.js | 24 +- packages/ipfs-core/src/types.d.ts | 81 ++- packages/ipfs-core/src/utils.js | 187 +++++-- packages/ipfs-core/src/utils/service.js | 1 + packages/ipfs-core/test/block-storage.spec.js | 41 ++ packages/ipfs-core/test/create-node.spec.js | 21 +- packages/ipfs-core/test/exports.spec.js | 10 +- .../test/fixtures/planets/mercury/wiki.md | 12 - .../test/fixtures/planets/solar-system.md | 10 - packages/ipfs-core/test/init.spec.js | 8 +- packages/ipfs-core/test/ipld.spec.js | 28 +- packages/ipfs-core/test/mfs-preload.spec.js | 8 +- packages/ipfs-core/test/name.spec.js | 21 - packages/ipfs-core/test/node.js | 3 - packages/ipfs-core/test/preload.spec.js | 26 +- packages/ipfs-core/test/utils.js | 79 --- packages/ipfs-core/test/utils.spec.js | 75 +++ packages/ipfs-core/test/utils/codecs.js | 12 + .../ipfs-core/test/utils/create-backend.js | 19 + packages/ipfs-core/test/utils/create-node.js | 3 +- .../test/utils/create-repo-browser.js | 95 ---- .../test/utils/create-repo-nodejs.js | 49 -- packages/ipfs-core/test/utils/create-repo.js | 47 ++ .../test/utils/mock-preload-node-utils.js | 2 +- packages/ipfs-core/tsconfig.json | 3 + packages/ipfs-daemon/package.json | 15 +- packages/ipfs-daemon/src/index.js | 70 +-- packages/ipfs-daemon/tsconfig.json | 6 - packages/ipfs-grpc-client/.aegir.js | 2 +- packages/ipfs-grpc-client/package.json | 10 +- .../ipfs-grpc-client/src/core-api/add-all.js | 4 +- .../ipfs-grpc-client/src/core-api/files/ls.js | 4 +- packages/ipfs-grpc-server/package.json | 8 +- packages/ipfs-grpc-server/src/types.d.ts | 11 +- packages/ipfs-grpc-server/tsconfig.json | 5 + packages/ipfs-http-client/.aegir.js | 2 +- packages/ipfs-http-client/package.json | 26 +- packages/ipfs-http-client/src/add-all.js | 12 +- packages/ipfs-http-client/src/bitswap/stat.js | 4 +- .../ipfs-http-client/src/bitswap/unwant.js | 4 +- .../src/bitswap/wantlist-for-peer.js | 9 +- 
.../ipfs-http-client/src/bitswap/wantlist.js | 4 +- packages/ipfs-http-client/src/block/get.js | 7 +- packages/ipfs-http-client/src/block/put.js | 32 +- packages/ipfs-http-client/src/block/rm.js | 6 +- packages/ipfs-http-client/src/block/stat.js | 6 +- packages/ipfs-http-client/src/cat.js | 3 +- packages/ipfs-http-client/src/dag/get.js | 65 ++- packages/ipfs-http-client/src/dag/index.js | 10 +- packages/ipfs-http-client/src/dag/put.js | 95 ++-- packages/ipfs-http-client/src/dag/resolve.js | 4 +- packages/ipfs-http-client/src/dag/tree.js | 19 - .../ipfs-http-client/src/dht/find-provs.js | 3 +- packages/ipfs-http-client/src/dht/provide.js | 8 +- packages/ipfs-http-client/src/dht/put.js | 2 - packages/ipfs-http-client/src/dht/query.js | 4 +- packages/ipfs-http-client/src/files/cp.js | 9 +- packages/ipfs-http-client/src/files/flush.js | 4 +- packages/ipfs-http-client/src/files/ls.js | 8 +- packages/ipfs-http-client/src/files/mv.js | 3 +- packages/ipfs-http-client/src/files/stat.js | 8 +- packages/ipfs-http-client/src/files/write.js | 2 +- packages/ipfs-http-client/src/get.js | 4 +- packages/ipfs-http-client/src/index.js | 51 +- packages/ipfs-http-client/src/lib/core.js | 2 +- .../ipfs-http-client/src/lib/ipld-formats.js | 63 --- .../ipfs-http-client/src/lib/parse-mtime.js | 77 +++ packages/ipfs-http-client/src/lib/resolve.js | 67 +++ .../src/lib/to-url-search-params.js | 2 +- packages/ipfs-http-client/src/ls.js | 8 +- packages/ipfs-http-client/src/object/data.js | 4 +- packages/ipfs-http-client/src/object/get.js | 17 +- packages/ipfs-http-client/src/object/index.js | 5 +- packages/ipfs-http-client/src/object/links.js | 11 +- packages/ipfs-http-client/src/object/new.js | 4 +- .../src/object/patch/add-link.js | 7 +- .../src/object/patch/append-data.js | 6 +- .../src/object/patch/rm-link.js | 6 +- .../src/object/patch/set-data.js | 12 +- packages/ipfs-http-client/src/object/put.js | 100 +--- packages/ipfs-http-client/src/object/stat.js | 11 +- 
packages/ipfs-http-client/src/pin/add-all.js | 6 +- packages/ipfs-http-client/src/pin/ls.js | 4 +- .../ipfs-http-client/src/pin/remote/index.js | 8 +- .../src/pin/remote/service.js | 4 +- packages/ipfs-http-client/src/pin/rm-all.js | 6 +- packages/ipfs-http-client/src/refs/index.js | 9 +- packages/ipfs-http-client/src/repo/gc.js | 4 +- packages/ipfs-http-client/src/types.d.ts | 23 +- packages/ipfs-http-client/test/dag.spec.js | 63 ++- .../ipfs-http-client/test/exports.spec.js | 8 +- packages/ipfs-http-client/test/files.spec.js | 3 +- .../ipfs-http-client/test/utils/factory.js | 2 +- packages/ipfs-http-gateway/package.json | 16 +- packages/ipfs-http-gateway/src/index.js | 1 - .../src/resources/gateway.js | 11 +- .../ipfs-http-gateway/test/routes.spec.js | 79 +-- packages/ipfs-http-gateway/tsconfig.json | 5 +- packages/ipfs-http-server/package.json | 20 +- .../src/api/resources/bitswap.js | 21 +- .../src/api/resources/block.js | 44 +- .../ipfs-http-server/src/api/resources/dag.js | 47 +- .../ipfs-http-server/src/api/resources/dht.js | 4 +- .../src/api/resources/files-regular.js | 13 +- .../src/api/resources/files/flush.js | 7 +- .../src/api/resources/files/ls.js | 19 +- .../src/api/resources/files/stat.js | 6 +- .../src/api/resources/object.js | 240 ++++++--- .../ipfs-http-server/src/api/resources/pin.js | 23 +- .../src/api/resources/ping.js | 2 +- .../src/api/resources/resolve.js | 2 +- .../src/api/resources/stats.js | 2 +- packages/ipfs-http-server/src/index.js | 8 +- packages/ipfs-http-server/src/types.d.ts | 2 - packages/ipfs-http-server/src/utils/joi.js | 28 +- .../ipfs-http-server/test/inject/bitswap.js | 61 +-- .../ipfs-http-server/test/inject/block.js | 118 ++--- packages/ipfs-http-server/test/inject/dag.js | 105 +++- packages/ipfs-http-server/test/inject/dht.js | 16 +- .../ipfs-http-server/test/inject/files.js | 33 +- .../ipfs-http-server/test/inject/mfs/flush.js | 15 +- .../ipfs-http-server/test/inject/mfs/ls.js | 14 +- 
.../ipfs-http-server/test/inject/mfs/stat.js | 19 +- packages/ipfs-http-server/test/inject/name.js | 4 +- .../ipfs-http-server/test/inject/object.js | 500 ++++++++++-------- packages/ipfs-http-server/test/inject/pin.js | 84 ++- packages/ipfs-http-server/test/inject/ping.js | 9 - packages/ipfs-http-server/test/inject/repo.js | 6 +- .../ipfs-http-server/test/inject/resolve.js | 25 +- .../ipfs-http-server/test/inject/stats.js | 3 +- packages/ipfs-http-server/tsconfig.json | 4 +- .../ipfs-message-port-client/package.json | 6 +- .../ipfs-message-port-client/src/block.js | 17 +- .../src/client/query.js | 2 +- packages/ipfs-message-port-client/src/core.js | 6 +- packages/ipfs-message-port-client/src/dag.js | 19 +- .../ipfs-message-port-client/src/files.js | 4 +- packages/ipfs-message-port-protocol/README.md | 32 +- .../ipfs-message-port-protocol/package.json | 7 +- .../ipfs-message-port-protocol/src/block.js | 30 +- .../ipfs-message-port-protocol/src/cid.js | 28 +- .../ipfs-message-port-protocol/src/dag.js | 4 +- .../test/block.browser.js | 30 +- .../test/cid.browser.js | 14 +- .../test/cid.spec.js | 20 +- .../test/dag.browser.js | 16 +- .../test/dag.spec.js | 22 +- packages/ipfs-message-port-server/.aegir.js | 2 +- .../ipfs-message-port-server/package.json | 6 +- .../ipfs-message-port-server/src/block.js | 29 +- packages/ipfs-message-port-server/src/core.js | 2 +- packages/ipfs-message-port-server/src/dag.js | 32 +- .../test/basic.spec.js | 3 - .../test/transfer.spec.js | 7 +- .../ipfs-message-port-server/tsconfig.json | 4 +- packages/ipfs/.aegir.js | 2 +- packages/ipfs/package.json | 10 +- packages/ipfs/test/interface-http-go.js | 21 +- packages/ipfs/test/interface-http-js.js | 3 - packages/ipfs/test/utils/factory.js | 2 +- 476 files changed, 6057 insertions(+), 5885 deletions(-) delete mode 100644 examples/explore-ethereum-blockchain/CHANGELOG.md delete mode 100644 examples/explore-ethereum-blockchain/README.md delete mode 100644 
examples/explore-ethereum-blockchain/eth-stuffs/block_302515 delete mode 100644 examples/explore-ethereum-blockchain/eth-stuffs/block_302516 delete mode 100644 examples/explore-ethereum-blockchain/eth-stuffs/block_302517 delete mode 100644 examples/explore-ethereum-blockchain/eth-stuffs/state_000017_302516 delete mode 100644 examples/explore-ethereum-blockchain/eth-stuffs/state_00001_302516 delete mode 100644 examples/explore-ethereum-blockchain/eth-stuffs/state_0000_302516 delete mode 100644 examples/explore-ethereum-blockchain/eth-stuffs/state_000_302516 delete mode 100644 examples/explore-ethereum-blockchain/eth-stuffs/state_00_302516 delete mode 100644 examples/explore-ethereum-blockchain/eth-stuffs/state_0_302516 delete mode 100644 examples/explore-ethereum-blockchain/eth-stuffs/state_r_302516 delete mode 100755 examples/explore-ethereum-blockchain/load-eth-stuffs.sh delete mode 100644 examples/explore-ethereum-blockchain/package.json delete mode 100644 examples/explore-ethereum-blockchain/test.js rename examples/http-client-name-api/{ => public}/index.html (100%) rename examples/http-client-name-api/{ => public}/index.js (100%) delete mode 100644 examples/traverse-ipld-graphs/eth.js delete mode 100644 examples/traverse-ipld-graphs/git.js delete mode 100644 examples/traverse-ipld-graphs/tree.js rename packages/ipfs/package-list.json => package-list.json (76%) delete mode 100644 packages/interface-ipfs-core/src/dag/tree.js delete mode 100644 packages/interface-ipfs-core/src/object/utils.js create mode 100644 packages/interface-ipfs-core/src/utils/blockstore-adapter.js create mode 100644 packages/ipfs-cli/src/commands/cid/base32.js create mode 100644 packages/ipfs-cli/src/commands/cid/bases.js create mode 100644 packages/ipfs-cli/src/commands/cid/codecs.js create mode 100644 packages/ipfs-cli/src/commands/cid/format.js create mode 100644 packages/ipfs-cli/src/commands/cid/hashes.js create mode 100644 packages/ipfs-cli/test/cid.js delete mode 100644 
packages/ipfs-core-utils/src/cid.js create mode 100644 packages/ipfs-core-utils/src/multibases.js create mode 100644 packages/ipfs-core-utils/src/multicodecs.js create mode 100644 packages/ipfs-core-utils/src/multihashes.js create mode 100644 packages/ipfs-core-utils/src/types.d.ts create mode 100644 packages/ipfs-core/src/block-storage.js delete mode 100644 packages/ipfs-core/src/components/dag/tree.js delete mode 100644 packages/ipfs-core/src/components/gc-lock.js delete mode 100644 packages/ipfs-core/src/components/ipld.js delete mode 100644 packages/ipfs-core/src/components/pin/pin-manager.js delete mode 100644 packages/ipfs-core/src/runtime/ipld.js create mode 100644 packages/ipfs-core/test/block-storage.spec.js delete mode 100644 packages/ipfs-core/test/fixtures/planets/mercury/wiki.md delete mode 100644 packages/ipfs-core/test/fixtures/planets/solar-system.md delete mode 100644 packages/ipfs-core/test/node.js delete mode 100644 packages/ipfs-core/test/utils.js create mode 100644 packages/ipfs-core/test/utils.spec.js create mode 100644 packages/ipfs-core/test/utils/codecs.js create mode 100644 packages/ipfs-core/test/utils/create-backend.js delete mode 100644 packages/ipfs-core/test/utils/create-repo-browser.js delete mode 100644 packages/ipfs-core/test/utils/create-repo-nodejs.js create mode 100644 packages/ipfs-core/test/utils/create-repo.js delete mode 100644 packages/ipfs-http-client/src/dag/tree.js delete mode 100644 packages/ipfs-http-client/src/lib/ipld-formats.js create mode 100644 packages/ipfs-http-client/src/lib/parse-mtime.js create mode 100644 packages/ipfs-http-client/src/lib/resolve.js diff --git a/.github/workflows/bundlesize.yml b/.github/workflows/bundlesize.yml index dad3e1ab7d..53dfdbf100 100644 --- a/.github/workflows/bundlesize.yml +++ b/.github/workflows/bundlesize.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1804-README.md strategy: matrix: - 
node-version: [14.x] + node-version: [16.x] project: - packages/ipfs - packages/ipfs-core diff --git a/.github/workflows/typecheck.yml b/.github/workflows/typecheck.yml index b64158cba4..23105459f9 100644 --- a/.github/workflows/typecheck.yml +++ b/.github/workflows/typecheck.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + node-version: [16.x] project: - packages/ipfs - packages/ipfs-cli diff --git a/.gitignore b/.gitignore index 4da7986f91..58411cd23c 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ dist build bundle.js tsconfig-types.aegir.json +tsconfig-check.aegir.json .tsbuildinfo # Deployment files diff --git a/README.md b/README.md index 019fb749c6..6434402028 100644 --- a/README.md +++ b/README.md @@ -42,12 +42,15 @@ We've come a long way, but this project is still in Alpha, lots of development i ## Table of Contents - [Getting started](#getting-started) + - [Install as a CLI user](#install-as-a-cli-user) + - [Install as an application developer](#install-as-an-application-developer) - [Documentation](#documentation) - [Structure](#structure) +- [Packages](#packages) - [Want to hack on IPFS?](#want-to-hack-on-ipfs) - [License](#license) -## Getting Started +## Getting Started ### Install as a CLI user @@ -92,6 +95,7 @@ console.info(cid) ## Documentation +* [Concepts](https://docs.ipfs.io/concepts/) * [Config](./docs/CONFIG.md) * [Core API](./docs/core-api) * [Examples](./examples) @@ -102,9 +106,18 @@ console.info(cid) This project is broken into several modules, their purposes are: * [`/packages/interface-ipfs-core`](./packages/interface-ipfs-core) Tests to ensure adherence of an implementation to the spec -* [`/packages/ipfs`](./packages/ipfs) The core implementation +* [`/packages/ipfs`](./packages/ipfs) An aggregator module that bundles the core implementation, the CLI, HTTP API server and daemon +* [`/packages/ipfs-cli`](./packages/ipfs-cli) A CLI to the core implementation +* 
[`/packages/ipfs-core`](./packages/ipfs-core) The core implementation +* [`/packages/ipfs-core-types`](./packages/ipfs-core-types) Typescript definitions for the core API * [`/packages/ipfs-core-utils`](./packages/ipfs-core-utils) Helpers and utilities common to core and the HTTP RPC API client +* [`/packages/ipfs-daemon`](./packages/ipfs-daemon) Run js-IPFS as a background daemon +* [`/packages/ipfs-grpc-client`](./packages/ipfs-grpc-client) A gRPC client for js-IPFS +* [`/packages/ipfs-grpc-protocol`](./packages/ipfs-grpc-protocol) Shared module between the gRPC client and server +* [`/packages/ipfs-grpc-server`](./packages/ipfs-grpc-server) A gRPC-over-websockets server for js-IPFS * [`/packages/ipfs-http-client`](./packages/ipfs-http-client) A client for the RPC-over-HTTP API presented by both js-ipfs and go-ipfs +* [`/packages/ipfs-http-gateway`](./packages/ipfs-http-gateway) JS implementation of the [IPFS HTTP Gateway](https://docs.ipfs.io/concepts/ipfs-gateway/) +* [`/packages/ipfs-http-server`](./packages/ipfs-http-server) JS implementation of the [IPFS HTTP API](https://docs.ipfs.io/reference/http/api/) * [`/packages/ipfs-http-client`](./packages/ipfs-http-client) A client for the RPC-over-HTTP API presented by both js-ipfs and go-ipfs * [`/packages/ipfs-message-port-client`](./packages/ipfs-message-port-client) A client for the RPC-over-message-port API presented by js-ipfs running in a shared worker * [`/packages/ipfs-message-port-protocol`](./packages/ipfs-message-port-protocol) Code shared by the message port client & server @@ -117,14 +130,11 @@ List of the main packages that make up the IPFS ecosystem.
| Package | Version | Deps | CI/Travis | Coverage | Lead Maintainer | | ---------|---------|---------|---------|---------|--------- | | **Files** | -| [`ipfs-unixfs-exporter`](//github.com/ipfs/js-ipfs-unixfs) | [![npm](https://img.shields.io/npm/v/ipfs-unixfs-exporter.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-unixfs/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-unixfs.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-unixfs) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-unixfs/master)](https://travis-ci.com/ipfs/js-ipfs-unixfs) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-unixfs/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-unixfs) | [Alex Potsides](mailto:alex.potsides@protocol.ai) | -| [`ipfs-unixfs-importer`](//github.com/ipfs/js-ipfs-unixfs) | [![npm](https://img.shields.io/npm/v/ipfs-unixfs-importer.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-unixfs/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-unixfs.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-unixfs) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-unixfs/master)](https://travis-ci.com/ipfs/js-ipfs-unixfs) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-unixfs/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-unixfs) | [Alex Potsides](mailto:alex.potsides@protocol.ai) | | [`ipfs-unixfs`](//github.com/ipfs/js-ipfs-unixfs) | [![npm](https://img.shields.io/npm/v/ipfs-unixfs.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-unixfs/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-unixfs.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-unixfs) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-unixfs/master)](https://travis-ci.com/ipfs/js-ipfs-unixfs) | 
[![codecov](https://codecov.io/gh/ipfs/js-ipfs-unixfs/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-unixfs) | [Alex Potsides](mailto:alex.potsides@protocol.ai) | | **Repo** | | [`ipfs-repo`](//github.com/ipfs/js-ipfs-repo) | [![npm](https://img.shields.io/npm/v/ipfs-repo.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-repo/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-repo.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-repo) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo/master)](https://travis-ci.com/ipfs/js-ipfs-repo) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-repo) | [Alex Potsides](mailto:alex@achingbrain.net) | +| [`ipfs-repo-migrations`](//github.com/ipfs/js-ipfs-repo-migrations) | [![npm](https://img.shields.io/npm/v/ipfs-repo-migrations.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-repo-migrations/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-repo-migrations.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-repo-migrations) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo-migrations/master)](https://travis-ci.com/ipfs/js-ipfs-repo-migrations) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations) | N/A | | **Exchange** | -| [`ipfs-block-service`](//github.com/ipfs/js-ipfs-block-service) | [![npm](https://img.shields.io/npm/v/ipfs-block-service.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-block-service/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-block-service.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-block-service) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-block-service/master)](https://travis-ci.com/ipfs/js-ipfs-block-service) | 
[![codecov](https://codecov.io/gh/ipfs/js-ipfs-block-service/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-block-service) | [Volker Mische](mailto:volker.mische@gmail.com) | -| [`ipfs-block`](//github.com/ipfs/js-ipfs-block) | [![npm](https://img.shields.io/npm/v/ipfs-block.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-block/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-block.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-block) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-block/master)](https://travis-ci.com/ipfs/js-ipfs-block) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-block/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-block) | [Volker Mische](mailto:volker.mische@gmail.com) | | [`ipfs-bitswap`](//github.com/ipfs/js-ipfs-bitswap) | [![npm](https://img.shields.io/npm/v/ipfs-bitswap.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-bitswap/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-bitswap.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-bitswap) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-bitswap/master)](https://travis-ci.com/ipfs/js-ipfs-bitswap) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-bitswap/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-bitswap) | [Dirk McCormick](mailto:dirk@protocol.ai) | | **IPNS** | | [`ipns`](//github.com/ipfs/js-ipns) | [![npm](https://img.shields.io/npm/v/ipns.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipns/releases) | [![Deps](https://david-dm.org/ipfs/js-ipns.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipns) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipns/master)](https://travis-ci.com/ipfs/js-ipns) | [![codecov](https://codecov.io/gh/ipfs/js-ipns/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipns) | [Vasco 
Santos](mailto:vasco.santos@moxy.studio) | @@ -135,7 +145,6 @@ List of the main packages that make up the IPFS ecosystem. | [`ipfsd-ctl`](//github.com/ipfs/js-ipfsd-ctl) | [![npm](https://img.shields.io/npm/v/ipfsd-ctl.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfsd-ctl/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfsd-ctl.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfsd-ctl) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfsd-ctl/master)](https://travis-ci.com/ipfs/js-ipfsd-ctl) | [![codecov](https://codecov.io/gh/ipfs/js-ipfsd-ctl/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfsd-ctl) | [Hugo Dias](mailto:mail@hugodias.me) | | [`is-ipfs`](//github.com/ipfs/is-ipfs) | [![npm](https://img.shields.io/npm/v/is-ipfs.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/is-ipfs/releases) | [![Deps](https://david-dm.org/ipfs/is-ipfs.svg?style=flat-square)](https://david-dm.org/ipfs/is-ipfs) | [![Travis CI](https://flat.badgen.net/travis/ipfs/is-ipfs/master)](https://travis-ci.com/ipfs/is-ipfs) | [![codecov](https://codecov.io/gh/ipfs/is-ipfs/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/is-ipfs) | [Marcin Rataj](mailto:lidel@lidel.org) | | [`aegir`](//github.com/ipfs/aegir) | [![npm](https://img.shields.io/npm/v/aegir.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/aegir/releases) | [![Deps](https://david-dm.org/ipfs/aegir.svg?style=flat-square)](https://david-dm.org/ipfs/aegir) | [![Travis CI](https://flat.badgen.net/travis/ipfs/aegir/master)](https://travis-ci.com/ipfs/aegir) | [![codecov](https://codecov.io/gh/ipfs/aegir/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/aegir) | [Hugo Dias](mailto:hugomrdias@gmail.com) | -| [`ipfs-repo-migrations`](//github.com/ipfs/js-ipfs-repo-migrations) | 
[![npm](https://img.shields.io/npm/v/ipfs-repo-migrations.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-repo-migrations/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-repo-migrations.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-repo-migrations) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo-migrations/master)](https://travis-ci.com/ipfs/js-ipfs-repo-migrations) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations) | N/A | | **libp2p** | | [`libp2p`](//github.com/libp2p/js-libp2p) | [![npm](https://img.shields.io/npm/v/libp2p.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p/releases) | [![Deps](https://david-dm.org/libp2p/js-libp2p.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p) | [![Travis CI](https://flat.badgen.net/travis/libp2p/js-libp2p/master)](https://travis-ci.com/libp2p/js-libp2p) | [![codecov](https://codecov.io/gh/libp2p/js-libp2p/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-libp2p) | [Jacob Heun](mailto:jacobheun@gmail.com) | | [`peer-id`](//github.com/libp2p/js-peer-id) | [![npm](https://img.shields.io/npm/v/peer-id.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-peer-id/releases) | [![Deps](https://david-dm.org/libp2p/js-peer-id.svg?style=flat-square)](https://david-dm.org/libp2p/js-peer-id) | [![Travis CI](https://flat.badgen.net/travis/libp2p/js-peer-id/master)](https://travis-ci.com/libp2p/js-peer-id) | [![codecov](https://codecov.io/gh/libp2p/js-peer-id/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-peer-id) | [Vasco Santos](mailto:santos.vasco10@gmail.com) | @@ -153,14 +162,12 @@ List of the main packages that make up the IPFS ecosystem. 
| [`libp2p-delegated-content-routing`](//github.com/libp2p/js-libp2p-delegated-content-routing) | [![npm](https://img.shields.io/npm/v/libp2p-delegated-content-routing.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-delegated-content-routing/releases) | [![Deps](https://david-dm.org/libp2p/js-libp2p-delegated-content-routing.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-delegated-content-routing) | [![Travis CI](https://flat.badgen.net/travis/libp2p/js-libp2p-delegated-content-routing/master)](https://travis-ci.com/libp2p/js-libp2p-delegated-content-routing) | [![codecov](https://codecov.io/gh/libp2p/js-libp2p-delegated-content-routing/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-libp2p-delegated-content-routing) | [Jacob Heun](mailto:jacobheun@gmail.com) | | [`libp2p-delegated-peer-routing`](//github.com/libp2p/js-libp2p-delegated-peer-routing) | [![npm](https://img.shields.io/npm/v/libp2p-delegated-peer-routing.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-delegated-peer-routing/releases) | [![Deps](https://david-dm.org/libp2p/js-libp2p-delegated-peer-routing.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-delegated-peer-routing) | [![Travis CI](https://flat.badgen.net/travis/libp2p/js-libp2p-delegated-peer-routing/master)](https://travis-ci.com/libp2p/js-libp2p-delegated-peer-routing) | [![codecov](https://codecov.io/gh/libp2p/js-libp2p-delegated-peer-routing/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-libp2p-delegated-peer-routing) | [Jacob Heun](mailto:jacobheun@gmail.com) | | **IPLD** | -| [`ipld`](//github.com/ipld/js-ipld) | [![npm](https://img.shields.io/npm/v/ipld.svg?maxAge=86400&style=flat-square)](//github.com/ipld/js-ipld/releases) | [![Deps](https://david-dm.org/ipld/js-ipld.svg?style=flat-square)](https://david-dm.org/ipld/js-ipld) | [![Travis 
CI](https://flat.badgen.net/travis/ipld/js-ipld/master)](https://travis-ci.com/ipld/js-ipld) | [![codecov](https://codecov.io/gh/ipld/js-ipld/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipld/js-ipld) | [Volker Mische](mailto:volker.mische@gmail.com) | -| [`ipld-dag-pb`](//github.com/ipld/js-ipld-dag-pb) | [![npm](https://img.shields.io/npm/v/ipld-dag-pb.svg?maxAge=86400&style=flat-square)](//github.com/ipld/js-ipld-dag-pb/releases) | [![Deps](https://david-dm.org/ipld/js-ipld-dag-pb.svg?style=flat-square)](https://david-dm.org/ipld/js-ipld-dag-pb) | [![Travis CI](https://flat.badgen.net/travis/ipld/js-ipld-dag-pb/master)](https://travis-ci.com/ipld/js-ipld-dag-pb) | [![codecov](https://codecov.io/gh/ipld/js-ipld-dag-pb/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipld/js-ipld-dag-pb) | [Volker Mische](mailto:volker.mische@gmail.com) | -| [`ipld-dag-cbor`](//github.com/ipld/js-ipld-dag-cbor) | [![npm](https://img.shields.io/npm/v/ipld-dag-cbor.svg?maxAge=86400&style=flat-square)](//github.com/ipld/js-ipld-dag-cbor/releases) | [![Deps](https://david-dm.org/ipld/js-ipld-dag-cbor.svg?style=flat-square)](https://david-dm.org/ipld/js-ipld-dag-cbor) | [![Travis CI](https://flat.badgen.net/travis/ipld/js-ipld-dag-cbor/master)](https://travis-ci.com/ipld/js-ipld-dag-cbor) | [![codecov](https://codecov.io/gh/ipld/js-ipld-dag-cbor/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipld/js-ipld-dag-cbor) | [Volker Mische](mailto:volker.mische@gmail.com) | +| [`@ipld/dag-pb`](//github.com/ipld/js-dag-pb) | [![npm](https://img.shields.io/npm/v/@ipld/dag-pb.svg?maxAge=86400&style=flat-square)](//github.com/ipld/js-dag-pb/releases) | [![Deps](https://david-dm.org/ipld/js-dag-pb.svg?style=flat-square)](https://david-dm.org/ipld/js-dag-pb) | [![Travis CI](https://flat.badgen.net/travis/ipld/js-dag-pb/master)](https://travis-ci.com/ipld/js-dag-pb) | 
[![codecov](https://codecov.io/gh/ipld/js-dag-pb/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipld/js-dag-pb) | N/A | +| [`@ipld/dag-cbor`](//github.com/ipld/js-dag-cbor) | [![npm](https://img.shields.io/npm/v/@ipld/dag-cbor.svg?maxAge=86400&style=flat-square)](//github.com/ipld/js-dag-cbor/releases) | [![Deps](https://david-dm.org/ipld/js-dag-cbor.svg?style=flat-square)](https://david-dm.org/ipld/js-dag-cbor) | [![Travis CI](https://flat.badgen.net/travis/ipld/js-dag-cbor/master)](https://travis-ci.com/ipld/js-dag-cbor) | [![codecov](https://codecov.io/gh/ipld/js-dag-cbor/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipld/js-dag-cbor) | N/A | | **Multiformats** | -| [`multihashing`](//github.com/multiformats/js-multihashing) | [![npm](https://img.shields.io/npm/v/multihashing.svg?maxAge=86400&style=flat-square)](//github.com/multiformats/js-multihashing/releases) | [![Deps](https://david-dm.org/multiformats/js-multihashing.svg?style=flat-square)](https://david-dm.org/multiformats/js-multihashing) | [![Travis CI](https://flat.badgen.net/travis/multiformats/js-multihashing/master)](https://travis-ci.com/multiformats/js-multihashing) | [![codecov](https://codecov.io/gh/multiformats/js-multihashing/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/multiformats/js-multihashing) | [Hugo Dias](mailto:mail@hugodias.me) | +| [`multiformats`](//github.com/multiformats/js-multiformats) | [![npm](https://img.shields.io/npm/v/multiformats.svg?maxAge=86400&style=flat-square)](//github.com/multiformats/js-multiformats/releases) | [![Deps](https://david-dm.org/multiformats/js-multiformats.svg?style=flat-square)](https://david-dm.org/multiformats/js-multiformats) | [![Travis CI](https://flat.badgen.net/travis/multiformats/js-multiformats/master)](https://travis-ci.com/multiformats/js-multiformats) | 
[![codecov](https://codecov.io/gh/multiformats/js-multiformats/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/multiformats/js-multiformats) | N/A | | [`mafmt`](//github.com/multiformats/js-mafmt) | [![npm](https://img.shields.io/npm/v/mafmt.svg?maxAge=86400&style=flat-square)](//github.com/multiformats/js-mafmt/releases) | [![Deps](https://david-dm.org/multiformats/js-mafmt.svg?style=flat-square)](https://david-dm.org/multiformats/js-mafmt) | [![Travis CI](https://flat.badgen.net/travis/multiformats/js-mafmt/master)](https://travis-ci.com/multiformats/js-mafmt) | [![codecov](https://codecov.io/gh/multiformats/js-mafmt/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/multiformats/js-mafmt) | [Vasco Santos](mailto:vasco.santos@moxy.studio) | | [`multiaddr`](//github.com/multiformats/js-multiaddr) | [![npm](https://img.shields.io/npm/v/multiaddr.svg?maxAge=86400&style=flat-square)](//github.com/multiformats/js-multiaddr/releases) | [![Deps](https://david-dm.org/multiformats/js-multiaddr.svg?style=flat-square)](https://david-dm.org/multiformats/js-multiaddr) | [![Travis CI](https://flat.badgen.net/travis/multiformats/js-multiaddr/master)](https://travis-ci.com/multiformats/js-multiaddr) | [![codecov](https://codecov.io/gh/multiformats/js-multiaddr/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/multiformats/js-multiaddr) | [Jacob Heun](mailto:jacobheun@gmail.com) | -| [`multihashes`](//github.com/multiformats/js-multihash) | [![npm](https://img.shields.io/npm/v/multihashes.svg?maxAge=86400&style=flat-square)](//github.com/multiformats/js-multihash/releases) | [![Deps](https://david-dm.org/multiformats/js-multihash.svg?style=flat-square)](https://david-dm.org/multiformats/js-multihash) | [![Travis CI](https://flat.badgen.net/travis/multiformats/js-multihash/master)](https://travis-ci.com/multiformats/js-multihash) | 
[![codecov](https://codecov.io/gh/multiformats/js-multihash/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/multiformats/js-multihash) | [David Dias](mailto:daviddias@ipfs.io) | > This table is generated using the module [`package-table`](https://www.npmjs.com/package/package-table) with `package-table --data=package-list.json`. diff --git a/docs/MIGRATION-TO-ASYNC-AWAIT.md b/docs/MIGRATION-TO-ASYNC-AWAIT.md index b9cb823987..9db9e634c2 100644 --- a/docs/MIGRATION-TO-ASYNC-AWAIT.md +++ b/docs/MIGRATION-TO-ASYNC-AWAIT.md @@ -94,20 +94,20 @@ Libp2p `PeerId` instances are no longer returned from the API. If your applicati Peer ID strings are also CIDs so converting them is simple: ```js -const peerId = PeerId.createFromCID(peerIdStr) +const peerId = PeerId.createFromB58String(peerIdStr) ``` You can get hold of the `PeerId` class using npm or in a script tag: ```js const PeerId = require('peer-id') -const peerId = PeerId.createFromCID(peerIdStr) +const peerId = PeerId.createFromB58String(peerIdStr) ``` ```html ``` @@ -120,7 +120,7 @@ Libp2p `PeerInfo` instances are no longer returned from the API. 
Instead, plain Instantiate a new `PeerInfo` and add addresses to it: ```js -const peerInfo = new PeerInfo(PeerId.createFromCID(info.id)) +const peerInfo = new PeerInfo(PeerId.createFromB58String(info.id)) info.addrs.forEach(addr => peerInfo.multiaddrs.add(addr)) ``` @@ -129,7 +129,7 @@ You can get hold of the `PeerInfo` class using npm or in a script tag: ```js const PeerInfo = require('peer-info') const PeerId = require('peer-id') -const peerInfo = new PeerInfo(PeerId.createFromCID(info.id)) +const peerInfo = new PeerInfo(PeerId.createFromB58String(info.id)) info.addrs.forEach(addr => peerInfo.multiaddrs.add(addr)) ``` @@ -137,7 +137,7 @@ info.addrs.forEach(addr => peerInfo.multiaddrs.add(addr)) ``` diff --git a/docs/core-api/BLOCK.md b/docs/core-api/BLOCK.md index e0f54f1e85..b8ad1e7495 100644 --- a/docs/core-api/BLOCK.md +++ b/docs/core-api/BLOCK.md @@ -105,9 +105,10 @@ console.log(block.cid.toString()) // the CID of the object // With custom format and hashtype through CID -const CID = require('cids') +const { CID } = require('multiformats/cid') +const dagPb = require('@ipld/dag-pb') const buf = new TextEncoder().encode('another serialized object') -const cid = new CID(1, 'dag-pb', multihash) +const cid = CID.createV1(dagPb.code, multihash) const block = await ipfs.block.put(blob, cid) @@ -211,7 +212,7 @@ the returned object has the following keys: ```JavaScript const multihashStr = 'QmQULBtTjNcMwMr4VMNknnVv3RpytrLSdgpvMcTnfNhrBJ' -const cid = new CID(multihashStr) +const cid = CID.parse(multihashStr) const stats = await ipfs.block.stat(cid) console.log(stats.cid.toString()) diff --git a/docs/core-api/DAG.md b/docs/core-api/DAG.md index 33d59f79e8..559065889c 100644 --- a/docs/core-api/DAG.md +++ b/docs/core-api/DAG.md @@ -62,7 +62,7 @@ An optional object which may have the following keys: ```JavaScript const obj = { simple: 'object' } -const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha3-512' }) +const cid = await ipfs.dag.put(obj, { 
format: 'dag-cbor', hashAlg: 'sha2-512' }) console.log(cid.toString()) // zBwWX9ecx5F4X54WAjmFLErnBT6ByfNxStr5ovowTL7AhaUR98RWvXPS1V3HqV1qs3r5Ec5ocv7eCdbqYQREXNUfYNuKG diff --git a/docs/core-api/OBJECT.md b/docs/core-api/OBJECT.md index 9382abcb33..c960b95e6b 100644 --- a/docs/core-api/OBJECT.md +++ b/docs/core-api/OBJECT.md @@ -343,7 +343,7 @@ An optional object which may have the following keys: const cid = await ipfs.object.patch.addLink(node, { name: 'some-link', size: 10, - cid: new CID('QmPTkMuuL6PD8L2SwTwbcs1NPg14U8mRzerB1ZrrBrkSDD') + cid: CID.parse('QmPTkMuuL6PD8L2SwTwbcs1NPg14U8mRzerB1ZrrBrkSDD') }) ``` @@ -357,7 +357,7 @@ The `DAGLink` to be added can also be passed as an object containing: `name`, `c const link = { name: 'Qmef7ScwzJUCg1zUSrCmPAz45m8uP5jU7SLgt2EffjBmbL', size: 37, - cid: new CID('Qmef7ScwzJUCg1zUSrCmPAz45m8uP5jU7SLgt2EffjBmbL') + cid: CID.parse('Qmef7ScwzJUCg1zUSrCmPAz45m8uP5jU7SLgt2EffjBmbL') }; ``` @@ -400,7 +400,7 @@ An optional object which may have the following keys: const cid = await ipfs.object.patch.rmLink(node, { name: 'some-link', size: 10, - cid: new CID('QmPTkMuuL6PD8L2SwTwbcs1NPg14U8mRzerB1ZrrBrkSDD') + cid: CID.parse('QmPTkMuuL6PD8L2SwTwbcs1NPg14U8mRzerB1ZrrBrkSDD') }) ``` diff --git a/docs/core-api/PIN.md b/docs/core-api/PIN.md index 54a25e3df4..35232b1222 100644 --- a/docs/core-api/PIN.md +++ b/docs/core-api/PIN.md @@ -86,7 +86,7 @@ An optional object which may have the following keys: ### Example ```JavaScript -const cid of ipfs.pin.add(new CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u')) +const cid = await ipfs.pin.add(CID.parse('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u')) console.log(cid) // Logs: // CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') @@ -130,7 +130,7 @@ Each yielded object has the form: ### Example ```JavaScript -for await (const cid of ipfs.pin.addAll(new CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u'))) { +for await (const cid of 
ipfs.pin.addAll(CID.parse('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u'))) { console.log(cid) } // Logs: @@ -178,7 +178,7 @@ for await (const { cid, type } of ipfs.pin.ls()) { ```JavaScript for await (const { cid, type } of ipfs.pin.ls({ - paths: [ new CID('Qmc5..'), new CID('QmZb..'), new CID('QmSo..') ] + paths: [ CID.parse('Qmc5..'), CID.parse('QmZb..'), CID.parse('QmSo..') ] })) { console.log({ cid, type }) } @@ -218,7 +218,7 @@ An optional object which may have the following keys: ### Example ```JavaScript -const cid of ipfs.pin.rm(new CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u')) +const cid = await ipfs.pin.rm(CID.parse('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u')) console.log(cid) // prints the CID that was unpinned // CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') @@ -254,7 +254,7 @@ An optional object which may have the following keys: ### Example ```JavaScript -for await (const cid of ipfs.pin.rmAll(new CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u'))) { +for await (const cid of ipfs.pin.rmAll(CID.parse('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u'))) { console.log(cid) } // prints the CIDs that were unpinned @@ -320,7 +320,7 @@ An object may have the following optional fields: | Name | Type | Default | Description | | ---- | ---- | ------- | ----------- | -| stat | `boolean` | `false` | If `true` will include service stats. | +| stat | `boolean` | `false` | If `true` will include service stats. 
| | timeout | `number` | `undefined` | A timeout in ms | | signal | [AbortSignal][] | `undefined` | Can be used to cancel any long running requests started as a result of this call | @@ -486,7 +486,7 @@ Status is one of the following string values: ### Example ```JavaScript -const cid = new CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') +const cid = CID.parse('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') const pin = await ipfs.pin.remote.add(cid, { service: 'pinata', name: 'block-party' diff --git a/examples/browser-add-readable-stream/index.js b/examples/browser-add-readable-stream/index.js index 4c870f2193..d03ef40d19 100644 --- a/examples/browser-add-readable-stream/index.js +++ b/examples/browser-add-readable-stream/index.js @@ -14,13 +14,14 @@ const main = async () => { const directoryHash = await streamFiles(ipfs, directoryName, inputFiles) - const fileList = await ipfs.ls(directoryHash) - log(`\n--\n\nDirectory contents:\n\n${directoryName}/ ${directoryHash}`) - fileList.forEach((file, index) => { - log(` ${index < fileList.length - 1 ? '\u251C' : '\u2514'}\u2500 ${file.name} ${file.path} ${file.hash}`) - }) + let index = 0 + + for await (const file of ipfs.ls(directoryHash)) { + log(` ${index < inputFiles.length - 1 ? 
'\u251C' : '\u2514'}\u2500 ${file.name} ${file.path} ${file.cid}`) + index++ + } } const createFiles = (directory) => { @@ -54,7 +55,7 @@ const streamFiles = async (ipfs, directory, files) => { const data = await ipfs.add(stream) - log(`Added ${data.path} hash: ${data.hash}`) + log(`Added ${data.path} hash: ${data.cid}`) // The last data event will contain the directory hash if (data.path === directory) { diff --git a/examples/browser-exchange-files/package.json b/examples/browser-exchange-files/package.json index 60f6636709..0714bf16fb 100644 --- a/examples/browser-exchange-files/package.json +++ b/examples/browser-exchange-files/package.json @@ -15,12 +15,12 @@ "execa": "^5.0.0", "http-server": "^0.12.3", "ipfs-http-client": "^50.1.2", - "uint8arrays": "^2.1.3" + "uint8arrays": "^2.1.6" }, "dependencies": { "ipfs": "^0.55.4", "it-all": "^1.0.4", - "libp2p-websockets": "^0.15.6", + "libp2p-websockets": "^0.16.1", "rimraf": "^3.0.2", "test-ipfs-example": "^3.0.0" } diff --git a/examples/browser-ipns-publish/index.js b/examples/browser-ipns-publish/index.js index e0e5ef72d2..d1dedb1b48 100644 --- a/examples/browser-ipns-publish/index.js +++ b/examples/browser-ipns-publish/index.js @@ -8,6 +8,7 @@ const last = require("it-last"); const cryptoKeys = require("human-crypto-keys"); // { getKeyPairFromSeed } const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') +const { sha256 } = require('multiformats/hashes/sha2') const { sleep, Logger, onEnterPress, catchAndLog } = require("./util"); @@ -142,11 +143,10 @@ async function main() { return new Promise(async (resolve, reject) => { try { // quick and dirty key gen, don't do this in real life - const key = await IPFS.multihashing.digest( + const key = await sha256.digest( uint8ArrayFromString(keyName + Math.random().toString(36).substring(2)), - "sha2-256" ); - const keyPair = await cryptoKeys.getKeyPairFromSeed(key, "rsa"); + const keyPair = await 
cryptoKeys.getKeyPairFromSeed(key.bytes, "rsa"); // put it on the browser IPNS keychain and name it await ipfsBrowser.key.import(keyName, keyPair.privateKey); diff --git a/examples/browser-ipns-publish/package.json b/examples/browser-ipns-publish/package.json index db9638da7e..9a754fb148 100644 --- a/examples/browser-ipns-publish/package.json +++ b/examples/browser-ipns-publish/package.json @@ -16,11 +16,11 @@ "human-crypto-keys": "^0.1.4", "ipfs": "^0.55.4", "ipfs-http-client": "^50.1.2", - "ipfs-utils": "^8.1.2", - "ipns": "^0.12.0", + "ipfs-utils": "^8.1.4", + "ipns": "^0.13.2", "it-last": "^1.0.4", "p-retry": "^4.2.0", - "uint8arrays": "^2.1.3" + "uint8arrays": "^2.1.6" }, "browserslist": [ "last 2 versions and not dead and > 2%" @@ -28,7 +28,7 @@ "devDependencies": { "delay": "^5.0.0", "execa": "^5.0.0", - "ipfsd-ctl": "^8.0.1", + "ipfsd-ctl": "^9.0.0", "go-ipfs": "0.8.0", "parcel": "2.0.0-beta.2", "path": "^0.12.7", diff --git a/examples/circuit-relaying/package.json b/examples/circuit-relaying/package.json index 25b1b60661..f994841d0a 100644 --- a/examples/circuit-relaying/package.json +++ b/examples/circuit-relaying/package.json @@ -17,8 +17,8 @@ "delay": "^5.0.0", "ipfs": "^0.55.4", "ipfs-pubsub-room": "^2.0.1", - "libp2p-websockets": "^0.15.6", - "uint8arrays": "^2.1.3" + "libp2p-websockets": "^0.16.1", + "uint8arrays": "^2.1.6" }, "devDependencies": { "execa": "^5.0.0", diff --git a/examples/custom-ipfs-repo/index.js b/examples/custom-ipfs-repo/index.js index d73c0086a0..d4343da3fa 100644 --- a/examples/custom-ipfs-repo/index.js +++ b/examples/custom-ipfs-repo/index.js @@ -1,73 +1,87 @@ 'use strict' const IPFS = require('ipfs') -const Repo = require('ipfs-repo') -const fsLock = require('ipfs-repo/src/lock') +const { + createRepo, + locks: { + fs: fsLock + } +} = require('ipfs-repo') const all = require('it-all') const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayConcat = require('uint8arrays/concat') +const DatastoreFS = 
require('datastore-fs') +const BlockstoreDatastoreAdapter = require('blockstore-datastore-adapter') -// Create our custom options -const customRepositoryOptions = { +// multiformat codecs to support +const codecs = [ + require('@ipld/dag-pb'), + require('@ipld/dag-cbor'), + require('multiformats/codecs/raw') +].reduce((acc, curr) => { + acc[curr.name] = curr + acc[curr.code] = curr - /** - * IPFS nodes store different information in separate storageBackends, or datastores. - * Each storage backend can use the same type of datastore or a different one — you - * could store your keys in a levelDB database while everything else is in files, - * for example. (See https://github.com/ipfs/interface-datastore for more about datastores.) - */ - storageBackends: { - root: require('datastore-fs'), // version and config data will be saved here - blocks: require('datastore-fs'), - keys: require('datastore-fs'), - datastore: require('datastore-fs') - }, + return acc +}, {}) - /** - * Storage Backend Options will get passed into the instantiation of their counterpart - * in `storageBackends`. If you create a custom datastore, this is where you can pass in - * custom constructor arguments. You can see an S3 datastore example at: - * https://github.com/ipfs/js-datastore-s3/tree/master/examples/full-s3-repo - * - * NOTE: The following options are being overriden for demonstration purposes only. - * In most instances you can simply use the default options, by not passing in any - * overrides, which is recommended if you have no need to override. - */ - storageBackendOptions: { - root: { - extension: '.ipfsroot', // Defaults to ''. 
Used by datastore-fs; Appended to all files - errorIfExists: false, // Used by datastore-fs; If the datastore exists, don't throw an error - createIfMissing: true // Used by datastore-fs; If the datastore doesn't exist yet, create it - }, - blocks: { - sharding: false, // Used by IPFSRepo Blockstore to determine sharding; Ignored by datastore-fs - extension: '.ipfsblock', // Defaults to '.data'. - errorIfExists: false, - createIfMissing: true - }, - keys: { - extension: '.ipfskey', // No extension by default - errorIfExists: false, - createIfMissing: true - }, - datastore: { - extension: '.ipfsds', // No extension by default - errorIfExists: false, - createIfMissing: true +async function main () { + const path = '/tmp/custom-repo/.ipfs' + + // Support dag-pb and dag-cbor at a minimum + const loadCodec = (nameOrCode) => { + if (codecs[nameOrCode]) { + return codecs[nameOrCode] } - }, - /** - * A custom lock can be added here. Or the build in Repo `fs` or `memory` locks can be used. - * See https://github.com/ipfs/js-ipfs-repo for more details on setting the lock. - */ - lock: fsLock -} + throw new Error(`Could not load codec for ${nameOrCode}`) + } -async function main () { // Initialize our IPFS node with the custom repo options const node = await IPFS.create({ - repo: new Repo('/tmp/custom-repo/.ipfs', customRepositoryOptions), + repo: createRepo(path, loadCodec, { + /** + * IPFS repos store different types of information in separate datastores. + * Each storage backend can use the same type of datastore or a different one — for example + * you could store your keys in a levelDB database while everything else is in files. + * See https://www.npmjs.com/package/interface-datastore for more about datastores. 
+ */ + root: new DatastoreFS(path, { + extension: '.ipfsroot', // Defaults to '', appended to all files + errorIfExists: false, // If the datastore exists, don't throw an error + createIfMissing: true // If the datastore doesn't exist yet, create it + }), + // blocks is a blockstore, all other backends are datastores - but we can wrap a datastore + // in an adapter to turn it into a blockstore + blocks: new BlockstoreDatastoreAdapter( + new DatastoreFS(`${path}/blocks`, { + extension: '.ipfsblock', + errorIfExists: false, + createIfMissing: true + }) + ), + keys: new DatastoreFS(`${path}/keys`, { + extension: '.ipfskey', + errorIfExists: false, + createIfMissing: true + }), + datastore: new DatastoreFS(`${path}/datastore`, { + extension: '.ipfsds', + errorIfExists: false, + createIfMissing: true + }), + pins: new DatastoreFS(`${path}/pins`, { + extension: '.ipfspin', + errorIfExists: false, + createIfMissing: true + }) + }, { + /** + * A custom lock can be added here. Or the build in Repo `fs` or `memory` locks can be used. + * See https://github.com/ipfs/js-ipfs-repo for more details on setting the lock. 
+ */ + lock: fsLock + }), // This just means we dont try to connect to the network which isn't necessary // to demonstrate custom repos diff --git a/examples/custom-ipfs-repo/package.json b/examples/custom-ipfs-repo/package.json index 61a6fa8d1a..946a458929 100644 --- a/examples/custom-ipfs-repo/package.json +++ b/examples/custom-ipfs-repo/package.json @@ -10,10 +10,14 @@ }, "license": "MIT", "dependencies": { - "datastore-fs": "4.0.0", + "@ipld/dag-cbor": "^6.0.5", + "@ipld/dag-pb": "^2.1.3", + "blockstore-datastore-adapter": "^1.0.0", + "datastore-fs": "^5.0.1", "ipfs": "^0.55.4", - "ipfs-repo": "^9.1.6", - "it-all": "^1.0.4" + "ipfs-repo": "^11.0.0", + "it-all": "^1.0.4", + "multiformats": "^9.4.1" }, "devDependencies": { "execa": "^5.0.0", diff --git a/examples/custom-ipld-formats/daemon-node.js b/examples/custom-ipld-formats/daemon-node.js index fae244020f..c50b50cb74 100644 --- a/examples/custom-ipld-formats/daemon-node.js +++ b/examples/custom-ipld-formats/daemon-node.js @@ -1,70 +1,33 @@ -// ordinarily we'd open a PR against the multicodec module to get our -// codec number added but since we're just testing we shim our new -// codec into the base-table.json file - this has to be done -// before requiring other modules as the int table will become read-only -const codecName = 'dag-test' -const codecNumber = 392091 - -const table = require('multicodec/src/base-table') -// @ts-ignore -table.baseTable = { - ...table.baseTable, - [codecName]: codecNumber -} - -// now require modules as usual const IPFSDaemon = require('ipfs-daemon') -const multihashing = require('multihashing-async') -const multihash = multihashing.multihash -const multicodec = require('multicodec') -const CID = require('cids') const ipfsHttpClient = require('ipfs-http-client') const uint8ArrayToString = require('uint8arrays/to-string') +const uint8ArrayFromString = require('uint8arrays/from-string') async function main () { - // see https://github.com/ipld/interface-ipld-format for the 
interface definition - const format = { - codec: codecNumber, - defaultHashAlg: multicodec.SHA2_256, - util: { - serialize (data) { - return Buffer.from(JSON.stringify(data)) - }, - deserialize (buf) { - return JSON.parse(uint8ArrayToString(buf)) - }, - async cid (buf) { - const multihash = await multihashing(buf, format.defaultHashAlg) - - return new CID(1, format.codec, multihash) - } - }, - resolver: { - resolve: (buf, path) => { - return { - value: format.util.deserialize(buf), - remainderPath: path - } - } - } + // see https://github.com/multiformats/js-multiformats#multicodec-encoders--decoders--codecs for the interface definition + const codec = { + name: 'dag-test', + code: 392091, + encode: (data) => uint8ArrayFromString(JSON.stringify(data)), + decode: (buf) => JSON.parse(uint8ArrayToString(buf)) } // start an IPFS Daemon const daemon = new IPFSDaemon({ ipld: { - formats: [ - format + codecs: [ + codec ] } }) await daemon.start() // in another process: - const client = ipfsHttpClient({ + const client = ipfsHttpClient.create({ url: `http://localhost:${daemon._httpApi._apiServers[0].info.port}`, ipld: { - formats: [ - format + codecs: [ + codec ] } }) @@ -74,8 +37,8 @@ async function main () { } const cid = await client.dag.put(data, { - format: codecName, - hashAlg: multihash.codes[format.defaultHashAlg] + format: 'dag-test', + hashAlg: 'sha2-256' }) console.info(`Put ${JSON.stringify(data)} = CID(${cid})`) diff --git a/examples/custom-ipld-formats/in-process-node.js b/examples/custom-ipld-formats/in-process-node.js index 3bfcee48e2..06f3153a62 100644 --- a/examples/custom-ipld-formats/in-process-node.js +++ b/examples/custom-ipld-formats/in-process-node.js @@ -1,47 +1,22 @@ -// ordinarily we'd open a PR against the multicodec module to get our -// codec number added but since we're just testing we shim our new -// codec into the base-table.json file - this has to be done -// before requiring other modules as the int table will become read-only -const 
codecName = 'dag-test' -const codecNumber = 392091 +'use strict' -const table = require('multicodec/src/base-table') -// @ts-ignore -table.baseTable = { - ...table.baseTable, - [codecName]: codecNumber -} - -// now require modules as usual const IPFS = require('ipfs-core') -const multihashing = require('multihashing-async') -const multicodec = require('multicodec') -const CID = require('cids') +const uint8ArrayToString = require('uint8arrays/to-string') +const uint8ArrayFromString = require('uint8arrays/from-string') async function main () { - // see https://github.com/ipld/interface-ipld-format for the interface definition - const format = { - codec: codecNumber, - defaultHashAlg: multicodec.SHA2_256, - util: { - serialize (data) { - return Buffer.from(JSON.stringify(data)) - }, - deserialize (buf) { - return JSON.parse(buf.toString('utf8')) - }, - async cid (buf) { - const multihash = await multihashing(buf, format.defaultHashAlg) - - return new CID(1, format.codec, multihash) - } - } + // see https://github.com/multiformats/js-multiformats#multicodec-encoders--decoders--codecs for the interface definition + const codec = { + name: 'dag-test', + code: 392091, + encode: (data) => uint8ArrayFromString(JSON.stringify(data)), + decode: (buf) => JSON.parse(uint8ArrayToString(buf)) } const node = await IPFS.create({ ipld: { - formats: [ - format + codecs: [ + codec ] } }) @@ -51,8 +26,8 @@ async function main () { } const cid = await node.dag.put(data, { - format: codecName, - hashAlg: format.defaultHashAlg + format: 'dag-test', + hashAlg: 'sha2-256' }) console.info(`Put ${JSON.stringify(data)} = CID(${cid})`) diff --git a/examples/custom-ipld-formats/package.json b/examples/custom-ipld-formats/package.json index 3a8201559e..48dbb44437 100644 --- a/examples/custom-ipld-formats/package.json +++ b/examples/custom-ipld-formats/package.json @@ -1,5 +1,5 @@ { - "name": "skipped-example-custom-ipld-formats", + "name": "example-custom-ipld-formats", "version": "1.0.0", 
"private": true, "scripts": { @@ -11,12 +11,11 @@ "test-ipfs-example": "^3.0.0" }, "dependencies": { - "cids": "^1.1.6", + "dag-jose": "^1.0.0", "ipfs-daemon": "^0.7.2", "ipfs-core": "^0.8.0", "ipfs-http-client": "^50.1.2", - "multicodec": "^3.0.1", - "multihashing-async": "^2.1.2", - "uint8arrays": "^2.1.3" + "multiformats": "^9.4.1", + "uint8arrays": "^2.1.6" } } diff --git a/examples/custom-libp2p/package.json b/examples/custom-libp2p/package.json index 77ce9d62a0..a6c4849c8b 100644 --- a/examples/custom-libp2p/package.json +++ b/examples/custom-libp2p/package.json @@ -11,9 +11,9 @@ "license": "MIT", "dependencies": { "ipfs": "^0.55.4", - "libp2p": "^0.31.6", + "libp2p": "^0.32.0", "libp2p-bootstrap": "^0.12.3", - "libp2p-kad-dht": "^0.22.0", + "libp2p-kad-dht": "^0.23.1", "libp2p-mdns": "^0.16.0", "libp2p-mplex": "^0.10.2", "libp2p-noise": "^3.0.0", diff --git a/examples/explore-ethereum-blockchain/CHANGELOG.md b/examples/explore-ethereum-blockchain/CHANGELOG.md deleted file mode 100644 index 7b352c392b..0000000000 --- a/examples/explore-ethereum-blockchain/CHANGELOG.md +++ /dev/null @@ -1,16 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. -See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. - -## [1.0.2](https://github.com/ipfs/js-ipfs/compare/example-explore-ethereum-blockchain@1.0.1...example-explore-ethereum-blockchain@1.0.2) (2020-04-08) - -**Note:** Version bump only for package example-explore-ethereum-blockchain - - - - - -## 1.0.1 (2020-03-31) - -**Note:** Version bump only for package example-explore-ethereum-blockchain diff --git a/examples/explore-ethereum-blockchain/README.md b/examples/explore-ethereum-blockchain/README.md deleted file mode 100644 index e88a326217..0000000000 --- a/examples/explore-ethereum-blockchain/README.md +++ /dev/null @@ -1,58 +0,0 @@ -# Use IPFS to explore the Ethereum Blockchain - -> This is a pre-example to a full Ethereum to IPFS bridge. 
It shows how to resolve Ethereum hashes through the IPFS DAG get API. - -## Before you start - -First clone this repo, install dependencies in the project root and build the project. - -```console -$ git clone https://github.com/ipfs/js-ipfs.git -$ cd js-ipfs -$ npm install -$ npm run build -``` - -## Running the example - -Make sure to have the latest js-ipfs installed by doing - -```sh -> npm install ipfs -g -``` - -If this is the first time you use js-ipfs, make sure to init your repo with - -```sh -> jsipfs init -``` - -## Load ethereum chain data into ipfs - -We've some ethereum blocks available at [eth-stuffs](./eth-stuffs) folder, you can add them to ipfs by running: - -```sh -> ./load-eth-stuffs.sh -bagiacgzah24drzou2jlkixpblbgbg6nxfrasoklzttzoht5hixhxz3rlncyq -bagiacgzanm7fiqpp7zcfehhd7apxpo4stdxx7wxn7eqrsgolj76t22dintgq -bagiacgzau7z2cpinv6u3rnsa73ssc46cpongn7zh6ztjwo7hh7ao42cj4lha -baglacgzaoc2jzhhxe6psrvq4ixlykpky2a23e3ltnhqpjrji3uyg6rnulxpq -baglacgza2vwiqlrqgkz5jdpkzmkqznntozcnnoycn4swddtxi7njcjsmfpda -baglacgza2vwiqlrqgkz5jdpkzmkqznntozcnnoycn4swddtxi7njcjsmfpda -baglacgzar5mhc23wfjccxfkpf23kbufqqjsqg4t7btaocaraycwlxbaerq2q -baglacgzasflr3hpssk5fpdheemyogi4df2zatql5z3pp7izau7d37ryijgca -baglacgzae6kz4xubhfygknh7yqk2fbk4xztmnvwkwm36knjwukmyfepjveda -baglacgzalfkeokwk7nvwenmr2k3e3f6khvch2bw54nhr25vjmjy2lshmx5mas -``` - -## Explore these blocks using the DAG API - -NOTE: Currently your js-ipfs daemon must NOT be running for the following examples to work. - -Some examples - -```sh -> jsipfs dag get bagiacgzah24drzou2jlkixpblbgbg6nxfrasoklzttzoht5hixhxz3rlncyq/ -> jsipfs dag get bagiacgzah24drzou2jlkixpblbgbg6nxfrasoklzttzoht5hixhxz3rlncyq/parentHash -... 
-``` diff --git a/examples/explore-ethereum-blockchain/eth-stuffs/block_302515 b/examples/explore-ethereum-blockchain/eth-stuffs/block_302515 deleted file mode 100644 index 1e6b8fdc5179ffc55027bd82152b9eb5b657faab..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 530 zcmey##J|Ay4NLWj2X~YtT;^%9w4W;28NW4Ov3H7YlQ+}+f=+9{1+r&+U)(z$TeZ4% zYx=e`SEMcrxk^vzDiQv$(CM$kmC5c?UZwlreJ=8M`rOBFKYYzvS(x3~cVdD6xt}3- zwSygZ*EuMveXz^a;JN*lYwp6&3ws!reOvRUYeCoxd2#78q09a^yFQzA;74Ax#{=n^ zIcpCvCh2c|oV-PGL9+3Z{mdSEeR2Bf#|6qZ=Wx%ep0&&;^J^}TV93=??t(iR8Sn!I zWN8*;4zX-PHV|tL$?Al_Iur=$Ybz2yP-Atp*+5ma*_KnWnc2RfC1S4T?f%q~jMSps z%)C?uU4_tK!v&dNC9|cU#H|rN5LNHYsDESzQ*mH)r+HOrY@0^O=MM9Z=PB$mbBpu- F007XRfQtYC diff --git a/examples/explore-ethereum-blockchain/eth-stuffs/block_302516 b/examples/explore-ethereum-blockchain/eth-stuffs/block_302516 deleted file mode 100644 index 9c4a667658040dd45491fa8890b5708b6f25236c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 530 zcmey##J}J+e|*K~4QG@pW}V`Y`|2-Xc<5Z#^2fKu8z+7I`ix)X#RAzgzAx?_kF8qW zx;1^w z^*cMsbmFCLzSB?Hy414Wee5q=wlb-3>4`V{BNm+AWb=mY80YeRWjlk5GIH-!>!q$# z{yx>Bz38C6Z_cqr3j~gHPjfN5zs>Q%^6d>zJfams(jIXp={QcE7Cy@~C`f82BLfz| zfe=#wG8S|a9Z-syMDDM&3!EzMPM4Gr=Sa9uFnN^|1|nJrS`rki75=k+{~=MCNv aCz^5a!S)QX#YIS~LwB85W$ diff --git a/examples/explore-ethereum-blockchain/eth-stuffs/block_302517 b/examples/explore-ethereum-blockchain/eth-stuffs/block_302517 deleted file mode 100644 index 3df292fd2d8fedbed277049fac4886e2c583ff0c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 527 zcmey##IqpVF2wQuKbIhx$3NuDcTbw}zW&zRpNf)_r~UuhW@O}?T_AhL_r<;Au~n;E zx2A79b4BX1kgN2Rt`gx73!VNtT$$`XCGiBu4K1sqMuw%lYY*C#S4SFh-&~+G;kPoI z-4j2%Io?zBwHABF?{7Bh)V!ROR^$`>@cb;^1z|7b#ih@LF8kl?`fSpHA9>Lp52R=2 ztUbV(q`&oX@)kvc3U)FwjC9aeWYBK0;%c*js(7<4k4!^L#9YnC{ZDo@Ki#|X`OzgWVzU9Q*Pf=4Rx70*RG(qu>Nf-{9 z7M`Gm!42Ig&bQ-N!L~8&3lg^U9Tvo&VeH3^lh9$Cvf(wLP+sJjR82aI;n&B8r&`5m 
zBrgiMH=LUiVR-v*8#%W`pm_x{mHA2SNHjECzs8-=YC4VKg)jAMxws%h3?Hk45`ci9 zgR9R?(0!JGz~V!Qf8O6pMil;URvk+(DN;BLkl4po`jIM>rqw{feL|iiK#oyAKmpw!JR6m!sa zD-(~H&yU2!W1!~Ww*A(2&w1~sh;nbPag%3YhYz$eUEN0vn0H`vSqh-^4%c%Xh|mJ)IR diff --git a/examples/explore-ethereum-blockchain/eth-stuffs/state_00_302516 b/examples/explore-ethereum-blockchain/eth-stuffs/state_00_302516 deleted file mode 100644 index e03c01b8c967bfafb62a9743c7de7a57bc7f145a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 532 zcmV+v0_*+x0ui8(SaEB1Dnu)lPcF8g(6EAK19E>1z~KTQz^uE31dKJHXFD&P7|bRc z02(ZqIW_(5PiBFxrMcmPtTp~2=##EQp!9KIR>!91bov*8+|%Oa>(`X0!bMH?IYlFo z*5yp{OQ1Gf`92;}*K7_;41oO6?WNRTG;Eh|Xk{bP+fPKmROm(%+WQE(*V@xGq(CO|O;L zpk%N48iwhKvJPJ0%zMp{jJlPz_yc98Y`_pWjr#z^X`s0`bwc7qrF0Y=Mj#td;#PZH zNB@vq?8JIw;Er##j;NpvY7qEhY^>bvE7^VSCG4Q$c=40);9G~b68BNXz_1aZEH~^K z9!6iwJW7?!El+<4fo6VCL>Us>57?Q$zy}UxpfzLn=2=_?qwJWtS4Jj!^T66f_G}{p ziL0qWbtT5%hM;!CO&2aqo0Jz4>9GzjeY1NRaQ`kfjgkin{q=sX6V5kB5oM@FAP>F~y=r{^*^ZlFY7%cn?^jsrOI@?mpD WNbzGJ*a|rQqrf@luYbat7AJt9-35&R diff --git a/examples/explore-ethereum-blockchain/eth-stuffs/state_0_302516 b/examples/explore-ethereum-blockchain/eth-stuffs/state_0_302516 deleted file mode 100644 index 43b36d5b2612f7e2f4b532473e4ba5f8905f6d39..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 532 zcmV+v0_*+x0ui8*R~?=5lDb!n7aKor0N9mDw`kde;W>k>5d1~j&nqx`aTfyRLv6G&iJ4_EtP!y+WF5FI6@E) zub})nX@E?$RZ{)Df9>(+;~*oT`!Cst?!5Ci#j!!}Y$&KRNH^`WPzZGf5%$|rA$)~Zabi;-gY4%gScfTkF3zOXq%D9MNj#lmYH|J zzg^~%r-EMAvsu`c+G5xD`X=VZYGC2KWKnL>pgA!i%JB^FPN5wzP?UJ;uop&*S?+_ivYAr^c9 diff --git a/examples/explore-ethereum-blockchain/eth-stuffs/state_r_302516 b/examples/explore-ethereum-blockchain/eth-stuffs/state_r_302516 deleted file mode 100644 index dfc5a46ccb..0000000000 --- a/examples/explore-ethereum-blockchain/eth-stuffs/state_r_302516 +++ /dev/null @@ -1 +0,0 @@ -ù '•ž^9pe4ÿÄ¢…\¾fÆÖʳ7å56¢™‚‘é© ¢|å­Ì¾ñ©·0Ûƒõ8·­oTÈ–}ÊŠfÛCÿÏ cì?’BàÉ£-B!^N¯1nǺšn nR“Á«[ÙÛ~Û 
YQžüfÏÞÌžPaùäù#f¢Õg÷¼ðµÑRØá|¾ ¦ÄìX¸âÉ72a˜±’Q2Ú±ëð…ß$³ûÅ¥² aé,¯ ˆ’`…F1Å?Qmÿë3#{fM ð(˜wg Ì™Þœw¸G¡2¹?‘óƒÁ?xN@Öõ#ï(§*(5º Öïh1“˜ŽäªüëÔN\Æ#Í”U€à²šÖr®°h UqT9|ü·>â"¦åIwêšïïŸÄ4š¾þn3. åR’I‘­ÅI~&V” üä$ö…œY£¹3Oʦr ´^”hÞ•ÿ‘\i/kqßu1€çU†uZ©*H9’ Ï7 ^4÷”Ùª†ìzÏ¿Ø´¾•†øžYëÛô r†ñ‚`;O'™ŠÂµ˜Nà½Y1Í—w á'˜2ôÔß 2J?ZêV¦‰ð©ÈÝß1yF1r66#Ïu‚ö7/.„ øÁktI2.Nˆ®8UŒeê Žâík y»ÜÜ|G â úÚV‰wÒ;Tk†Ûg¬ÐíI;ìêú+¼kíñ'Hiq¶€ \ No newline at end of file diff --git a/examples/explore-ethereum-blockchain/load-eth-stuffs.sh b/examples/explore-ethereum-blockchain/load-eth-stuffs.sh deleted file mode 100755 index 0c5c48074c..0000000000 --- a/examples/explore-ethereum-blockchain/load-eth-stuffs.sh +++ /dev/null @@ -1,13 +0,0 @@ -# Blocks -jsipfs block put --format=eth-block --mhtype=keccak-256 eth-stuffs/block_302515 -jsipfs block put --format=eth-block --mhtype=keccak-256 eth-stuffs/block_302516 -jsipfs block put --format=eth-block --mhtype=keccak-256 eth-stuffs/block_302517 - -# State Trie -jsipfs block put --format=eth-state-trie --mhtype=keccak-256 eth-stuffs/state_000017_302516 -jsipfs block put --format=eth-state-trie --mhtype=keccak-256 eth-stuffs/state_00001_302516 -jsipfs block put --format=eth-state-trie --mhtype=keccak-256 eth-stuffs/state_00001_302516 -jsipfs block put --format=eth-state-trie --mhtype=keccak-256 eth-stuffs/state_000_302516 -jsipfs block put --format=eth-state-trie --mhtype=keccak-256 eth-stuffs/state_00_302516 -jsipfs block put --format=eth-state-trie --mhtype=keccak-256 eth-stuffs/state_0_302516 -jsipfs block put --format=eth-state-trie --mhtype=keccak-256 eth-stuffs/state_r_302516 diff --git a/examples/explore-ethereum-blockchain/package.json b/examples/explore-ethereum-blockchain/package.json deleted file mode 100644 index 1c7ce4211b..0000000000 --- a/examples/explore-ethereum-blockchain/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "example-explore-ethereum-blockchain", - "description": "Exploring the ethereum blockchain with ipld", - "version": "1.0.2", - "main": "index.js", - 
"private": true, - "scripts": { - "test": "test-ipfs-example" - }, - "keywords": [], - "license": "MIT", - "devDependencies": { - "ipfs": "^0.55.4", - "ipfs-http-client": "^50.1.2", - "ipfsd-ctl": "^8.0.1", - "ipld-ethereum": "^6.0.0", - "test-ipfs-example": "^3.0.0" - } -} diff --git a/examples/explore-ethereum-blockchain/test.js b/examples/explore-ethereum-blockchain/test.js deleted file mode 100644 index c359779f59..0000000000 --- a/examples/explore-ethereum-blockchain/test.js +++ /dev/null @@ -1,55 +0,0 @@ -'use strict' - -const fs = require('fs-extra') -const path = require('path') -const { createFactory } = require('ipfsd-ctl') -const df = createFactory({ - ipfsModule: require('ipfs'), - ipfsHttpModule: require('ipfs-http-client') -}, { - js: { - ipfsBin: require.resolve('ipfs/src/cli.js') - } -}) - -async function runTest () { - const ipfsd = await df.spawn({ - type: 'proc', - test: true, - ipfsOptions: { - ipld: { - formats: [ - ...Object.values(require('ipld-ethereum')) - ] - } - } - }) - - const cids = [] - - console.info('Importing eth-blocks') - for (const file of await fs.readdir(path.join(__dirname, 'eth-stuffs'))) { - const ethBlock = await fs.readFile(path.join(__dirname, 'eth-stuffs', file)) - const block = await ipfsd.api.block.put(ethBlock, { - format: 'eth-block', - mhtype: 'keccak-256' - }) - - cids.push(block.cid) - } - - console.info('Reading eth-blocks back out') - for (const cid of cids) { - try { - await ipfsd.api.dag.get(cid) - console.error('block was ok', cid.toString()) - } catch (err) { - console.error('block was invalid', cid.toString()) - console.error(err) - } - } - - await ipfsd.stop() -} - -module.exports = runTest diff --git a/examples/http-client-browser-pubsub/package.json b/examples/http-client-browser-pubsub/package.json index 34d9954860..f2ea806f10 100644 --- a/examples/http-client-browser-pubsub/package.json +++ b/examples/http-client-browser-pubsub/package.json @@ -22,7 +22,7 @@ "execa": "^5.0.0", "go-ipfs": "0.8.0", 
"ipfs": "^0.55.4", - "ipfsd-ctl": "^8.0.1", + "ipfsd-ctl": "^9.0.0", "parcel": "2.0.0-beta.2", "test-ipfs-example": "^3.0.0" } diff --git a/examples/http-client-bundle-webpack/package.json b/examples/http-client-bundle-webpack/package.json index 6767c63227..b0ee7225ff 100644 --- a/examples/http-client-bundle-webpack/package.json +++ b/examples/http-client-bundle-webpack/package.json @@ -25,7 +25,7 @@ "copy-webpack-plugin": "^8.1.0", "execa": "^5.0.0", "ipfs": "^0.55.4", - "ipfsd-ctl": "^8.0.1", + "ipfsd-ctl": "^9.0.0", "react-hot-loader": "^4.12.21", "rimraf": "^3.0.2", "test-ipfs-example": "^3.0.0", diff --git a/examples/http-client-name-api/package.json b/examples/http-client-name-api/package.json index 1d93e11e71..06b3c78dab 100644 --- a/examples/http-client-name-api/package.json +++ b/examples/http-client-name-api/package.json @@ -6,8 +6,8 @@ "private": true, "scripts": { "clean": "rimraf ./dist ./.cache ./.parcel-cache", - "build": "parcel build index.html --no-scope-hoist", - "start": "parcel index.html -p 8888", + "build": "parcel build public/index.html --no-scope-hoist", + "start": "parcel public/index.html -p 8888", "test": "test-ipfs-example" }, "author": "Tara Vancil ", @@ -18,7 +18,7 @@ "devDependencies": { "execa": "^5.0.0", "go-ipfs": "0.8.0", - "ipfsd-ctl": "^8.0.1", + "ipfsd-ctl": "^9.0.0", "parcel": "2.0.0-beta.2", "rimraf": "^3.0.2", "test-ipfs-example": "^3.0.0" diff --git a/examples/http-client-name-api/index.html b/examples/http-client-name-api/public/index.html similarity index 100% rename from examples/http-client-name-api/index.html rename to examples/http-client-name-api/public/index.html diff --git a/examples/http-client-name-api/index.js b/examples/http-client-name-api/public/index.js similarity index 100% rename from examples/http-client-name-api/index.js rename to examples/http-client-name-api/public/index.js diff --git a/examples/ipfs-101/package.json b/examples/ipfs-101/package.json index 838e8b167b..51cc4d6fc1 100644 --- 
a/examples/ipfs-101/package.json +++ b/examples/ipfs-101/package.json @@ -12,7 +12,7 @@ "dependencies": { "ipfs": "^0.55.4", "it-all": "^1.0.4", - "uint8arrays": "^2.1.3" + "uint8arrays": "^2.1.6" }, "devDependencies": { "test-ipfs-example": "^3.0.0" diff --git a/examples/ipfs-client-add-files/package.json b/examples/ipfs-client-add-files/package.json index b95fe7ff03..54a3de0561 100644 --- a/examples/ipfs-client-add-files/package.json +++ b/examples/ipfs-client-add-files/package.json @@ -16,7 +16,7 @@ "devDependencies": { "execa": "^5.0.0", "ipfs": "^0.55.4", - "ipfsd-ctl": "^8.0.1", + "ipfsd-ctl": "^9.0.0", "parcel": "2.0.0-beta.2", "rimraf": "^3.0.2", "test-ipfs-example": "^3.0.0" diff --git a/examples/test-ipfs-example/package.json b/examples/test-ipfs-example/package.json index 5f4bcf4559..d1cd293400 100644 --- a/examples/test-ipfs-example/package.json +++ b/examples/test-ipfs-example/package.json @@ -12,7 +12,7 @@ "fs-extra": "^9.0.1", "http-server": "^0.12.3", "nightwatch": "^1.2.4", - "uint8arrays": "^2.1.3", + "uint8arrays": "^2.1.6", "which": "^2.0.1" } } diff --git a/examples/traverse-ipld-graphs/README.md b/examples/traverse-ipld-graphs/README.md index f43e57f484..c5851be143 100644 --- a/examples/traverse-ipld-graphs/README.md +++ b/examples/traverse-ipld-graphs/README.md @@ -50,15 +50,6 @@ See [ipld/interface-ipld-format](https://github.com/ipld/interface-ipld-format) ## [resolve through graphs of different kind](./get-path-accross-formats.js) -## [explore a graph with the .tree](./tree.js) - -## [traverse through a slice of the ethereum blockchain](./eth.js) - -## [traverse through a git repo](./git.js) -The example objects contained in "git-objects" have already been decompressed with zlib. 
An example of how to do this: - - $ cat .git/objects/7d/df25817f57c2090a9568cdb17106a76dad7d04 | zlib-flate -uncompress > 7ddf25817f57c2090a9568cdb17106a76dad7d04 - ## Video of the demos Find a video with a walkthrough of this examples on Youtube: diff --git a/examples/traverse-ipld-graphs/eth.js b/examples/traverse-ipld-graphs/eth.js deleted file mode 100644 index adcadf854b..0000000000 --- a/examples/traverse-ipld-graphs/eth.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict' - -const createNode = require('./create-node') -const path = require('path') -const multihashing = require('multihashing-async') -const Block = require('ipld-block') -const CID = require('cids') -const fs = require('fs').promises -const uint8ArrayToString = require('uint8arrays/to-string') - -async function main () { - const ipfs = await createNode({ - ipld: { - formats: [ - ...Object.values(require('ipld-ethereum')) - ] - } - }) - - console.log('\nStart of the example:') - - const ethBlocks = [ - path.join(__dirname, '/eth-blocks/block_302516'), - path.join(__dirname, '/eth-blocks/block_302517') - ] - - for (const ethBlockPath of ethBlocks) { - const data = await fs.readFile(ethBlockPath) - const multihash = await multihashing(data, 'keccak-256') - - const cid = new CID(1, 'eth-block', multihash) - // console.log(cid.toBaseEncodedString()) - - await ipfs.block.put(new Block(data, cid)) - } - - const block302516 = new CID('z43AaGEywSDX5PUJcrn5GfZmb6FjisJyR7uahhWPk456f7k7LDA') - const block302517 = new CID('z43AaGF42R2DXsU65bNnHRCypLPr9sg6D7CUws5raiqATVaB1jj') - let res - - res = await ipfs.dag.get(block302516, { path: 'number' }) - console.log(uint8ArrayToString(res.value, 'base16')) - - res = await ipfs.dag.get(block302517, { path: 'parent/number' }) - console.log(uint8ArrayToString(res.value, 'base16')) -} - -main() diff --git a/examples/traverse-ipld-graphs/get-path-accross-formats.js b/examples/traverse-ipld-graphs/get-path-accross-formats.js index 387ee40473..e7fc23df36 100644 --- 
a/examples/traverse-ipld-graphs/get-path-accross-formats.js +++ b/examples/traverse-ipld-graphs/get-path-accross-formats.js @@ -1,9 +1,6 @@ 'use strict' const createNode = require('./create-node') -const { - DAGNode -} = require('ipld-dag-pb') const uint8ArrayFromString = require('uint8arrays/from-string') async function main () { @@ -12,7 +9,10 @@ async function main () { console.log('\nStart of the example:') const someData = uint8ArrayFromString('capoeira') - const pbNode = new DAGNode(someData) + const pbNode = { + Data: someData, + Links: [] + } const pbNodeCid = await ipfs.dag.put(pbNode, { format: 'dag-pb', @@ -27,7 +27,7 @@ async function main () { const cborNodeCid = await ipfs.dag.put(myData, { format: 'dag-cbor', - hashAlg: 'sha3-512' + hashAlg: 'sha2-512' }) const result = await ipfs.dag.get(cborNodeCid, { diff --git a/examples/traverse-ipld-graphs/git.js b/examples/traverse-ipld-graphs/git.js deleted file mode 100644 index 00901e15d5..0000000000 --- a/examples/traverse-ipld-graphs/git.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -const createNode = require('./create-node') -const path = require('path') -const multihashing = require('multihashing-async') -const Block = require('ipld-block') -const CID = require('cids') -const fs = require('fs').promises -const uint8ArrayToString = require('uint8arrays/to-string') - -async function main () { - const ipfs = await createNode({ - ipld: { - formats: [ - require('ipld-git') - ] - } - }) - - console.log('\nStart of the example:') - - const gitObjects = [ - path.join(__dirname, '/git-objects/0f328c91df28c5c01b9e9f9f7e663191fa156593'), - path.join(__dirname, '/git-objects/177bf18bc707d82b21cdefd0b43b38fc8c5c13fe'), - path.join(__dirname, '/git-objects/23cc25f631cb076d5de5036c87678ea713cbaa6a'), - path.join(__dirname, '/git-objects/4e425dba7745a781f0712c9a01455899e8c0c249'), - path.join(__dirname, '/git-objects/6850c7be7136e6be00976ddbae80671b945c3e9d'), - path.join(__dirname, 
'/git-objects/a5095353cd62a178663dd26efc2d61f4f61bccbe'), - path.join(__dirname, '/git-objects/dc9bd15e8b81b6565d3736f9c308bd1bba60f33a'), - path.join(__dirname, '/git-objects/e68e6f6e31857877a79fd6b3956898436bb5a76f'), - path.join(__dirname, '/git-objects/ee62b3d206cb23f939208898f32d8708c0e3fa3c'), - path.join(__dirname, '/git-objects/ee71cef5001b84b0314438f76cf0acd338a2fd21') - ] - - await Promise.all(gitObjects.map(async gitObjectsPath => { - const data = await fs.readFile(gitObjectsPath) - const multihash = await multihashing(data, 'sha1') - - const cid = new CID(1, 'git-raw', multihash) - console.log(cid.toString()) - - await ipfs.block.put(new Block(data, cid)) - })) - - const v1tag = new CID('z8mWaGfwSWLMPJ6Q2JdsAjGiXTf61Nbue') - - async function logResult (fn, comment) { - const result = await fn() - - if (result.value instanceof Uint8Array) { // Blobs (files) are returned as buffer instance - result.value = uint8ArrayToString(result.value) - } - - console.log('-'.repeat(80)) - console.log(comment) - console.log(result.value) - } - - await logResult(() => ipfs.dag.get(v1tag), 'Tag object:') - await logResult(() => ipfs.dag.get(v1tag, { path: '/object/message' }), 'Tagged commit message:') - await logResult(() => ipfs.dag.get(v1tag, { path: '/object/parents/0/message' }), 'Parent of tagged commit:') - await logResult(() => ipfs.dag.get(v1tag, { path: '/object/tree/src/hash/hello/hash' }), '/src/hello file:') - await logResult(() => ipfs.dag.get(v1tag, { path: '/object/parents/0/tree/src/hash/hello/hash' }), 'previous version of /src/hello file:') -} - -main() diff --git a/examples/traverse-ipld-graphs/package.json b/examples/traverse-ipld-graphs/package.json index 8fdf3842f5..285dc41267 100644 --- a/examples/traverse-ipld-graphs/package.json +++ b/examples/traverse-ipld-graphs/package.json @@ -13,12 +13,10 @@ "test-ipfs-example": "^3.0.0" }, "dependencies": { - "cids": "^1.1.6", + "@ipld/dag-pb": "^2.1.3", "ipfs": "^0.55.4", - "ipld-block": "^0.11.0", - 
"ipld-dag-pb": "^0.22.1", "ipld-git": "^0.6.1", "ipld-ethereum": "^6.0.0", - "multihashing-async": "^2.1.2" + "multiformats": "^9.4.1" } } diff --git a/examples/traverse-ipld-graphs/test.js b/examples/traverse-ipld-graphs/test.js index 5ad2a2d0e2..f7b2b6e5de 100644 --- a/examples/traverse-ipld-graphs/test.js +++ b/examples/traverse-ipld-graphs/test.js @@ -18,15 +18,6 @@ async function runTest () { console.info('Testing get-path-accross-formats.js') await waitForOutput('capoeira', path.resolve(__dirname, 'get-path-accross-formats.js')) - console.info('Testing tree.js') - await waitForOutput("hobbies/0/Links", path.resolve(__dirname, 'tree.js')) - - console.info('Testing eth.js') - await waitForOutput('302516', path.resolve(__dirname, 'eth.js')) - - console.info('Testing git.js') - await waitForOutput("'hello world!'", path.resolve(__dirname, 'git.js')) - console.info('Done!') } diff --git a/examples/traverse-ipld-graphs/tree.js b/examples/traverse-ipld-graphs/tree.js deleted file mode 100644 index d9754591c1..0000000000 --- a/examples/traverse-ipld-graphs/tree.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict' - -const createNode = require('./create-node') -const { - DAGNode -} = require('ipld-dag-pb') -const uint8ArrayFromString = require('uint8arrays/from-string') - -async function main () { - const ipfs = await createNode() - - console.log('\nStart of the example:') - - const someData = uint8ArrayFromString('capoeira') - const pbNode = new DAGNode(someData) - - const pbNodeCid = await ipfs.dag.put(pbNode, { - format: 'dag-pb', - hashAlg: 'sha2-256' - }) - - const myData = { - name: 'David', - likes: ['js-ipfs', 'icecream', 'steak'], - hobbies: [pbNodeCid] - } - - const cborNodeCid = await ipfs.dag.put(myData, { - format: 'dag-cbor', - hashAlg: 'sha3-512' - }) - - for await (const path of ipfs.dag.tree(cborNodeCid, { recursive: true })) { - console.log(path) - } - - await ipfs.stop() -} - -main() diff --git a/examples/types-use-ipfs-from-ts/src/main.ts 
b/examples/types-use-ipfs-from-ts/src/main.ts index 5f84fd8408..545a6fa719 100644 --- a/examples/types-use-ipfs-from-ts/src/main.ts +++ b/examples/types-use-ipfs-from-ts/src/main.ts @@ -1,5 +1,5 @@ import { IPFS, create } from 'ipfs' -import CID from 'cids' +import { CID } from 'multiformats/cid' export default async function main() { const node = await create() @@ -14,7 +14,7 @@ export default async function main() { console.log('Added file:', file.path, file.cid.toString()) try { - // @ts-expect-error CID has no toUpperCase method + // @ts-expect-error CID has no toUpperCase method file.cid.toUpperCase() } catch (error) { diff --git a/examples/types-use-ipfs-from-typed-js/src/main.js b/examples/types-use-ipfs-from-typed-js/src/main.js index c5279b646c..1ad34c9393 100644 --- a/examples/types-use-ipfs-from-typed-js/src/main.js +++ b/examples/types-use-ipfs-from-typed-js/src/main.js @@ -1,7 +1,7 @@ const { create } = require('ipfs') /** * @typedef {import('ipfs').IPFS} IPFS - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID */ async function main () { @@ -17,7 +17,7 @@ async function main () { console.log('Added file:', file.path, file.cid.toString()) try { - // @ts-expect-error CID has no toUpperCase method + // @ts-expect-error CID has no toUpperCase method file.cid.toUpperCase() } catch(error) { diff --git a/packages/ipfs/package-list.json b/package-list.json similarity index 76% rename from packages/ipfs/package-list.json rename to package-list.json index 0dcf92686b..6087e02c9b 100644 --- a/packages/ipfs/package-list.json +++ b/package-list.json @@ -9,16 +9,13 @@ ], "rows": [ "Files", - ["ipfs/js-ipfs-unixfs", "ipfs-unixfs-exporter"], - ["ipfs/js-ipfs-unixfs", "ipfs-unixfs-importer"], ["ipfs/js-ipfs-unixfs", "ipfs-unixfs"], "Repo", ["ipfs/js-ipfs-repo", "ipfs-repo"], + ["ipfs/js-ipfs-repo-migrations", "ipfs-repo-migrations"], "Exchange", - ["ipfs/js-ipfs-block-service", "ipfs-block-service"], - ["ipfs/js-ipfs-block", 
"ipfs-block"], ["ipfs/js-ipfs-bitswap", "ipfs-bitswap"], "IPNS", @@ -31,7 +28,6 @@ ["ipfs/js-ipfsd-ctl", "ipfsd-ctl"], ["ipfs/is-ipfs", "is-ipfs"], ["ipfs/aegir", "aegir"], - ["ipfs/js-ipfs-repo-migrations", "ipfs-repo-migrations"], "libp2p", ["libp2p/js-libp2p", "libp2p"], @@ -51,14 +47,12 @@ ["libp2p/js-libp2p-delegated-peer-routing", "libp2p-delegated-peer-routing"], "IPLD", - ["ipld/js-ipld", "ipld"], - ["ipld/js-ipld-dag-pb", "ipld-dag-pb"], - ["ipld/js-ipld-dag-cbor", "ipld-dag-cbor"], + ["ipld/js-dag-pb", "@ipld/dag-pb"], + ["ipld/js-dag-cbor", "@ipld/dag-cbor"], "Multiformats", - ["multiformats/js-multihashing", "multihashing"], + ["multiformats/js-multiformats", "multiformats"], ["multiformats/js-mafmt", "mafmt"], - ["multiformats/js-multiaddr", "multiaddr"], - ["multiformats/js-multihash", "multihashes"] + ["multiformats/js-multiaddr", "multiaddr"] ] } diff --git a/package.json b/package.json index 72c51eb108..118b7a76da 100644 --- a/package.json +++ b/package.json @@ -257,7 +257,6 @@ "Jonathan Commins ", "leekt216 ", "Jacob Karlsson ", - "Jorropo ", - "Holodisc " + "noah the goodra " ] } diff --git a/packages/interface-ipfs-core/package.json b/packages/interface-ipfs-core/package.json index 44cefd1393..d8b120c182 100644 --- a/packages/interface-ipfs-core/package.json +++ b/packages/interface-ipfs-core/package.json @@ -36,23 +36,18 @@ ] }, "dependencies": { + "@ipld/dag-cbor": "^6.0.5", + "@ipld/dag-pb": "^2.1.3", "abort-controller": "^3.0.0", - "aegir": "^33.0.0", - "chai": "^4.2.0", - "chai-as-promised": "^7.1.1", - "chai-subset": "^1.6.0", - "cids": "^1.1.6", + "aegir": "^34.0.2", "delay": "^5.0.0", - "dirty-chai": "^2.0.1", "err-code": "^3.0.1", - "ipfs-unixfs": "^4.0.3", - "ipfs-unixfs-importer": "^7.0.3", - "ipfs-utils": "^8.1.2", - "ipld-block": "^0.11.0", - "ipld-dag-cbor": "^1.0.0", - "ipld-dag-pb": "^0.22.1", - "ipns": "^0.12.0", - "is-ipfs": "^5.0.0", + "interface-blockstore": "^1.0.0", + "ipfs-unixfs": "^5.0.0", + "ipfs-unixfs-importer": 
"^8.0.0", + "ipfs-utils": "^8.1.4", + "ipns": "^0.13.2", + "is-ipfs": "^6.0.1", "iso-random-stream": "^2.0.0", "it-all": "^1.0.4", "it-buffer-stream": "^2.0.0", @@ -62,18 +57,17 @@ "it-last": "^1.0.4", "it-map": "^1.0.4", "it-pushable": "^1.4.0", - "libp2p-crypto": "^0.19.3", - "libp2p-websockets": "^0.15.6", - "multiaddr": "^9.0.1", - "multibase": "^4.0.2", - "multihashing-async": "^2.1.2", + "libp2p-crypto": "^0.19.6", + "libp2p-websockets": "^0.16.1", + "multiaddr": "^10.0.0", + "multiformats": "^9.4.1", "nanoid": "^3.1.12", "native-abort-controller": "^1.0.3", "p-map": "^4.0.0", "p-retry": "^4.5.0", - "peer-id": "^0.14.1", + "peer-id": "^0.15.1", "readable-stream": "^3.4.0", - "uint8arrays": "^2.1.3" + "uint8arrays": "^2.1.6" }, "contributors": [ "Alan Shaw ", diff --git a/packages/interface-ipfs-core/src/add-all.js b/packages/interface-ipfs-core/src/add-all.js index 84667f2c8c..552c5a5120 100644 --- a/packages/interface-ipfs-core/src/add-all.js +++ b/packages/interface-ipfs-core/src/add-all.js @@ -15,6 +15,8 @@ const { isNode } = require('ipfs-utils/src/env') const { getDescribe, getIt, expect } = require('./utils/mocha') const uint8ArrayFromString = require('uint8arrays/from-string') const bufferStream = require('it-buffer-stream') +const raw = require('multiformats/codecs/raw') +const dagPb = require('@ipld/dag-pb') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -79,7 +81,7 @@ module.exports = (common, options) => { expect(filesAdded).to.have.length(1) const file = filesAdded[0] - expect(file.cid.toString()).to.equal(fixtures.smallFile.cid) + expect(file.cid.toString()).to.equal(fixtures.smallFile.cid.toString()) expect(file.path).to.equal('testfile.txt') }) @@ -124,7 +126,7 @@ module.exports = (common, options) => { const root = await last(ipfs.addAll(dirs)) expect(root.path).to.equal('test-folder') - expect(root.cid.toString()).to.equal(fixtures.directory.cid) + expect(root.cid.toString()).to.equal(fixtures.directory.cid.toString()) }) 
it('should add a nested directory as array of tupples with progress', async function () { @@ -162,7 +164,7 @@ module.exports = (common, options) => { const root = await last(ipfs.addAll(dirs, { progress: handler })) expect(progressSizes).to.deep.equal(total) expect(root.path).to.equal('test-folder') - expect(root.cid.toString()).to.equal(fixtures.directory.cid) + expect(root.cid.toString()).to.equal(fixtures.directory.cid.toString()) }) it('should receive progress path as empty string when adding content without paths', async function () { @@ -243,7 +245,7 @@ module.exports = (common, options) => { const file = filesAdded[0] const wrapped = filesAdded[1] - expect(file.cid.toString()).to.equal(fixtures.smallFile.cid) + expect(file.cid.toString()).to.equal(fixtures.smallFile.cid.toString()) expect(file.path).to.equal('testfile.txt') expect(wrapped.path).to.equal('') }) @@ -392,7 +394,7 @@ module.exports = (common, options) => { expect(files.length).to.equal(1) expect(files[0].cid.toString()).to.equal('bafkreifojmzibzlof6xyh5auu3r5vpu5l67brf3fitaf73isdlglqw2t7q') - expect(files[0].cid.codec).to.equal('raw') + expect(files[0].cid.code).to.equal(raw.code) expect(files[0].size).to.equal(3) }) @@ -411,7 +413,7 @@ module.exports = (common, options) => { expect(files.length).to.equal(1) expect(files[0].cid.toString()).to.equal('bafybeifmayxiu375ftlgydntjtffy5cssptjvxqw6vyuvtymntm37mpvua') - expect(files[0].cid.codec).to.equal('dag-pb') + expect(files[0].cid.code).to.equal(dagPb.code) expect(files[0].size).to.equal(18) }) @@ -427,7 +429,7 @@ module.exports = (common, options) => { expect(files.length).to.equal(1) expect(files[0].cid.toString()).to.equal('QmaZTosBmPwo9LQ48ESPCEcNuX2kFxkpXYy8i3rxqBdzRG') - expect(files[0].cid.codec).to.equal('dag-pb') + expect(files[0].cid.code).to.equal(dagPb.code) expect(files[0].size).to.equal(11) }) diff --git a/packages/interface-ipfs-core/src/add.js b/packages/interface-ipfs-core/src/add.js index d62af444dc..86f3de4e0b 100644 --- 
a/packages/interface-ipfs-core/src/add.js +++ b/packages/interface-ipfs-core/src/add.js @@ -11,6 +11,8 @@ const echoUrl = (text) => `${process.env.ECHO_SERVER}/download?data=${encodeURIC const redirectUrl = (url) => `${process.env.ECHO_SERVER}/redirect?to=${encodeURI(url)}` const uint8ArrayFromString = require('uint8arrays/from-string') const last = require('it-last') +const raw = require('multiformats/codecs/raw') +const dagPb = require('@ipld/dag-pb') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -76,8 +78,8 @@ module.exports = (common, options) => { it('should add a Uint8Array', async () => { const file = await ipfs.add(fixtures.smallFile.data) - expect(file.cid.toString()).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal(fixtures.smallFile.cid) + expect(file.cid.toString()).to.equal(fixtures.smallFile.cid.toString()) + expect(file.path).to.equal(fixtures.smallFile.cid.toString()) // file.size counts the overhead by IPLD nodes and unixfs protobuf expect(file.size).greaterThan(fixtures.smallFile.data.length) }) @@ -85,8 +87,8 @@ module.exports = (common, options) => { it('should add a BIG Uint8Array', async () => { const file = await ipfs.add(fixtures.bigFile.data) - expect(file.cid.toString()).to.equal(fixtures.bigFile.cid) - expect(file.path).to.equal(fixtures.bigFile.cid) + expect(file.cid.toString()).to.equal(fixtures.bigFile.cid.toString()) + expect(file.path).to.equal(fixtures.bigFile.cid.toString()) // file.size counts the overhead by IPLD nodes and unixfs protobuf expect(file.size).greaterThan(fixtures.bigFile.data.length) }) @@ -101,8 +103,8 @@ module.exports = (common, options) => { const file = await ipfs.add(fixtures.bigFile.data, { progress: handler }) - expect(file.cid.toString()).to.equal(fixtures.bigFile.cid) - expect(file.path).to.equal(fixtures.bigFile.cid) + expect(file.cid.toString()).to.equal(fixtures.bigFile.cid.toString()) + expect(file.path).to.equal(fixtures.bigFile.cid.toString()) 
expect(progCalled).to.be.true() expect(accumProgress).to.equal(fixtures.bigFile.data.length) }) @@ -117,8 +119,8 @@ module.exports = (common, options) => { const file = await ipfs.add(fixtures.emptyFile.data, { progress: handler }) - expect(file.cid.toString()).to.equal(fixtures.emptyFile.cid) - expect(file.path).to.equal(fixtures.emptyFile.cid) + expect(file.cid.toString()).to.equal(fixtures.emptyFile.cid.toString()) + expect(file.path).to.equal(fixtures.emptyFile.cid.toString()) expect(progCalled).to.be.true() expect(accumProgress).to.equal(fixtures.emptyFile.data.length) }) @@ -140,8 +142,8 @@ module.exports = (common, options) => { it('should add an empty file without progress enabled', async () => { const file = await ipfs.add(fixtures.emptyFile.data) - expect(file.cid.toString()).to.equal(fixtures.emptyFile.cid) - expect(file.path).to.equal(fixtures.emptyFile.cid) + expect(file.cid.toString()).to.equal(fixtures.emptyFile.cid.toString()) + expect(file.path).to.equal(fixtures.emptyFile.cid.toString()) }) it('should add a Uint8Array as tuple', async () => { @@ -149,7 +151,7 @@ module.exports = (common, options) => { const file = await ipfs.add(tuple) - expect(file.cid.toString()).to.equal(fixtures.smallFile.cid) + expect(file.cid.toString()).to.equal(fixtures.smallFile.cid.toString()) expect(file.path).to.equal('testfile.txt') }) @@ -204,7 +206,7 @@ module.exports = (common, options) => { const stats = await ipfs.files.stat(`/ipfs/${wrapper.cid}/testfile.txt`) - expect(`${stats.cid}`).to.equal(fixtures.smallFile.cid) + expect(`${stats.cid}`).to.equal(fixtures.smallFile.cid.toString()) }) it('should add with only-hash=true', async function () { @@ -349,7 +351,7 @@ module.exports = (common, options) => { }) expect(file.cid.toString()).to.equal('bafkreifojmzibzlof6xyh5auu3r5vpu5l67brf3fitaf73isdlglqw2t7q') - expect(file.cid.codec).to.equal('raw') + expect(file.cid.code).to.equal(raw.code) expect(file.size).to.equal(3) }) @@ -367,7 +369,7 @@ module.exports = 
(common, options) => { }) expect(file.cid.toString()).to.equal('bafybeifmayxiu375ftlgydntjtffy5cssptjvxqw6vyuvtymntm37mpvua') - expect(file.cid.codec).to.equal('dag-pb') + expect(file.cid.code).to.equal(dagPb.code) expect(file.size).to.equal(18) }) diff --git a/packages/interface-ipfs-core/src/bitswap/transfer.js b/packages/interface-ipfs-core/src/bitswap/transfer.js index 1af720055c..346017e60d 100644 --- a/packages/interface-ipfs-core/src/bitswap/transfer.js +++ b/packages/interface-ipfs-core/src/bitswap/transfer.js @@ -3,23 +3,13 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { isWebWorker } = require('ipfs-utils/src/env') -const CID = require('cids') const { randomBytes } = require('iso-random-stream') -const Block = require('ipld-block') const concat = require('it-concat') const { nanoid } = require('nanoid') const uint8ArrayFromString = require('uint8arrays/from-string') const pmap = require('p-map') -const multihashing = require('multihashing-async') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -const makeBlock = async () => { - const d = uint8ArrayFromString(`IPFS is awesome ${nanoid()}`) - const h = await multihashing(d, 'sha2-256') - - return new Block(d, new CID(h)) -} - /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** * @param {Factory} factory @@ -41,16 +31,16 @@ module.exports = (factory, options) => { const remote = (await factory.spawn({ type: isWebWorker ? 
'go' : undefined })).api const local = (await factory.spawn({ type: 'proc', ipfsOptions })).api await local.swarm.connect(remote.peerId.addresses[0]) - const block = await makeBlock() + const data = uint8ArrayFromString(`IPFS is awesome ${nanoid()}`) - await local.block.put(block) - const b = await remote.block.get(block.cid) + const cid = await local.block.put(data) + const b = await remote.block.get(cid) - expect(b.data).to.eql(block.data) + expect(b).to.equalBytes(data) }) it('3 peers', async () => { - const blocks = await Promise.all([...Array(6).keys()].map(() => makeBlock())) + const blocks = Array(6).fill(0).map(() => uint8ArrayFromString(`IPFS is awesome ${nanoid()}`)) const remote1 = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api const remote2 = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api const local = (await factory.spawn({ type: 'proc', ipfsOptions })).api @@ -58,17 +48,19 @@ module.exports = (factory, options) => { await local.swarm.connect(remote2.peerId.addresses[0]) await remote1.swarm.connect(remote2.peerId.addresses[0]) - await remote1.block.put(blocks[0]) - await remote1.block.put(blocks[1]) - await remote2.block.put(blocks[2]) - await remote2.block.put(blocks[3]) - await local.block.put(blocks[4]) - await local.block.put(blocks[5]) + // order is important + const cids = [] + cids.push(await remote1.block.put(blocks[0])) + cids.push(await remote1.block.put(blocks[1])) + cids.push(await remote2.block.put(blocks[2])) + cids.push(await remote2.block.put(blocks[3])) + cids.push(await local.block.put(blocks[4])) + cids.push(await local.block.put(blocks[5])) - await pmap(blocks, async (block) => { - expect(await remote1.block.get(block.cid)).to.eql(block) - expect(await remote2.block.get(block.cid)).to.eql(block) - expect(await local.block.get(block.cid)).to.eql(block) + await pmap(blocks, async (block, i) => { + expect(await remote1.block.get(cids[i])).to.eql(block) + expect(await 
remote2.block.get(cids[i])).to.eql(block) + expect(await local.block.get(cids[i])).to.eql(block) }, { concurrency: 3 }) }) }) diff --git a/packages/interface-ipfs-core/src/bitswap/wantlist-for-peer.js b/packages/interface-ipfs-core/src/bitswap/wantlist-for-peer.js index 1ca5dce2b9..72ee450d07 100644 --- a/packages/interface-ipfs-core/src/bitswap/wantlist-for-peer.js +++ b/packages/interface-ipfs-core/src/bitswap/wantlist-for-peer.js @@ -5,6 +5,7 @@ const { getDescribe, getIt } = require('../utils/mocha') const { waitForWantlistKey } = require('./utils') const { isWebWorker } = require('ipfs-utils/src/env') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') +const { CID } = require('multiformats/cid') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -28,7 +29,7 @@ module.exports = (common, options) => { // webworkers are not dialable because webrtc is not available ipfsB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api // Add key to the wantlist for ipfsB - ipfsB.block.get(key).catch(() => { /* is ok, expected on teardown */ }) + ipfsB.block.get(CID.parse(key)).catch(() => { /* is ok, expected on teardown */ }) await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) diff --git a/packages/interface-ipfs-core/src/bitswap/wantlist.js b/packages/interface-ipfs-core/src/bitswap/wantlist.js index c209b039f9..0fb961a5d1 100644 --- a/packages/interface-ipfs-core/src/bitswap/wantlist.js +++ b/packages/interface-ipfs-core/src/bitswap/wantlist.js @@ -6,7 +6,7 @@ const { waitForWantlistKey, waitForWantlistKeyToBeRemoved } = require('./utils') const { isWebWorker } = require('ipfs-utils/src/env') const testTimeout = require('../utils/test-timeout') const { AbortController } = require('native-abort-controller') -const CID = require('cids') +const { CID } = require('multiformats/cid') const delay = require('delay') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') @@ -32,7 +32,7 @@ 
module.exports = (common, options) => { // webworkers are not dialable because webrtc is not available ipfsB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api // Add key to the wantlist for ipfsB - ipfsB.block.get(key).catch(() => { /* is ok, expected on teardown */ }) + ipfsB.block.get(CID.parse(key)).catch(() => { /* is ok, expected on teardown */ }) await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) @@ -58,7 +58,7 @@ module.exports = (common, options) => { it('should remove blocks from the wantlist when requests are cancelled', async () => { const controller = new AbortController() - const cid = new CID('QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KaGa') + const cid = CID.parse('QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KaGa') const getPromise = ipfsA.dag.get(cid, { signal: controller.signal @@ -76,7 +76,7 @@ module.exports = (common, options) => { it('should keep blocks in the wantlist when only one request is cancelled', async () => { const controller = new AbortController() const otherController = new AbortController() - const cid = new CID('QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1Kaaa') + const cid = CID.parse('QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1Kaaa') const getPromise = ipfsA.dag.get(cid, { signal: controller.signal diff --git a/packages/interface-ipfs-core/src/block/get.js b/packages/interface-ipfs-core/src/block/get.js index c9790daa1f..bbb74ceb37 100644 --- a/packages/interface-ipfs-core/src/block/get.js +++ b/packages/interface-ipfs-core/src/block/get.js @@ -2,8 +2,8 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const multihashing = require('multihashing-async') -const CID = require('cids') +const { identity } = require('multiformats/hashes/identity') +const { CID } = require('multiformats/cid') const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') @@ -18,90 +18,76 @@ module.exports = (common, options) => { 
describe('.block.get', () => { const data = uint8ArrayFromString('blorb') - let ipfs, hash + let ipfs, cid before(async () => { ipfs = (await common.spawn()).api - const block = await ipfs.block.put(data) - hash = block.cid.multihash + cid = await ipfs.block.put(data) }) after(() => common.clean()) it('should respect timeout option when getting a block', () => { - return testTimeout(() => ipfs.block.get(new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rA3'), { + return testTimeout(() => ipfs.block.get(CID.parse('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rA3'), { timeout: 1 })) }) - it('should get by CID object', async () => { - const cid = new CID(hash) + it('should get by CID', async () => { const block = await ipfs.block.get(cid) - expect(block.data).to.eql(uint8ArrayFromString('blorb')) - expect(block.cid.multihash).to.eql(cid.multihash) - }) - - it('should get by CID in string', async () => { - const block = await ipfs.block.get(multihashing.multihash.toB58String(hash)) - - expect(block.data).to.eql(uint8ArrayFromString('blorb')) - expect(block.cid.multihash).to.eql(hash) + expect(block).to.equalBytes(uint8ArrayFromString('blorb')) }) it('should get an empty block', async () => { - const res = await ipfs.block.put(new Uint8Array(0), { + const cid = await ipfs.block.put(new Uint8Array(0), { format: 'dag-pb', mhtype: 'sha2-256', version: 0 }) - const block = await ipfs.block.get(res.cid) - - expect(block.data).to.eql(new Uint8Array(0)) + const block = await ipfs.block.get(cid) + expect(block).to.equalBytes(new Uint8Array(0)) }) it('should get a block added as CIDv0 with a CIDv1', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await ipfs.block.put(input, { version: 0 }) - - const cidv0 = res.cid + const cidv0 = await ipfs.block.put(input) expect(cidv0.version).to.equal(0) const cidv1 = cidv0.toV1() const block = await ipfs.block.get(cidv1) - expect(block.data).to.eql(input) + expect(block).to.equalBytes(input) }) 
it('should get a block added as CIDv1 with a CIDv0', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await ipfs.block.put(input, { version: 1 }) - - const cidv1 = res.cid + const cidv1 = await ipfs.block.put(input, { + version: 1, + format: 'dag-pb' + }) expect(cidv1.version).to.equal(1) const cidv0 = cidv1.toV0() const block = await ipfs.block.get(cidv0) - expect(block.data).to.eql(input) + expect(block).to.equalBytes(input) }) it('should get a block with an identity CID, without putting first', async () => { const identityData = uint8ArrayFromString('A16461736466190144', 'base16upper') - const identityHash = await multihashing(identityData, 'identity') - const identityCID = new CID(1, 'dag-cbor', identityHash) + const identityHash = await identity.digest(identityData) + const identityCID = CID.createV1(identity.code, identityHash) const block = await ipfs.block.get(identityCID) - expect(block.data).to.eql(identityData) + expect(block).to.equalBytes(identityData) }) it('should return an error for an invalid CID', () => { return expect(ipfs.block.get('Non-base58 character')).to.eventually.be.rejected .and.be.an.instanceOf(Error) - .and.have.property('message') - .that.includes('Non-base58 character') }) }) } diff --git a/packages/interface-ipfs-core/src/block/put.js b/packages/interface-ipfs-core/src/block/put.js index 3be1ca96a8..a25d4e5e86 100644 --- a/packages/interface-ipfs-core/src/block/put.js +++ b/packages/interface-ipfs-core/src/block/put.js @@ -2,11 +2,12 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const Block = require('ipld-block') -const multihash = require('multihashing-async').multihash -const CID = require('cids') +const { base58btc } = require('multiformats/bases/base58') +const { CID } = require('multiformats/cid') const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') +const raw = require('multiformats/codecs/raw') +const { sha512 } 
= require('multiformats/hashes/sha2') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -30,66 +31,37 @@ module.exports = (common, options) => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const blob = uint8ArrayFromString('blorb') - const block = await ipfs.block.put(blob) + const cid = await ipfs.block.put(blob) - expect(block.data).to.be.eql(blob) - expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) - }) - - it('should put a buffer, using CID', async () => { - const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const cid = new CID(expectedHash) - const blob = uint8ArrayFromString('blorb') - - const block = await ipfs.block.put(blob, { cid: cid }) - - expect(block.data).to.be.eql(blob) - expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) - }) - - it('should put a buffer, using CID string', async () => { - const expectedCid = 'bafyreietui4xdkiu4xvmx4fi2jivjtndbhb4drzpxomrjvd4mdz4w2avra' - const blob = uint8ArrayFromString(JSON.stringify({ hello: 'world' })) - - const block = await ipfs.block.put(blob, { cid: expectedCid }) - - expect(block.data).to.be.eql(blob) - expect(block.cid.toString()).to.eql(expectedCid) + expect(cid.toString()).to.equal(expectedHash) + expect(cid.bytes).to.equalBytes(base58btc.decode(`z${expectedHash}`)) }) it('should put a buffer, using options', async () => { const blob = uint8ArrayFromString(`TEST${Math.random()}`) - const block = await ipfs.block.put(blob, { + const cid = await ipfs.block.put(blob, { format: 'raw', mhtype: 'sha2-512', version: 1, pin: true }) - expect(block.data).to.be.eql(blob) - expect(block.cid.version).to.equal(1) - expect(block.cid.codec).to.equal('raw') - expect(multihash.decode(block.cid.multihash).name).to.equal('sha2-512') - expect(await all(ipfs.pin.ls({ paths: block.cid }))).to.have.lengthOf(1) + expect(cid.version).to.equal(1) + expect(cid.code).to.equal(raw.code) + 
expect(cid.multihash.code).to.equal(sha512.code) + + expect(await all(ipfs.pin.ls({ paths: cid }))).to.have.lengthOf(1) }) it('should put a Block instance', async () => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const cid = new CID(expectedHash) - const b = new Block(uint8ArrayFromString('blorb'), cid) - - const block = await ipfs.block.put(b) + const expectedCID = CID.parse(expectedHash) + const b = uint8ArrayFromString('blorb') - expect(block.data).to.eql(uint8ArrayFromString('blorb')) - expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) - }) - - it('should error with array of blocks', () => { - const blob = uint8ArrayFromString('blorb') + const cid = await ipfs.block.put(b) - return expect(ipfs.block.put([blob, blob])).to.eventually.be.rejected - .and.be.an.instanceOf(Error) + expect(cid.multihash.bytes).to.equalBytes(expectedCID.multihash.bytes) }) }) } diff --git a/packages/interface-ipfs-core/src/block/rm.js b/packages/interface-ipfs-core/src/block/rm.js index 80380cf159..1331b7d35e 100644 --- a/packages/interface-ipfs-core/src/block/rm.js +++ b/packages/interface-ipfs-core/src/block/rm.js @@ -7,7 +7,8 @@ const { nanoid } = require('nanoid') const all = require('it-all') const last = require('it-last') const drain = require('it-drain') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const raw = require('multiformats/codecs/raw') const testTimeout = require('../utils/test-timeout') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -27,7 +28,7 @@ module.exports = (common, options) => { after(() => common.clean()) it('should respect timeout option when removing a block', () => { - return testTimeout(() => drain(ipfs.block.rm(new CID('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn'), { + return testTimeout(() => drain(ipfs.block.rm(CID.parse('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn'), { timeout: 1 }))) }) @@ -41,7 +42,7 @@ module.exports = (common, options) => { 
// block should be present in the local store const localRefs = await all(ipfs.refs.local()) expect(localRefs).to.have.property('length').that.is.greaterThan(0) - expect(localRefs.find(ref => ref.ref === new CID(1, 'raw', cid.multihash).toString())).to.be.ok() + expect(localRefs.find(ref => ref.ref === CID.createV1(raw.code, cid.multihash).toString())).to.be.ok() const result = await all(ipfs.block.rm(cid)) expect(result).to.be.an('array').and.to.have.lengthOf(1) @@ -50,52 +51,28 @@ module.exports = (common, options) => { // did we actually remove the block? const localRefsAfterRemove = await all(ipfs.refs.local()) - expect(localRefsAfterRemove.find(ref => ref.ref === new CID(1, 'raw', cid.multihash).toString())).to.not.be.ok() - }) - - it('should remove by CID in string', async () => { - const cid = await ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', - hashAlg: 'sha2-256' - }) - const result = await all(ipfs.block.rm(cid.toString())) - - expect(result).to.be.an('array').and.to.have.lengthOf(1) - expect(result[0].cid.toString()).to.equal(cid.toString()) - expect(result[0]).to.not.have.property('error') - }) - - it('should remove by CID in buffer', async () => { - const cid = await ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', - hashAlg: 'sha2-256' - }) - const result = await all(ipfs.block.rm(cid.bytes)) - - expect(result).to.be.an('array').and.to.have.lengthOf(1) - expect(result[0].cid.toString()).to.equal(cid.toString()) - expect(result[0]).to.not.have.property('error') + expect(localRefsAfterRemove.find(ref => ref.ref === CID.createV1(raw.code, cid.multihash).toString())).to.not.be.ok() }) it('should remove multiple CIDs', async () => { - const cids = [ - await ipfs.dag.put(uint8ArrayFromString(nanoid()), { + const cids = await Promise.all([ + ipfs.dag.put(uint8ArrayFromString(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }), - await ipfs.dag.put(uint8ArrayFromString(nanoid()), { + 
ipfs.dag.put(uint8ArrayFromString(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }), - await ipfs.dag.put(uint8ArrayFromString(nanoid()), { + ipfs.dag.put(uint8ArrayFromString(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }) - ] + ]) const result = await all(ipfs.block.rm(cids)) - expect(result).to.be.an('array').and.to.have.lengthOf(3) + expect(result).to.have.lengthOf(3) result.forEach((res, index) => { expect(res.cid.toString()).to.equal(cids[index].toString()) diff --git a/packages/interface-ipfs-core/src/block/stat.js b/packages/interface-ipfs-core/src/block/stat.js index 72d71b4203..4b08e489d2 100644 --- a/packages/interface-ipfs-core/src/block/stat.js +++ b/packages/interface-ipfs-core/src/block/stat.js @@ -2,7 +2,7 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') @@ -17,27 +17,25 @@ module.exports = (common, options) => { describe('.block.stat', () => { const data = uint8ArrayFromString('blorb') - let ipfs, hash + let ipfs, cid before(async () => { ipfs = (await common.spawn()).api - const block = await ipfs.block.put(data) - hash = block.cid.multihash + cid = await ipfs.block.put(data) }) after(() => common.clean()) it('should respect timeout option when statting a block', () => { - return testTimeout(() => ipfs.block.stat(new CID('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn'), { + return testTimeout(() => ipfs.block.stat(CID.parse('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn'), { timeout: 1 })) }) it('should stat by CID', async () => { - const cid = new CID(hash) const stats = await ipfs.block.stat(cid) expect(stats.cid.toString()).to.equal(cid.toString()) - expect(stats).to.have.property('size') + expect(stats).to.have.property('size', data.length) }) it('should return error for missing argument', () => { diff --git 
a/packages/interface-ipfs-core/src/cat.js b/packages/interface-ipfs-core/src/cat.js index b44759e776..293afc3012 100644 --- a/packages/interface-ipfs-core/src/cat.js +++ b/packages/interface-ipfs-core/src/cat.js @@ -5,12 +5,13 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayConcat = require('uint8arrays/concat') const { fixtures } = require('./utils') -const CID = require('cids') +const { CID } = require('multiformats/cid') const all = require('it-all') const drain = require('it-drain') const { getDescribe, getIt, expect } = require('./utils/mocha') const testTimeout = require('./utils/test-timeout') const { importer } = require('ipfs-unixfs-importer') +const blockstore = require('./utils/blockstore-adapter') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -31,12 +32,12 @@ module.exports = (common, options) => { after(() => common.clean()) before(() => Promise.all([ - all(importer([{ content: fixtures.smallFile.data }], ipfs.block)), - all(importer([{ content: fixtures.bigFile.data }], ipfs.block)) + all(importer([{ content: fixtures.smallFile.data }], blockstore(ipfs))), + all(importer([{ content: fixtures.bigFile.data }], blockstore(ipfs))) ])) it('should respect timeout option when catting files', () => { - return testTimeout(() => drain(ipfs.cat(new CID('QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbS1'), { + return testTimeout(() => drain(ipfs.cat(CID.parse('QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbS1'), { timeout: 1 }))) }) @@ -47,14 +48,14 @@ module.exports = (common, options) => { }) it('should cat with a Uint8Array multihash', async () => { - const cid = new CID(fixtures.smallFile.cid).multihash + const cid = fixtures.smallFile.cid const data = uint8ArrayConcat(await all(ipfs.cat(cid))) expect(uint8ArrayToString(data)).to.contain('Plz add me!') }) it('should cat with a CID object', async () => { - const cid = new CID(fixtures.smallFile.cid) + 
const cid = fixtures.smallFile.cid const data = uint8ArrayConcat(await all(ipfs.cat(cid))) expect(uint8ArrayToString(data)).to.contain('Plz add me!') @@ -63,12 +64,11 @@ module.exports = (common, options) => { it('should cat a file added as CIDv0 with a CIDv1', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await all(importer([{ content: input }], ipfs.block)) + const res = await all(importer([{ content: input }], blockstore(ipfs))) - const cidv0 = res[0].cid - expect(cidv0.version).to.equal(0) + expect(res).to.have.nested.property('[0].cid.version', 0) - const cidv1 = cidv0.toV1() + const cidv1 = res[0].cid.toV1() const output = uint8ArrayConcat(await all(ipfs.cat(cidv1))) expect(output).to.eql(input) @@ -77,12 +77,11 @@ module.exports = (common, options) => { it('should cat a file added as CIDv1 with a CIDv0', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await all(importer([{ content: input }], ipfs.block, { cidVersion: 1, rawLeaves: false })) + const res = await all(importer([{ content: input }], blockstore(ipfs), { cidVersion: 1, rawLeaves: false })) - const cidv1 = res[0].cid - expect(cidv1.version).to.equal(1) + expect(res).to.have.nested.property('[0].cid.version', 1) - const cidv0 = cidv1.toV0() + const cidv0 = res[0].cid.toV0() const output = uint8ArrayConcat(await all(ipfs.cat(cidv0))) expect(output.slice()).to.eql(input) @@ -104,7 +103,7 @@ module.exports = (common, options) => { it('should cat with IPFS path, nested value', async () => { const fileToAdd = { path: 'a/testfile.txt', content: fixtures.smallFile.data } - const filesAdded = await all(importer([fileToAdd], ipfs.block)) + const filesAdded = await all(importer([fileToAdd], blockstore(ipfs))) const file = await filesAdded.find((f) => f.path === 'a') expect(file).to.exist() @@ -117,7 +116,7 @@ module.exports = (common, options) => { it('should cat with IPFS path, deeply nested value', async () => { const fileToAdd = 
{ path: 'a/b/testfile.txt', content: fixtures.smallFile.data } - const filesAdded = await all(importer([fileToAdd], ipfs.block)) + const filesAdded = await all(importer([fileToAdd], blockstore(ipfs))) const file = filesAdded.find((f) => f.path === 'a') expect(file).to.exist() @@ -145,7 +144,7 @@ module.exports = (common, options) => { it('should error on dir path', async () => { const file = { path: 'dir/testfile.txt', content: fixtures.smallFile.data } - const filesAdded = await all(importer([file], ipfs.block)) + const filesAdded = await all(importer([file], blockstore(ipfs))) expect(filesAdded.length).to.equal(2) const files = filesAdded.filter((file) => file.path === 'dir') diff --git a/packages/interface-ipfs-core/src/dag/get.js b/packages/interface-ipfs-core/src/dag/get.js index f41aaa6991..f063224914 100644 --- a/packages/interface-ipfs-core/src/dag/get.js +++ b/packages/interface-ipfs-core/src/dag/get.js @@ -2,16 +2,19 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const dagCBOR = require('ipld-dag-cbor') +const dagPB = require('@ipld/dag-pb') +const dagCBOR = require('@ipld/dag-cbor') const { importer } = require('ipfs-unixfs-importer') const { UnixFS } = require('ipfs-unixfs') const all = require('it-all') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') +const { base32 } = require('multiformats/bases/base32') const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') -const multihashing = require('multihashing-async') +const { identity } = require('multiformats/hashes/identity') +const dagCbor = require('@ipld/dag-cbor') +const blockstore = require('../utils/blockstore-adapter') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -37,26 +40,32 @@ module.exports = (common, options) => { before(async () 
=> { const someData = uint8ArrayFromString('some other data') - pbNode = new DAGNode(someData) + pbNode = { + Data: someData, + Links: [] + } cborNode = { data: someData } - nodePb = new DAGNode(uint8ArrayFromString('I am inside a Protobuf')) - cidPb = await dagPB.util.cid(nodePb.serialize()) + nodePb = { + Data: uint8ArrayFromString('I am inside a Protobuf'), + Links: [] + } + cidPb = CID.createV0(await sha256.digest(dagPB.encode(nodePb))) nodeCbor = { someData: 'I am inside a Cbor object', pb: cidPb } - cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) + cidCbor = CID.createV1(dagCBOR.code, await sha256.digest(dagCBOR.encode(nodeCbor))) await ipfs.dag.put(nodePb, { format: 'dag-pb', hashAlg: 'sha2-256' }) await ipfs.dag.put(nodeCbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) }) it('should respect timeout option when getting a DAG node', () => { - return testTimeout(() => ipfs.dag.get(new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ'), { + return testTimeout(() => ipfs.dag.get(CID.parse('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ'), { timeout: 1 })) }) @@ -70,7 +79,7 @@ module.exports = (common, options) => { const result = await ipfs.dag.get(cid) const node = result.value - expect(pbNode.toJSON()).to.eql(node.toJSON()) + expect(pbNode).to.eql(node) }) it('should get a dag-cbor node', async () => { @@ -92,8 +101,8 @@ module.exports = (common, options) => { const node = result.value - const cid = await dagPB.util.cid(node.serialize()) - expect(cid).to.eql(cidPb) + const cid = CID.createV0(await sha256.digest(dagPB.encode(node))) + expect(cid.equals(cidPb)).to.be.true() }) it('should get a dag-pb node local value', async function () { @@ -113,8 +122,8 @@ module.exports = (common, options) => { const node = result.value - const cid = await dagCBOR.util.cid(dagCBOR.util.serialize(node)) - expect(cid).to.eql(cidCbor) + const cid = CID.createV1(dagCBOR.code, await sha256.digest(dagCBOR.encode(node))) + 
expect(cid.equals(cidCbor)).to.be.true() }) it('should get a dag-cbor node local value', async () => { @@ -156,9 +165,16 @@ module.exports = (common, options) => { it('should get a node added as CIDv0 with a CIDv1', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const node = new DAGNode(input) + const node = { + Data: input, + Links: [] + } - const cid = await ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(node, { + format: 'dag-pb', + hashAlg: 'sha2-256', + version: 0 + }) expect(cid.version).to.equal(0) const cidv1 = cid.toV1() @@ -170,7 +186,7 @@ module.exports = (common, options) => { it('should get a node added as CIDv1 with a CIDv0', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await all(importer([{ content: input }], ipfs.block, { + const res = await all(importer([{ content: input }], blockstore(ipfs), { cidVersion: 1, rawLeaves: false })) @@ -190,8 +206,8 @@ module.exports = (common, options) => { } const cid = await ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) - expect(cid.codec).to.equal('dag-cbor') - expect(cid.toBaseEncodedString('base32')).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') + expect(cid.code).to.equal(dagCbor.code) + expect(cid.toString(base32)).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') const result = await ipfs.dag.get(cid, { path: 'foo' @@ -229,10 +245,10 @@ module.exports = (common, options) => { it('should be able to get a dag-cbor node with the identity hash', async () => { const identityData = uint8ArrayFromString('A16461736466190144', 'base16upper') - const identityHash = await multihashing(identityData, 'identity') - const identityCID = new CID(1, 'dag-cbor', identityHash) + const identityHash = await identity.digest(identityData) + const identityCID = CID.createV1(identity.code, identityHash) const result = await ipfs.dag.get(identityCID) - 
expect(result.value).to.deep.equal({ asdf: 324 }) + expect(result.value).to.deep.equal(identityData) }) it('should throw error for invalid string CID input', () => { diff --git a/packages/interface-ipfs-core/src/dag/index.js b/packages/interface-ipfs-core/src/dag/index.js index 7e9b65b519..097678e07f 100644 --- a/packages/interface-ipfs-core/src/dag/index.js +++ b/packages/interface-ipfs-core/src/dag/index.js @@ -4,8 +4,7 @@ const { createSuite } = require('../utils/suite') const tests = { get: require('./get'), put: require('./put'), - resolve: require('./resolve'), - tree: require('./tree') + resolve: require('./resolve') } module.exports = createSuite(tests) diff --git a/packages/interface-ipfs-core/src/dag/put.js b/packages/interface-ipfs-core/src/dag/put.js index ea8fe7ea21..e391b4c39f 100644 --- a/packages/interface-ipfs-core/src/dag/put.js +++ b/packages/interface-ipfs-core/src/dag/put.js @@ -2,11 +2,9 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const dagCBOR = require('ipld-dag-cbor') -const CID = require('cids') -const multihash = require('multihashing-async').multihash +const dagCbor = require('@ipld/dag-cbor') +const { CID } = require('multiformats/cid') +const { sha256, sha512 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -25,24 +23,13 @@ module.exports = (common, options) => { after(() => common.clean()) - let pbNode - let cborNode - - before((done) => { - const someData = uint8ArrayFromString('some data') - - try { - pbNode = new DAGNode(someData) - } catch (err) { - return done(err) - } - - cborNode = { - data: someData - } - - done() - }) + const pbNode = { + Data: uint8ArrayFromString('some data'), + Links: [] + } + const cborNode = { + data: uint8ArrayFromString('some other data') + } it('should put dag-pb with default hash 
func (sha2-256)', () => { return ipfs.dag.put(pbNode, { @@ -51,10 +38,10 @@ module.exports = (common, options) => { }) }) - it('should put dag-pb with custom hash func (sha3-512)', () => { + it('should put dag-pb with non-default hash func (sha2-512)', () => { return ipfs.dag.put(pbNode, { format: 'dag-pb', - hashAlg: 'sha3-512' + hashAlg: 'sha2-512' }) }) @@ -65,10 +52,10 @@ module.exports = (common, options) => { }) }) - it('should put dag-cbor with custom hash func (sha3-512)', () => { + it('should put dag-cbor with non-default hash func (sha2-512)', () => { return ipfs.dag.put(cborNode, { format: 'dag-cbor', - hashAlg: 'sha3-512' + hashAlg: 'sha2-512' }) }) @@ -78,9 +65,12 @@ module.exports = (common, options) => { hashAlg: 'sha2-256' }) expect(cid).to.exist() - expect(CID.isCID(cid)).to.equal(true) + expect(cid).to.be.an.instanceOf(CID) + + const bytes = dagCbor.encode(cborNode) + const hash = await sha256.digest(bytes) + const _cid = CID.createV1(dagCbor.code, hash) - const _cid = await dagCBOR.util.cid(dagCBOR.util.serialize(cborNode)) expect(cid.bytes).to.eql(_cid.bytes) }) @@ -90,17 +80,17 @@ module.exports = (common, options) => { it('should set defaults when calling put without options', async () => { const cid = await ipfs.dag.put(cborNode) - expect(cid.codec).to.equal('dag-cbor') - expect(multihash.decode(cid.multihash).name).to.equal('sha2-256') + expect(cid.code).to.equal(dagCbor.code) + expect(cid.multihash.code).to.equal(sha256.code) }) - it('should override hash algoritm default and resolve with it', async () => { + it('should override hash algorithm default and resolve with it', async () => { const cid = await ipfs.dag.put(cborNode, { format: 'dag-cbor', - hashAlg: 'sha3-512' + hashAlg: 'sha2-512' }) - expect(cid.codec).to.equal('dag-cbor') - expect(multihash.decode(cid.multihash).name).to.equal('sha3-512') + expect(cid.code).to.equal(dagCbor.code) + expect(cid.multihash.code).to.equal(sha512.code) }) it.skip('should put by passing the cid 
instead of format and hashAlg', (done) => {}) diff --git a/packages/interface-ipfs-core/src/dag/resolve.js b/packages/interface-ipfs-core/src/dag/resolve.js index 2f5359377a..230ddda5b7 100644 --- a/packages/interface-ipfs-core/src/dag/resolve.js +++ b/packages/interface-ipfs-core/src/dag/resolve.js @@ -1,8 +1,7 @@ /* eslint-env mocha */ 'use strict' -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode +const dagPB = require('@ipld/dag-pb') const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') const uint8ArrayFromString = require('uint8arrays/from-string') @@ -115,11 +114,21 @@ module.exports = (common, options) => { it('should resolve a path inside a dag-pb node linked to from another dag-pb node', async () => { const someData = uint8ArrayFromString('some other data') - const childNode = new DAGNode(someData) + const childNode = { + Data: someData, + Links: [] + } const childCid = await ipfs.dag.put(childNode, { format: 'dag-pb', hashAlg: 'sha2-256' }) - const linkToChildNode = await childNode.toDAGLink({ name: 'foo', cidVersion: 0 }) - const parentNode = new DAGNode(uint8ArrayFromString('derp'), [linkToChildNode]) + const linkToChildNode = { + Name: 'foo', + Tsize: dagPB.encode(childNode).length, + Hash: childCid + } + const parentNode = { + Data: uint8ArrayFromString('derp'), + Links: [linkToChildNode] + } const parentCid = await ipfs.dag.put(parentNode, { format: 'dag-pb', hashAlg: 'sha2-256' }) const result = await ipfs.dag.resolve(parentCid, { path: '/foo' }) diff --git a/packages/interface-ipfs-core/src/dag/tree.js b/packages/interface-ipfs-core/src/dag/tree.js deleted file mode 100644 index 236d332f57..0000000000 --- a/packages/interface-ipfs-core/src/dag/tree.js +++ /dev/null @@ -1,96 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const uint8ArrayFromString = require('uint8arrays/from-string') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const dagCBOR 
= require('ipld-dag-cbor') -const all = require('it-all') -const drain = require('it-drain') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const CID = require('cids') -const testTimeout = require('../utils/test-timeout') - -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ -/** - * @param {Factory} common - * @param {Object} options - */ -module.exports = (common, options) => { - const describe = getDescribe(options) - const it = getIt(options) - - describe('.dag.tree', () => { - let ipfs - - before(async () => { ipfs = (await common.spawn()).api }) - - after(() => common.clean()) - - let nodePb - let nodeCbor - let cidPb - let cidCbor - - before(async function () { - nodePb = new DAGNode(uint8ArrayFromString('I am inside a Protobuf')) - cidPb = await dagPB.util.cid(nodePb.serialize()) - - nodeCbor = { - someData: 'I am inside a Cbor object', - pb: cidPb - } - cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) - - await ipfs.dag.put(nodePb, { format: 'dag-pb', hashAlg: 'sha2-256' }) - await ipfs.dag.put(nodeCbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) - }) - - it('should respect timeout option when resolving a DAG tree', () => { - return testTimeout(() => drain(ipfs.dag.tree(new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rA8'), { - timeout: 1 - }))) - }) - - it('should get tree with CID', async () => { - const paths = await all(ipfs.dag.tree(cidCbor)) - expect(paths).to.eql([ - 'pb', - 'someData' - ]) - }) - - it('should get tree with CID and path', async () => { - const paths = await all(ipfs.dag.tree(cidCbor, { - path: 'someData' - })) - expect(paths).to.eql([]) - }) - - it('should get tree with CID recursive (accross different formats)', async () => { - const paths = await all(ipfs.dag.tree(cidCbor, { recursive: true })) - expect(paths).to.have.members([ - 'pb', - 'someData', - 'pb/Links', - 'pb/Data' - ]) - }) - - it('should get tree with CID and path recursive', async () => { - const paths = await 
all(ipfs.dag.tree(cidCbor, { - path: 'pb', - recursive: true - })) - expect(paths).to.have.members([ - 'Links', - 'Data' - ]) - }) - - it('should throw error for invalid CID input', () => { - return expect(all(ipfs.dag.tree('INVALID CID'))) - .to.eventually.be.rejected() - }) - }) -} diff --git a/packages/interface-ipfs-core/src/dht/provide.js b/packages/interface-ipfs-core/src/dht/provide.js index bcf18ae8ad..b75082ee4d 100644 --- a/packages/interface-ipfs-core/src/dht/provide.js +++ b/packages/interface-ipfs-core/src/dht/provide.js @@ -2,7 +2,7 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const CID = require('cids') +const { CID } = require('multiformats/cid') const all = require('it-all') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -35,7 +35,7 @@ module.exports = (common, options) => { }) it('should not provide if block not found locally', () => { - const cid = new CID('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ') + const cid = CID.parse('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ') return expect(all(ipfs.dht.provide(cid))).to.eventually.be.rejected .and.be.an.instanceOf(Error) diff --git a/packages/interface-ipfs-core/src/dht/utils.js b/packages/interface-ipfs-core/src/dht/utils.js index 96a391d989..42d934021d 100644 --- a/packages/interface-ipfs-core/src/dht/utils.js +++ b/packages/interface-ipfs-core/src/dht/utils.js @@ -1,11 +1,11 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const multihashing = require('multihashing-async') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') exports.fakeCid = async (data) => { const bytes = data || uint8ArrayFromString(`TEST${Math.random()}`) - const mh = await multihashing(bytes, 'sha2-256') - return new CID(0, 'dag-pb', mh) + const mh = await sha256.digest(bytes) + return CID.createV0(mh) } diff --git 
a/packages/interface-ipfs-core/src/files/cp.js b/packages/interface-ipfs-core/src/files/cp.js index 0ad20e99fc..c3a9682948 100644 --- a/packages/interface-ipfs-core/src/files/cp.js +++ b/packages/interface-ipfs-core/src/files/cp.js @@ -7,9 +7,8 @@ const { nanoid } = require('nanoid') const all = require('it-all') const { fixtures } = require('../utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -const mh = require('multihashing-async').multihash -const Block = require('ipld-block') -const CID = require('cids') +const { identity } = require('multiformats/hashes/identity') +const { CID } = require('multiformats/cid') const { randomBytes } = require('iso-random-stream') const createShardedDirectory = require('../utils/create-sharded-directory') const isShardAtPath = require('../utils/is-shard-at-path') @@ -72,8 +71,11 @@ module.exports = (common, options) => { const src1 = `/src2-${Math.random()}` const parent = `/output-${Math.random()}` - const cid = new CID(1, 'identity', mh.encode(uint8ArrayFromString('derp'), 'identity')) - await ipfs.block.put(new Block(uint8ArrayFromString('derp'), cid), { cid }) + const hash = await identity.digest(uint8ArrayFromString('derp')) + const cid = CID.createV1(identity.code, hash) + await ipfs.block.put(uint8ArrayFromString('derp'), { + mhtype: 'identity' + }) await ipfs.files.cp(`/ipfs/${cid}`, parent) await ipfs.files.write(src1, [], { @@ -81,7 +83,7 @@ module.exports = (common, options) => { }) await expect(ipfs.files.cp(src1, `${parent}/child`)).to.eventually.be.rejectedWith(Error) - .that.has.property('message').that.matches(/"identity"/) + .that.has.property('message').that.matches(/unsupported codec/i) }) it('refuses to copy files to an exsting file', async () => { diff --git a/packages/interface-ipfs-core/src/files/ls.js b/packages/interface-ipfs-core/src/files/ls.js index 7c272a9363..3daa550435 100644 --- a/packages/interface-ipfs-core/src/files/ls.js +++ b/packages/interface-ipfs-core/src/files/ls.js 
@@ -3,10 +3,11 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { getDescribe, getIt, expect } = require('../utils/mocha') -const CID = require('cids') +const { CID } = require('multiformats/cid') const createShardedDirectory = require('../utils/create-sharded-directory') const all = require('it-all') const { randomBytes } = require('iso-random-stream') +const raw = require('multiformats/codecs/raw') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -42,7 +43,7 @@ module.exports = (common, options) => { const files = await all(ipfs.files.ls('/')) expect(files).to.have.lengthOf(1).and.to.containSubset([{ - cid: new CID('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ'), + cid: CID.parse('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ'), name: fileName, size: content.length, type: 'file' @@ -70,7 +71,7 @@ module.exports = (common, options) => { const files = await all(ipfs.files.ls(`/${dirName}`)) expect(files).to.have.lengthOf(1).and.to.containSubset([{ - cid: new CID('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ'), + cid: CID.parse('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ'), name: fileName, size: content.length, type: 'file' @@ -88,7 +89,7 @@ module.exports = (common, options) => { const files = await all(ipfs.files.ls(`/${fileName}`)) expect(files).to.have.lengthOf(1).and.to.containSubset([{ - cid: new CID('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ'), + cid: CID.parse('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ'), name: fileName, size: content.length, type: 'file' @@ -111,7 +112,7 @@ module.exports = (common, options) => { const stats = await ipfs.files.stat(filePath) const { value: node } = await ipfs.dag.get(stats.cid) - expect(node).to.have.nested.property('Links[0].Hash.codec', 'raw') + expect(node).to.have.nested.property('Links[0].Hash.code', raw.code) const child = node.Links[0] const files = await all(ipfs.files.ls(`/ipfs/${child.Hash}`)) @@ -137,7 +138,7 @@ module.exports = (common, options) => { 
const cid = stats.cid const { value: node } = await ipfs.dag.get(cid) - expect(node).to.have.nested.property('Links[0].Hash.codec', 'raw') + expect(node).to.have.nested.property('Links[0].Hash.code', raw.code) const child = node.Links[0] const dir = `/dir-with-raw-${Math.random()}` diff --git a/packages/interface-ipfs-core/src/files/mkdir.js b/packages/interface-ipfs-core/src/files/mkdir.js index b55f200c20..d71c1793bd 100644 --- a/packages/interface-ipfs-core/src/files/mkdir.js +++ b/packages/interface-ipfs-core/src/files/mkdir.js @@ -3,7 +3,7 @@ const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') -const multihash = require('multihashing-async').multihash +const { sha512 } = require('multiformats/hashes/sha2') const createShardedDirectory = require('../utils/create-sharded-directory') const all = require('it-all') const isShardAtPath = require('../utils/is-shard-at-path') @@ -160,8 +160,7 @@ module.exports = (common, options) => { hashAlg: 'sha2-512' }) - await expect(ipfs.files.stat(subDirectoryPath)).to.eventually.have.nested.property('cid.multihash') - .that.satisfies(hash => multihash.decode(hash).name === 'sha2-512') + await expect(ipfs.files.stat(subDirectoryPath)).to.eventually.have.nested.property('cid.multihash.code', sha512.code) }) it('should make directory and have default mode', async function () { diff --git a/packages/interface-ipfs-core/src/files/rm.js b/packages/interface-ipfs-core/src/files/rm.js index 99465f5d8b..2b9749a2fd 100644 --- a/packages/interface-ipfs-core/src/files/rm.js +++ b/packages/interface-ipfs-core/src/files/rm.js @@ -18,7 +18,7 @@ module.exports = (common, options) => { const it = getIt(options) describe('.files.rm', function () { - this.timeout(120 * 1000) + this.timeout(300 * 1000) let ipfs diff --git a/packages/interface-ipfs-core/src/files/stat.js b/packages/interface-ipfs-core/src/files/stat.js index ee39e44e07..d46a912d64 100644 --- 
a/packages/interface-ipfs-core/src/files/stat.js +++ b/packages/interface-ipfs-core/src/files/stat.js @@ -6,11 +6,11 @@ const { nanoid } = require('nanoid') const { fixtures } = require('../utils') const { getDescribe, getIt, expect } = require('../utils/mocha') const createShardedDirectory = require('../utils/create-sharded-directory') -const CID = require('cids') -const mh = require('multihashing-async').multihash -const Block = require('ipld-block') +const { CID } = require('multiformats/cid') +const { identity } = require('multiformats/hashes/identity') const { randomBytes } = require('iso-random-stream') const isShardAtPath = require('../utils/is-shard-at-path') +const raw = require('multiformats/codecs/raw') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -111,7 +111,7 @@ module.exports = (common, options) => { const stats = await ipfs.files.stat(filePath) const { value: node } = await ipfs.dag.get(stats.cid) - expect(node).to.have.nested.property('Links[0].Hash.codec', 'raw') + expect(node).to.have.nested.property('Links[0].Hash.code', raw.code) const child = node.Links[0] @@ -134,7 +134,7 @@ module.exports = (common, options) => { const { value: node } = await ipfs.dag.get(stats.cid) const child = node.Links[0] - expect(child.Hash.codec).to.equal('raw') + expect(child.Hash.code).to.equal(raw.code) const dir = `/dir-with-raw-${Math.random()}` const path = `${dir}/raw-${Math.random()}` @@ -165,8 +165,11 @@ module.exports = (common, options) => { it('stats an identity CID', async () => { const data = uint8ArrayFromString('derp') const path = `/test-${nanoid()}/identity.node` - const cid = new CID(1, 'identity', mh.encode(data, 'identity')) - await ipfs.block.put(new Block(data, cid)) + const hash = await identity.digest(data) + const cid = CID.createV1(identity.code, hash) + await ipfs.block.put(data, { + mhtype: 'identity' + }) await ipfs.files.cp(`/ipfs/${cid}`, path, { parents: true }) @@ -343,14 +346,14 @@ module.exports = (common, 
options) => { }) it('should stat outside of mfs', async () => { - const stat = await ipfs.files.stat('/ipfs/' + fixtures.smallFile.cid) + const stat = await ipfs.files.stat(`/ipfs/${fixtures.smallFile.cid}`) stat.cid = stat.cid.toString() expect(stat).to.include({ type: 'file', blocks: 0, size: 12, - cid: fixtures.smallFile.cid, + cid: fixtures.smallFile.cid.toString(), cumulativeSize: 20, withLocality: false }) diff --git a/packages/interface-ipfs-core/src/files/write.js b/packages/interface-ipfs-core/src/files/write.js index f6ef1a8fbb..4befe577b3 100644 --- a/packages/interface-ipfs-core/src/files/write.js +++ b/packages/interface-ipfs-core/src/files/write.js @@ -6,7 +6,7 @@ const uint8ArrayConcat = require('uint8arrays/concat') const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') const { isNode } = require('ipfs-utils/src/env') -const multihash = require('multihashing-async').multihash +const { sha512 } = require('multiformats/hashes/sha2') const traverseLeafNodes = require('../utils/traverse-leaf-nodes') const createShardedDirectory = require('../utils/create-sharded-directory') const createTwoShards = require('../utils/create-two-shards') @@ -14,6 +14,7 @@ const { randomBytes } = require('iso-random-stream') const { randomStream } = require('iso-random-stream') const all = require('it-all') const isShardAtPath = require('../utils/is-shard-at-path') +const raw = require('multiformats/codecs/raw') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -57,7 +58,7 @@ module.exports = (common, options) => { } describe('.files.write', function () { - this.timeout(120 * 1000) + this.timeout(300 * 1000) let ipfs @@ -105,7 +106,7 @@ module.exports = (common, options) => { })).to.eventually.be.rejected() }) - it('explodes if given a negtive offset', async () => { + it('explodes if given a negative offset', async () => { await expect(ipfs.files.write('/foo-negative-offset', uint8ArrayFromString('foo'), { 
offset: -1 })).to.eventually.be.rejected() @@ -403,9 +404,14 @@ module.exports = (common, options) => { const stats = await ipfs.files.stat(path) + let leafCount = 0 + for await (const { cid } of traverseLeafNodes(ipfs, stats.cid)) { - expect(cid.codec).to.equal('raw') + leafCount++ + expect(cid.code).to.equal(raw.code) } + + expect(leafCount).to.be.greaterThan(0) }) }) @@ -570,10 +576,7 @@ module.exports = (common, options) => { hashAlg: 'sha2-512' }) - await expect(ipfs.files.stat(filePath)).to.eventually.have.nested.property('cid.multihash') - .that.satisfies(hash => { - return multihash.decode(hash).name === 'sha2-512' - }) + await expect(ipfs.files.stat(filePath)).to.eventually.have.nested.property('cid.multihash.code', sha512.code) const actualBytes = uint8ArrayConcat(await all(ipfs.files.read(filePath))) diff --git a/packages/interface-ipfs-core/src/get.js b/packages/interface-ipfs-core/src/get.js index c1f6e7671c..81a623fcfd 100644 --- a/packages/interface-ipfs-core/src/get.js +++ b/packages/interface-ipfs-core/src/get.js @@ -5,7 +5,7 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayConcat = require('uint8arrays/concat') const { fixtures } = require('./utils') -const CID = require('cids') +const { CID } = require('multiformats/cid') const all = require('it-all') const drain = require('it-drain') const last = require('it-last') @@ -13,6 +13,7 @@ const map = require('it-map') const { getDescribe, getIt, expect } = require('./utils/mocha') const testTimeout = require('./utils/test-timeout') const { importer } = require('ipfs-unixfs-importer') +const blockstore = require('./utils/blockstore-adapter') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -30,14 +31,14 @@ module.exports = (common, options) => { before(async () => { ipfs = (await common.spawn()).api - await drain(importer([{ content: fixtures.smallFile.data }], ipfs.block)) - await 
drain(importer([{ content: fixtures.bigFile.data }], ipfs.block)) + await drain(importer([{ content: fixtures.smallFile.data }], blockstore(ipfs))) + await drain(importer([{ content: fixtures.bigFile.data }], blockstore(ipfs))) }) after(() => common.clean()) it('should respect timeout option when getting files', () => { - return testTimeout(() => drain(ipfs.get(new CID('QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbS1'), { + return testTimeout(() => drain(ipfs.get(CID.parse('QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbS1'), { timeout: 1 }))) }) @@ -45,23 +46,14 @@ module.exports = (common, options) => { it('should get with a base58 encoded multihash', async () => { const files = await all(ipfs.get(fixtures.smallFile.cid)) expect(files).to.be.length(1) - expect(files[0].path).to.eql(fixtures.smallFile.cid) - expect(uint8ArrayToString(uint8ArrayConcat(await all(files[0].content)))).to.contain('Plz add me!') - }) - - it('should get with a Uint8Array multihash', async () => { - const cidBuf = new CID(fixtures.smallFile.cid).multihash - - const files = await all(ipfs.get(cidBuf)) - expect(files).to.be.length(1) - expect(files[0].path).to.eql(fixtures.smallFile.cid) + expect(files[0].path).to.eql(fixtures.smallFile.cid.toString()) expect(uint8ArrayToString(uint8ArrayConcat(await all(files[0].content)))).to.contain('Plz add me!') }) it('should get a file added as CIDv0 with a CIDv1', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await all(importer([{ content: input }], ipfs.block)) + const res = await all(importer([{ content: input }], blockstore(ipfs))) const cidv0 = res[0].cid expect(cidv0.version).to.equal(0) @@ -75,7 +67,7 @@ module.exports = (common, options) => { it('should get a file added as CIDv1 with a CIDv0', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await all(importer([{ content: input }], ipfs.block, { cidVersion: 1, rawLeaves: false })) + const res = await all(importer([{ 
content: input }], blockstore(ipfs), { cidVersion: 1, rawLeaves: false })) const cidv1 = res[0].cid expect(cidv1.version).to.equal(1) @@ -89,7 +81,7 @@ module.exports = (common, options) => { it('should get a file added as CIDv1 with rawLeaves', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await all(importer([{ content: input }], ipfs.block, { cidVersion: 1, rawLeaves: true })) + const res = await all(importer([{ content: input }], blockstore(ipfs), { cidVersion: 1, rawLeaves: true })) const cidv1 = res[0].cid expect(cidv1.version).to.equal(1) @@ -101,7 +93,7 @@ module.exports = (common, options) => { it('should get a BIG file', async () => { for await (const file of ipfs.get(fixtures.bigFile.cid)) { - expect(file.path).to.equal(fixtures.bigFile.cid) + expect(file.path).to.equal(fixtures.bigFile.cid.toString()) const content = uint8ArrayConcat(await all(file.content)) expect(content.length).to.eql(fixtures.bigFile.data.length) expect(content.slice()).to.eql(fixtures.bigFile.data) @@ -127,11 +119,11 @@ module.exports = (common, options) => { emptyDir('files/empty') ] - const res = await all(importer(dirs, ipfs.block)) + const res = await all(importer(dirs, blockstore(ipfs))) const root = res[res.length - 1] expect(root.path).to.equal('test-folder') - expect(root.cid.toString()).to.equal(fixtures.directory.cid) + expect(root.cid.toString()).to.equal(fixtures.directory.cid.toString()) let files = await all((async function * () { for await (let { path, content } of ipfs.get(fixtures.directory.cid)) { @@ -184,7 +176,7 @@ module.exports = (common, options) => { content('jungle.txt', 'foo/bar/jungle.txt') ] - const res = await all(importer(dirs, ipfs.block)) + const res = await all(importer(dirs, blockstore(ipfs))) const root = res[res.length - 1] expect(root.path).to.equal('test-folder') expect(root.cid.toString()).to.equal('QmVMXXo3c2bDPH9ayy2VKoXpykfYJHwAcU5YCJjPf7jg3g') @@ -225,7 +217,7 @@ module.exports = (common, options) => 
{ content: fixtures.smallFile.data } - const fileAdded = await last(importer([file], ipfs.block)) + const fileAdded = await last(importer([file], blockstore(ipfs))) expect(fileAdded).to.have.property('path', 'a') const files = await all(ipfs.get(`/ipfs/${fileAdded.cid}/testfile.txt`)) diff --git a/packages/interface-ipfs-core/src/ls.js b/packages/interface-ipfs-core/src/ls.js index 63b11dd383..af316c3cfd 100644 --- a/packages/interface-ipfs-core/src/ls.js +++ b/packages/interface-ipfs-core/src/ls.js @@ -4,7 +4,7 @@ const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('./utils/mocha') const all = require('it-all') -const CID = require('cids') +const { CID } = require('multiformats/cid') const testTimeout = require('./utils/test-timeout') const randomName = prefix => `${prefix}${Math.round(Math.random() * 1000)}` @@ -30,7 +30,7 @@ module.exports = (common, options) => { after(() => common.clean()) it('should respect timeout option when listing files', () => { - return testTimeout(() => ipfs.ls(new CID('QmNonExistentCiD8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXg'), { + return testTimeout(() => ipfs.ls(CID.parse('QmNonExistentCiD8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXg'), { timeout: 1 })) }) @@ -58,7 +58,7 @@ module.exports = (common, options) => { const root = res[res.length - 1] expect(root.path).to.equal('test-folder') - expect(root.cid.toString()).to.equal(fixtures.directory.cid) + expect(root.cid.toString()).to.equal(fixtures.directory.cid.toString()) const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' const output = await all(ipfs.ls(cid)) diff --git a/packages/interface-ipfs-core/src/miscellaneous/id.js b/packages/interface-ipfs-core/src/miscellaneous/id.js index cd6148fd6a..31fbc42c9e 100644 --- a/packages/interface-ipfs-core/src/miscellaneous/id.js +++ b/packages/interface-ipfs-core/src/miscellaneous/id.js @@ -3,7 +3,6 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { Multiaddr } = require('multiaddr') -const CID = 
require('cids') const { isWebWorker } = require('ipfs-utils/src/env') const retry = require('p-retry') @@ -29,7 +28,6 @@ module.exports = (common, options) => { it('should get the node ID', async () => { const res = await ipfs.id() expect(res).to.have.a.property('id').that.is.a('string') - expect(CID.isCID(new CID(res.id))).to.equal(true) expect(res).to.have.a.property('publicKey') expect(res).to.have.a.property('agentVersion').that.is.a('string') expect(res).to.have.a.property('protocolVersion').that.is.a('string') diff --git a/packages/interface-ipfs-core/src/miscellaneous/resolve.js b/packages/interface-ipfs-core/src/miscellaneous/resolve.js index 4700756a2b..425d6dda48 100644 --- a/packages/interface-ipfs-core/src/miscellaneous/resolve.js +++ b/packages/interface-ipfs-core/src/miscellaneous/resolve.js @@ -4,7 +4,7 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const isIpfs = require('is-ipfs') const { nanoid } = require('nanoid') -const multibase = require('multibase') +const { base64url } = require('multiformats/bases/base64') const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') const { isWebWorker } = require('ipfs-utils/src/env') @@ -39,11 +39,13 @@ module.exports = (common, options) => { }) it('should resolve an IPFS hash and return a base64url encoded CID in path', async () => { - const { cid } = await ipfs.add(uint8ArrayFromString('base64url encoded')) + const { cid } = await ipfs.add(uint8ArrayFromString('base64url encoded'), { + cidVersion: 1 + }) const path = await ipfs.resolve(`/ipfs/${cid}`, { cidBase: 'base64url' }) const [,, cidStr] = path.split('/') - expect(multibase.isEncoded(cidStr)).to.equal('base64url') + expect(cidStr).to.equal(cid.toString(base64url)) }) // Test resolve turns /ipfs/QmRootHash/path/to/file into /ipfs/QmFileHash diff --git a/packages/interface-ipfs-core/src/name/publish.js b/packages/interface-ipfs-core/src/name/publish.js index 68fe8b65ba..07f9b75664 100644 --- 
a/packages/interface-ipfs-core/src/name/publish.js +++ b/packages/interface-ipfs-core/src/name/publish.js @@ -6,7 +6,7 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { fixture } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') const last = require('it-last') -const CID = require('cids') +const PeerId = require('peer-id') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -40,7 +40,7 @@ module.exports = (common, options) => { const res = await ipfs.name.publish(value, { allowOffline: true }) expect(res).to.exist() - expect(new CID(res.name).toV1().toString('base36')).to.equal(new CID(self.id).toV1().toString('base36')) + expect(PeerId.parse(res.name).toString()).to.equal(PeerId.parse(self.id).toString()) expect(res.value).to.equal(`/ipfs/${value}`) }) @@ -67,7 +67,7 @@ module.exports = (common, options) => { const res = await ipfs.name.publish(value, options) expect(res).to.exist() - expect(new CID(res.name).toV1().toString('base36')).to.equal(new CID(self.id).toV1().toString('base36')) + expect(PeerId.parse(res.name).toString()).to.equal(PeerId.parse(self.id).toString()) expect(res.value).to.equal(`/ipfs/${value}`) }) @@ -87,7 +87,7 @@ module.exports = (common, options) => { const res = await ipfs.name.publish(value, options) expect(res).to.exist() - expect(new CID(res.name).toV1().toString('base36')).to.equal(new CID(key.id).toV1().toString('base36')) + expect(PeerId.parse(res.name).toString()).to.equal(PeerId.parse(key.id).toString()) expect(res.value).to.equal(`/ipfs/${value}`) }) }) diff --git a/packages/interface-ipfs-core/src/name/resolve.js b/packages/interface-ipfs-core/src/name/resolve.js index 917eb50d6b..7a8834e2e1 100644 --- a/packages/interface-ipfs-core/src/name/resolve.js +++ b/packages/interface-ipfs-core/src/name/resolve.js @@ -4,8 +4,10 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { getDescribe, getIt, expect } = require('../utils/mocha') 
const delay = require('delay') -const CID = require('cids') +const PeerId = require('peer-id') const last = require('it-last') +const { CID } = require('multiformats/cid') +const Digest = require('multiformats/hashes/digest') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -42,16 +44,16 @@ module.exports = (common, options) => { it('should resolve a record from peerid as cidv1 in base32', async function () { this.timeout(20 * 1000) - const { path } = await ipfs.add(uint8ArrayFromString('should resolve a record from cidv1b32')) + const { cid } = await ipfs.add(uint8ArrayFromString('should resolve a record from cidv1b32')) const { id: peerId } = await ipfs.id() - await ipfs.name.publish(path, { allowOffline: true }) + await ipfs.name.publish(cid, { allowOffline: true }) // Represent Peer ID as CIDv1 Base32 // https://github.com/libp2p/specs/blob/master/RFC/0001-text-peerid-cid.md - const keyCid = new CID(peerId).toV1().toString('base32') + const keyCid = CID.createV1(0x72, Digest.decode(PeerId.parse(peerId).toBytes())) const resolvedPath = await last(ipfs.name.resolve(`/ipns/${keyCid}`)) - expect(resolvedPath).to.equal(`/ipfs/${path}`) + expect(resolvedPath).to.equal(`/ipfs/${cid}`) }) it('should resolve a record recursive === false', async () => { diff --git a/packages/interface-ipfs-core/src/object/data.js b/packages/interface-ipfs-core/src/object/data.js index ab89b44239..86450961f0 100644 --- a/packages/interface-ipfs-core/src/object/data.js +++ b/packages/interface-ipfs-core/src/object/data.js @@ -25,7 +25,7 @@ module.exports = (common, options) => { after(() => common.clean()) - it('should get data by multihash', async () => { + it('should get data by CID', async () => { const testObj = { Data: uint8ArrayFromString(nanoid()), Links: [] @@ -34,19 +34,7 @@ module.exports = (common, options) => { const nodeCid = await ipfs.object.put(testObj) const data = await ipfs.object.data(nodeCid) - expect(testObj.Data).to.deep.equal(data) - }) - - 
it('should get data by base58 encoded multihash string', async () => { - const testObj = { - Data: uint8ArrayFromString(nanoid()), - Links: [] - } - - const nodeCid = await ipfs.object.put(testObj) - - const data = await ipfs.object.data(nodeCid.toV0().toString(), { enc: 'base58' }) - expect(testObj.Data).to.eql(data) + expect(testObj.Data).to.equalBytes(data) }) it('returns error for request without argument', () => { diff --git a/packages/interface-ipfs-core/src/object/get.js b/packages/interface-ipfs-core/src/object/get.js index 6b5f941a80..2d925e4702 100644 --- a/packages/interface-ipfs-core/src/object/get.js +++ b/packages/interface-ipfs-core/src/object/get.js @@ -1,14 +1,14 @@ /* eslint-env mocha */ 'use strict' -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode +const dagPB = require('@ipld/dag-pb') const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') const { UnixFS } = require('ipfs-unixfs') const { randomBytes } = require('iso-random-stream') -const { asDAGLink } = require('./utils') const uint8ArrayFromString = require('uint8arrays/from-string') +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -43,39 +43,35 @@ module.exports = (common, options) => { // because js-ipfs-api can't infer if the // returned Data is Uint8Array or String if (typeof node2.Data === 'string') { - node2 = new DAGNode(uint8ArrayFromString(node2.Data), node2.Links, node2.size) + node2 = { + Data: uint8ArrayFromString(node2.Data), + Links: node2.Links + } } expect(node1.Data).to.eql(node2.Data) expect(node1.Links).to.eql(node2.Links) }) - it('should get object by multihash string', async () => { - const obj = { - Data: uint8ArrayFromString(nanoid()), + it('should get object with links by multihash string', async () => { + const node1a = { + Data: uint8ArrayFromString('Some data 1'), Links: [] } - - 
const node1Cid = await ipfs.object.put(obj) - const node1 = await ipfs.object.get(node1Cid) - let node2 = await ipfs.object.get(node1Cid.toBaseEncodedString()) - - // because js-ipfs-api can't infer if the - // returned Data is Uint8Array or String - if (typeof node2.Data === 'string') { - node2 = new DAGNode(uint8ArrayFromString(node2.Data), node2.Links, node2.size) + const node2 = { + Data: uint8ArrayFromString('Some data 2'), + Links: [] + } + const node2Buf = dagPB.encode(node2) + const link = { + Name: 'some-link', + Tsize: node2Buf.length, + Hash: CID.createV0(await sha256.digest(node2Buf)) + } + const node1b = { + Data: node1a.Data, + Links: node1a.Links.concat(link) } - - expect(node1.Data).to.deep.equal(node2.Data) - expect(node1.Links).to.deep.equal(node2.Links) - }) - - it('should get object with links by multihash string', async () => { - const node1a = new DAGNode(uint8ArrayFromString('Some data 1')) - const node2 = new DAGNode(uint8ArrayFromString('Some data 2')) - - const link = await asDAGLink(node2, 'some-link') - const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) const node1bCid = await ipfs.object.put(node1b) let node1c = await ipfs.object.get(node1bCid) @@ -83,7 +79,10 @@ module.exports = (common, options) => { // because js-ipfs-api can't infer if the // returned Data is Uint8Array or String if (typeof node1c.Data === 'string') { - node1c = new DAGNode(uint8ArrayFromString(node1c.Data), node1c.Links, node1c.size) + node1c = { + Data: uint8ArrayFromString(node1c.Data), + Links: node1c.Links + } } expect(node1a.Data).to.eql(node1c.Data) @@ -102,27 +101,10 @@ module.exports = (common, options) => { // because js-ipfs-api can't infer if the // returned Data is Uint8Array or String if (typeof node1b.Data === 'string') { - node1b = new DAGNode(uint8ArrayFromString(node1b.Data), node1b.Links, node1b.size) - } - - expect(node1a.Data).to.eql(node1b.Data) - expect(node1a.Links).to.eql(node1b.Links) - }) - - it('should get object by base58 
encoded multihash string', async () => { - const obj = { - Data: uint8ArrayFromString(nanoid()), - Links: [] - } - - const node1aCid = await ipfs.object.put(obj) - const node1a = await ipfs.object.get(node1aCid) - let node1b = await ipfs.object.get(node1aCid.toBaseEncodedString(), { enc: 'base58' }) - - // because js-ipfs-api can't infer if the - // returned Data is Uint8Array or String - if (typeof node1b.Data === 'string') { - node1b = new DAGNode(uint8ArrayFromString(node1b.Data), node1b.Links, node1b.size) + node1b = { + Data: uint8ArrayFromString(node1b.Data), + Links: node1b.Links + } } expect(node1a.Data).to.eql(node1b.Data) diff --git a/packages/interface-ipfs-core/src/object/links.js b/packages/interface-ipfs-core/src/object/links.js index d37a1e786d..64dc175e31 100644 --- a/packages/interface-ipfs-core/src/object/links.js +++ b/packages/interface-ipfs-core/src/object/links.js @@ -2,11 +2,11 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode +const dagPB = require('@ipld/dag-pb') const { nanoid } = require('nanoid') +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { asDAGLink } = require('./utils') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -42,44 +42,30 @@ module.exports = (common, options) => { }) it('should get links by multihash', async () => { - const node1a = new DAGNode(uint8ArrayFromString('Some data 1')) - const node2 = new DAGNode(uint8ArrayFromString('Some data 2')) - - const link = await asDAGLink(node2, 'some-link') - - const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - const node1bCid = await ipfs.object.put(node1b) - - const links = await ipfs.object.links(node1bCid) - - expect(links).to.have.lengthOf(1) - expect(node1b.Links).to.deep.equal(links) - }) - - it('should get links by 
base58 encoded multihash', async () => { - const testObj = { - Data: uint8ArrayFromString(nanoid()), + const node1a = { + Data: uint8ArrayFromString('Some data 1'), Links: [] } - - const cid = await ipfs.object.put(testObj) - const node = await ipfs.object.get(cid) - - const links = await ipfs.object.links(cid.bytes, { enc: 'base58' }) - expect(node.Links).to.deep.equal(links) - }) - - it('should get links by base58 encoded multihash string', async () => { - const testObj = { - Data: uint8ArrayFromString(nanoid()), + const node2 = { + Data: uint8ArrayFromString('Some data 2'), Links: [] } + const node2Buf = dagPB.encode(node2) + const link = { + Name: 'some-link', + Tsize: node2Buf.length, + Hash: CID.createV0(await sha256.digest(node2Buf)) + } + const node1b = { + Data: node1a.Data, + Links: node1a.Links.concat(link) + } + const node1bCid = await ipfs.object.put(node1b) - const cid = await ipfs.object.put(testObj) - const node = await ipfs.object.get(cid) + const links = await ipfs.object.links(node1bCid) - const links = await ipfs.object.links(cid.toBaseEncodedString(), { enc: 'base58' }) - expect(node.Links).to.deep.equal(links) + expect(links).to.have.lengthOf(1) + expect(node1b.Links).to.deep.equal(links) }) it('should get links from CBOR object', async () => { diff --git a/packages/interface-ipfs-core/src/object/new.js b/packages/interface-ipfs-core/src/object/new.js index 0b3116f564..fa0df1eb7e 100644 --- a/packages/interface-ipfs-core/src/object/new.js +++ b/packages/interface-ipfs-core/src/object/new.js @@ -25,12 +25,12 @@ module.exports = (common, options) => { it('should create a new object with no template', async () => { const cid = await ipfs.object.new() - expect(cid.toBaseEncodedString()).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') + expect(cid.toString()).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') }) it('should create a new object with unixfs-dir template', async () => { const cid = await ipfs.object.new({ template: 
'unixfs-dir' }) - expect(cid.toBaseEncodedString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + expect(cid.toString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') }) }) } diff --git a/packages/interface-ipfs-core/src/object/patch/add-link.js b/packages/interface-ipfs-core/src/object/patch/add-link.js index 82747319c7..3682533e04 100644 --- a/packages/interface-ipfs-core/src/object/patch/add-link.js +++ b/packages/interface-ipfs-core/src/object/patch/add-link.js @@ -2,10 +2,10 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode +const dagPB = require('@ipld/dag-pb') +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../../utils/mocha') -const { asDAGLink } = require('../utils') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -33,15 +33,29 @@ module.exports = (common, options) => { Links: [] } // link to add - const node2 = new DAGNode(uint8ArrayFromString('some other node')) + const node2 = { + Data: uint8ArrayFromString('some other node'), + Links: [] + } // note: we need to put the linked obj, otherwise IPFS won't // timeout. 
Reason: it needs the node to get its size await ipfs.object.put(node2) - const link = await asDAGLink(node2, 'link-to-node') + const node2Buf = dagPB.encode(node2) + const link = { + Name: 'link-to-node', + Tsize: node2Buf.length, + Hash: CID.createV0(await sha256.digest(node2Buf)) + } // manual create dag step by step - const node1a = new DAGNode(obj.Data, obj.Links) - const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + const node1a = { + Data: obj.Data, + Links: obj.Links + } + const node1b = { + Data: node1a.Data, + Links: node1a.Links.concat(link) + } const node1bCid = await ipfs.object.put(node1b) // add link with patch.addLink diff --git a/packages/interface-ipfs-core/src/object/patch/rm-link.js b/packages/interface-ipfs-core/src/object/patch/rm-link.js index b9184abc02..4b5f574e15 100644 --- a/packages/interface-ipfs-core/src/object/patch/rm-link.js +++ b/packages/interface-ipfs-core/src/object/patch/rm-link.js @@ -2,8 +2,10 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') +const dagPB = require('@ipld/dag-pb') +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../../utils/mocha') -const { asDAGLink } = require('../utils') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -39,7 +41,12 @@ module.exports = (common, options) => { const nodeCid = await ipfs.object.put(obj1) const childCid = await ipfs.object.put(obj2) const child = await ipfs.object.get(childCid) - const childAsDAGLink = await asDAGLink(child, 'my-link') + const childBuf = dagPB.encode(child) + const childAsDAGLink = { + Name: 'my-link', + Tsize: childBuf.length, + Hash: CID.createV0(await sha256.digest(childBuf)) + } const parentCid = await ipfs.object.patch.addLink(nodeCid, childAsDAGLink) const withoutChildCid = await ipfs.object.patch.rmLink(parentCid, childAsDAGLink) diff --git a/packages/interface-ipfs-core/src/object/put.js 
b/packages/interface-ipfs-core/src/object/put.js index 44aa25058f..288d6abebf 100644 --- a/packages/interface-ipfs-core/src/object/put.js +++ b/packages/interface-ipfs-core/src/object/put.js @@ -2,11 +2,13 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode +const dagPB = require('@ipld/dag-pb') const { nanoid } = require('nanoid') +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { asDAGLink } = require('./utils') +const first = require('it-first') +const drain = require('it-drain') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -37,58 +39,48 @@ module.exports = (common, options) => { const cid = await ipfs.object.put(obj) const node = await ipfs.object.get(cid) - const nodeJSON = node.toJSON() - expect(obj.Data).to.deep.equal(nodeJSON.data) - expect(obj.Links).to.deep.equal(nodeJSON.links) + expect(node).to.deep.equal(obj) }) - it('should put a JSON encoded Uint8Array', async () => { + it('should pin an object when putting', async () => { const obj = { Data: uint8ArrayFromString(nanoid()), Links: [] } - const obj2 = { - Data: obj.Data.toString(), - Links: obj.Links - } - - const buf = uint8ArrayFromString(JSON.stringify(obj2)) + const cid = await ipfs.object.put(obj, { + pin: true + }) + const pin = await first(ipfs.pin.ls({ + paths: cid + })) - const cid = await ipfs.object.put(buf, { enc: 'json' }) - - const node = await ipfs.object.get(cid) - const nodeJSON = node.toJSON() - expect(nodeJSON.data).to.eql(node.Data) + expect(pin).to.have.deep.property('cid', cid) + expect(pin).to.have.property('type', 'recursive') }) - it('should put a Protobuf encoded Uint8Array', async () => { - const node = new DAGNode(uint8ArrayFromString(nanoid())) - const serialized = node.serialize() - - const cid = await ipfs.object.put(serialized, { 
enc: 'protobuf' }) - const node2 = await ipfs.object.get(cid) - expect(node2.Data).to.deep.equal(node.Data) - expect(node2.Links).to.deep.equal(node.Links) - }) + it('should not pin an object by default', async () => { + const obj = { + Data: uint8ArrayFromString(nanoid()), + Links: [] + } - it('should put a Uint8Array as data', async () => { - const data = uint8ArrayFromString(nanoid()) + const cid = await ipfs.object.put(obj) - const cid = await ipfs.object.put(data) - const node = await ipfs.object.get(cid) - const nodeJSON = node.toJSON() - expect(data).to.deep.equal(nodeJSON.data) - expect([]).to.deep.equal(nodeJSON.links) + return expect(drain(ipfs.pin.ls({ + paths: cid + }))).to.eventually.be.rejectedWith(/not pinned/) }) it('should put a Protobuf DAGNode', async () => { - const dNode = new DAGNode(uint8ArrayFromString(nanoid())) + const dNode = { + Data: uint8ArrayFromString(nanoid()), + Links: [] + } const cid = await ipfs.object.put(dNode) const node = await ipfs.object.get(cid) - expect(dNode.Data).to.deep.equal(node.Data) - expect(dNode.Links).to.deep.equal(node.Links) + expect(dNode).to.deep.equal(node) }) it('should fail if a string is passed', () => { @@ -96,12 +88,24 @@ module.exports = (common, options) => { }) it('should put a Protobuf DAGNode with a link', async () => { - const node1a = new DAGNode(uint8ArrayFromString(nanoid())) - const node2 = new DAGNode(uint8ArrayFromString(nanoid())) - - const link = await asDAGLink(node2, 'some-link') - - const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + const node1a = { + Data: uint8ArrayFromString(nanoid()), + Links: [] + } + const node2 = { + Data: uint8ArrayFromString(nanoid()), + Links: [] + } + const node2Buf = dagPB.encode(node2) + const link = { + Name: 'some-link', + Tsize: node2Buf.length, + Hash: CID.createV0(await sha256.digest(node2Buf)) + } + const node1b = { + Data: node1a.Data, + Links: node1a.Links.concat(link) + } const cid = await ipfs.object.put(node1b) const node = 
await ipfs.object.get(cid) diff --git a/packages/interface-ipfs-core/src/object/stat.js b/packages/interface-ipfs-core/src/object/stat.js index f312eeaf33..ba69a37be6 100644 --- a/packages/interface-ipfs-core/src/object/stat.js +++ b/packages/interface-ipfs-core/src/object/stat.js @@ -2,10 +2,11 @@ 'use strict' const uint8ArrayFromString = require('uint8arrays/from-string') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode +const dagPB = require('@ipld/dag-pb') +const { nanoid } = require('nanoid') +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { asDAGLink } = require('./utils') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -36,75 +37,48 @@ module.exports = (common, options) => { const cid = await ipfs.object.put(testObj) const stats = await ipfs.object.stat(cid) const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', + Hash: CID.parse('QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ'), NumLinks: 0, BlockSize: 17, LinksSize: 2, DataSize: 15, CumulativeSize: 17 } - expect(expected).to.deep.equal(stats) + + expect(stats).to.deep.equal(expected) }) it('should get stats for object with links by multihash', async () => { - const node1a = new DAGNode(uint8ArrayFromString('Some data 1')) - const node2 = new DAGNode(uint8ArrayFromString('Some data 2')) - - const link = await asDAGLink(node2, 'some-link') - - const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - const node1bCid = await ipfs.object.put(node1b) - - const stats = await ipfs.object.stat(node1bCid) - const expected = { - Hash: 'QmPR7W4kaADkAo4GKEVVPQN81EDUFCHJtqejQZ5dEG7pBC', - NumLinks: 1, - BlockSize: 64, - LinksSize: 53, - DataSize: 11, - CumulativeSize: 77 + const node1a = { + Data: uint8ArrayFromString(nanoid()), + Links: [] } - expect(expected).to.eql(stats) - }) - - it('should get stats by base58 encoded 
multihash', async () => { - const testObj = { - Data: uint8ArrayFromString('get test object'), + const node2 = { + Data: uint8ArrayFromString(nanoid()), Links: [] } - - const cid = await ipfs.object.put(testObj) - - const stats = await ipfs.object.stat(cid.bytes) - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 + const node2Buf = dagPB.encode(node2) + const link = { + Name: 'some-link', + Tsize: node2Buf.length, + Hash: CID.createV0(await sha256.digest(node2Buf)) } - expect(expected).to.deep.equal(stats) - }) - - it('should get stats by base58 encoded multihash string', async () => { - const testObj = { - Data: uint8ArrayFromString('get test object'), - Links: [] + const node1b = { + Data: node1a.Data, + Links: node1a.Links.concat(link) } + const node1bCid = await ipfs.object.put(node1b) - const cid = await ipfs.object.put(testObj) - - const stats = await ipfs.object.stat(cid.toBaseEncodedString()) + const stats = await ipfs.object.stat(node1bCid) const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 + Hash: node1bCid, + NumLinks: 1, + BlockSize: 74, + LinksSize: 53, + DataSize: 21, + CumulativeSize: 97 } - expect(expected).to.deep.equal(stats) + expect(stats).to.deep.equal(expected) }) it('returns error for request without argument', () => { diff --git a/packages/interface-ipfs-core/src/object/utils.js b/packages/interface-ipfs-core/src/object/utils.js deleted file mode 100644 index e16547e037..0000000000 --- a/packages/interface-ipfs-core/src/object/utils.js +++ /dev/null @@ -1,15 +0,0 @@ -'use strict' - -const dagPB = require('ipld-dag-pb') - -const calculateCid = node => dagPB.util.cid(node.serialize(), { cidVersion: 0 }) - -const asDAGLink = async (node, name = '') => { - const cid = await calculateCid(node) - return new dagPB.DAGLink(name, 
node.size, cid) -} - -module.exports = { - calculateCid, - asDAGLink -} diff --git a/packages/interface-ipfs-core/src/pin/add.js b/packages/interface-ipfs-core/src/pin/add.js index 7b4ce227fa..a71f1398ec 100644 --- a/packages/interface-ipfs-core/src/pin/add.js +++ b/packages/interface-ipfs-core/src/pin/add.js @@ -6,9 +6,6 @@ const { fixtures, clearPins, expectPinned, expectNotPinned, pinTypes } = require const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') const drain = require('it-drain') -const { - DAGNode -} = require('ipld-dag-pb') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -101,14 +98,13 @@ module.exports = (common, options) => { .with(/already pinned recursively/) }) - it('should fail to pin a hash not in datastore', function () { + it('should fail to pin a hash not in datastore', async function () { this.slow(3 * 1000) this.timeout(5 * 1000) const falseHash = `${`${fixtures.directory.cid}`.slice(0, -2)}ss` - return expect(ipfs.pin.add(falseHash, { timeout: '2s' })) - .to.eventually.be.rejected() - // TODO: http api TimeoutErrors do not have this property - // .with.a.property('code').that.equals('ERR_TIMEOUT') + + await expect(ipfs.pin.add(falseHash, { timeout: '2s' })) + .to.eventually.be.rejected().with.property('name', 'TimeoutError') }) it('needs all children in datastore to pin recursively', async function () { @@ -117,7 +113,7 @@ module.exports = (common, options) => { await all(ipfs.block.rm(fixtures.directory.files[0].cid)) await expect(ipfs.pin.add(fixtures.directory.cid, { timeout: '2s' })) - .to.eventually.be.rejected() + .to.eventually.be.rejected().with.property('name', 'TimeoutError') }) it('should pin dag-cbor', async () => { @@ -153,7 +149,10 @@ module.exports = (common, options) => { }) it('should pin dag-cbor with dag-pb child', async () => { - const child = await ipfs.dag.put(new DAGNode(uint8ArrayFromString(`${Math.random()}`)), { + const child = await ipfs.dag.put({ + 
Data: uint8ArrayFromString(`${Math.random()}`), + Links: [] + }, { format: 'dag-pb', hashAlg: 'sha2-256' }) diff --git a/packages/interface-ipfs-core/src/pin/remote/ls.js b/packages/interface-ipfs-core/src/pin/remote/ls.js index d76654361c..888572937f 100644 --- a/packages/interface-ipfs-core/src/pin/remote/ls.js +++ b/packages/interface-ipfs-core/src/pin/remote/ls.js @@ -4,7 +4,7 @@ const { clearRemotePins, addRemotePins, clearServices } = require('../utils') const { getDescribe, getIt, expect } = require('../../utils/mocha') const all = require('it-all') -const CID = require('cids') +const { CID } = require('multiformats/cid') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -19,10 +19,10 @@ module.exports = (common, options) => { const KEY = process.env.PINNING_SERVIEC_KEY const SERVICE = 'pinbot' - const cid1 = new CID('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') - const cid2 = new CID('QmdFyxZXsFiP4csgfM5uPu99AvFiKH62CSPDw5TP92nr7w') - const cid3 = new CID('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') - const cid4 = new CID('QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu') + const cid1 = CID.parse('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') + const cid2 = CID.parse('QmdFyxZXsFiP4csgfM5uPu99AvFiKH62CSPDw5TP92nr7w') + const cid3 = CID.parse('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') + const cid4 = CID.parse('QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu') describe('.pin.remote.ls', function () { this.timeout(50 * 1000) diff --git a/packages/interface-ipfs-core/src/pin/remote/rm-all.js b/packages/interface-ipfs-core/src/pin/remote/rm-all.js index 5036792e87..81885afe0a 100644 --- a/packages/interface-ipfs-core/src/pin/remote/rm-all.js +++ b/packages/interface-ipfs-core/src/pin/remote/rm-all.js @@ -3,7 +3,7 @@ const { clearRemotePins, addRemotePins, clearServices } = require('../utils') const { getDescribe, getIt, expect } = require('../../utils/mocha') -const CID = require('cids') +const { CID } = 
require('multiformats/cid') const all = require('it-all') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -19,10 +19,10 @@ module.exports = (common, options) => { const KEY = process.env.PINNING_SERVIEC_KEY const SERVICE = 'pinbot' - const cid1 = new CID('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') - const cid2 = new CID('QmdFyxZXsFiP4csgfM5uPu99AvFiKH62CSPDw5TP92nr7w') - const cid3 = new CID('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') - const cid4 = new CID('QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu') + const cid1 = CID.parse('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') + const cid2 = CID.parse('QmdFyxZXsFiP4csgfM5uPu99AvFiKH62CSPDw5TP92nr7w') + const cid3 = CID.parse('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') + const cid4 = CID.parse('QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu') describe('.pin.remote.rmAll()', function () { this.timeout(50 * 1000) diff --git a/packages/interface-ipfs-core/src/pin/remote/rm.js b/packages/interface-ipfs-core/src/pin/remote/rm.js index 32b8ba7ea0..a2073233c2 100644 --- a/packages/interface-ipfs-core/src/pin/remote/rm.js +++ b/packages/interface-ipfs-core/src/pin/remote/rm.js @@ -3,7 +3,7 @@ const { clearRemotePins, addRemotePins, clearServices } = require('../utils') const { getDescribe, getIt, expect } = require('../../utils/mocha') -const CID = require('cids') +const { CID } = require('multiformats/cid') const all = require('it-all') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -19,10 +19,10 @@ module.exports = (common, options) => { const KEY = process.env.PINNING_SERVIEC_KEY const SERVICE = 'pinbot' - const cid1 = new CID('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') - const cid2 = new CID('QmdFyxZXsFiP4csgfM5uPu99AvFiKH62CSPDw5TP92nr7w') - const cid3 = new CID('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') - const cid4 = new CID('QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu') + const cid1 = CID.parse('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') + 
const cid2 = CID.parse('QmdFyxZXsFiP4csgfM5uPu99AvFiKH62CSPDw5TP92nr7w') + const cid3 = CID.parse('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') + const cid4 = CID.parse('QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu') describe('.pin.remote.rm()', function () { this.timeout(50 * 1000) diff --git a/packages/interface-ipfs-core/src/pin/utils.js b/packages/interface-ipfs-core/src/pin/utils.js index 1b5721c704..eea1d59902 100644 --- a/packages/interface-ipfs-core/src/pin/utils.js +++ b/packages/interface-ipfs-core/src/pin/utils.js @@ -2,7 +2,7 @@ const { expect } = require('../utils/mocha') const loadFixture = require('aegir/utils/fixtures') -const CID = require('cids') +const { CID } = require('multiformats/cid') const drain = require('it-drain') const map = require('it-map') const fromString = require('uint8arrays/from-string') @@ -18,23 +18,23 @@ const pinTypes = { const fixtures = Object.freeze({ // NOTE: files under 'directory' need to be different than standalone ones in 'files' directory: Object.freeze({ - cid: new CID('QmY8KdYQSYKFU5hM7F5ioZ5yYSgV5VZ1kDEdqfRL3rFgcd'), + cid: CID.parse('QmY8KdYQSYKFU5hM7F5ioZ5yYSgV5VZ1kDEdqfRL3rFgcd'), files: Object.freeze([Object.freeze({ path: 'test-folder/ipfs-add.js', data: loadFixture('test/fixtures/test-folder/ipfs-add.js', 'interface-ipfs-core'), - cid: new CID('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') + cid: CID.parse('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') }), Object.freeze({ path: 'test-folder/files/ipfs.txt', data: loadFixture('test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core'), - cid: new CID('QmdFyxZXsFiP4csgfM5uPu99AvFiKH62CSPDw5TP92nr7w') + cid: CID.parse('QmdFyxZXsFiP4csgfM5uPu99AvFiKH62CSPDw5TP92nr7w') })]) }), files: Object.freeze([Object.freeze({ data: fromString('Plz add me!\n'), - cid: new CID('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') + cid: CID.parse('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') }), Object.freeze({ data: 
loadFixture('test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), - cid: new CID('QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu') + cid: CID.parse('QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu') })]) }) diff --git a/packages/interface-ipfs-core/src/refs-local.js b/packages/interface-ipfs-core/src/refs-local.js index 6942f88afb..a137bb4d04 100644 --- a/packages/interface-ipfs-core/src/refs-local.js +++ b/packages/interface-ipfs-core/src/refs-local.js @@ -6,8 +6,9 @@ const { getDescribe, getIt, expect } = require('./utils/mocha') const all = require('it-all') const { importer } = require('ipfs-unixfs-importer') const drain = require('it-drain') -const CID = require('cids') +const { CID } = require('multiformats/cid') const uint8ArrayEquals = require('uint8arrays/equals') +const blockstore = require('./utils/blockstore-adapter') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -40,7 +41,7 @@ module.exports = (common, options) => { content('holmes.txt') ] - const imported = await all(importer(dirs, ipfs.block)) + const imported = await all(importer(dirs, blockstore(ipfs))) // otherwise go-ipfs doesn't show them in the local refs await drain(ipfs.pin.addAll(imported.map(i => i.cid))) @@ -50,17 +51,17 @@ module.exports = (common, options) => { expect( cids.find(cid => { - const multihash = new CID(cid).multihash + const multihash = CID.parse(cid).multihash.bytes - return uint8ArrayEquals(imported[0].cid.multihash, multihash) + return uint8ArrayEquals(imported[0].cid.multihash.bytes, multihash) }) ).to.be.ok() expect( cids.find(cid => { - const multihash = new CID(cid).multihash + const multihash = CID.parse(cid).multihash.bytes - return uint8ArrayEquals(imported[1].cid.multihash, multihash) + return uint8ArrayEquals(imported[1].cid.multihash.bytes, multihash) }) ).to.be.ok() }) diff --git a/packages/interface-ipfs-core/src/refs.js b/packages/interface-ipfs-core/src/refs.js index 422cda18a5..618838c221 100644 --- 
a/packages/interface-ipfs-core/src/refs.js +++ b/packages/interface-ipfs-core/src/refs.js @@ -3,14 +3,11 @@ const { getDescribe, getIt, expect } = require('./utils/mocha') const loadFixture = require('aegir/utils/fixtures') -const CID = require('cids') +const { CID } = require('multiformats/cid') const all = require('it-all') const drain = require('it-drain') const testTimeout = require('./utils/test-timeout') - -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const DAGLink = dagPB.DAGLink +const dagPb = require('@ipld/dag-pb') const { UnixFS } = require('ipfs-unixfs') @@ -65,7 +62,6 @@ module.exports = (common, options) => { } const refs = await all(ipfs.refs(p, params)) - // Sort the refs not to lock-in the iteration order // Check there was no error and the refs match what was expected expect(refs.map(r => r.ref).sort()).to.eql(expected.sort()) @@ -322,15 +318,24 @@ function getRefsTests () { function loadPbContent (ipfs, node) { const store = { - putData: async (data) => { - const res = await ipfs.block.put(new DAGNode(data).serialize()) - return res.cid + putData: (data) => { + return ipfs.block.put( + dagPb.encode({ + Data: data, + Links: [] + }) + ) }, - putLinks: async (links) => { - const res = await ipfs.block.put(new DAGNode('', links.map(({ name, cid }) => { - return new DAGLink(name, 8, cid) - })).serialize()) - return res.cid + putLinks: (links) => { + return ipfs.block.put(dagPb.encode({ + Links: links.map(({ name, cid }) => { + return { + Name: name, + Tsize: 8, + Hash: CID.parse(cid) + } + }) + })) } } return loadContent(ipfs, store, node) @@ -338,16 +343,18 @@ function loadPbContent (ipfs, node) { function loadDagContent (ipfs, node) { const store = { - putData: async (data) => { + putData: (data) => { const inner = new UnixFS({ type: 'file', data: data }) - const serialized = new DAGNode(inner.marshal()).serialize() - const res = await ipfs.block.put(serialized) - return res.cid + const serialized = dagPb.encode({ + Data: 
inner.marshal(), + Links: [] + }) + return ipfs.block.put(serialized) }, putLinks: (links) => { const obj = {} for (const { name, cid } of links) { - obj[name] = new CID(cid) + obj[name] = CID.parse(cid) } return ipfs.dag.put(obj) } diff --git a/packages/interface-ipfs-core/src/repo/gc.js b/packages/interface-ipfs-core/src/repo/gc.js index 50252a0dc1..8d0a2a5f1a 100644 --- a/packages/interface-ipfs-core/src/repo/gc.js +++ b/packages/interface-ipfs-core/src/repo/gc.js @@ -3,10 +3,24 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { DAGNode } = require('ipld-dag-pb') const all = require('it-all') const drain = require('it-drain') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { base64 } = require('multiformats/bases/base64') + +async function getBaseEncodedMultihashes (ipfs) { + const refs = await all(ipfs.refs.local()) + + return refs.map(r => base64.encode(CID.parse(r.ref).multihash.bytes)) +} + +async function shouldHaveRef (ipfs, cid) { + return expect(getBaseEncodedMultihashes(ipfs)).to.eventually.include(base64.encode(cid.multihash.bytes)) +} + +async function shouldNotHaveRef (ipfs, cid) { + return expect(getBaseEncodedMultihashes(ipfs)).to.eventually.not.include(base64.encode(cid.multihash.bytes)) +} /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -40,9 +54,6 @@ module.exports = (common, options) => { }) it('should clean up unpinned data', async () => { - // Get initial list of local blocks - const refsBeforeAdd = await all(ipfs.refs.local()) - // Add some data. 
Note: this will implicitly pin the data, which causes // some blocks to be added for the data itself and for the pinning // information that refers to the blocks @@ -51,17 +62,14 @@ module.exports = (common, options) => { // Get the list of local blocks after the add, should be bigger than // the initial list and contain hash - const refsAfterAdd = await all(ipfs.refs.local()) - expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length) - expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(cid.multihash) + await shouldHaveRef(ipfs, cid) // Run garbage collection await drain(ipfs.repo.gc()) // Get the list of local blocks after GC, should still contain the hash, // because the file is still pinned - const refsAfterGc = await all(ipfs.refs.local()) - expect(refsAfterGc.map(r => new CID(r.ref).multihash)).deep.includes(cid.multihash) + await shouldHaveRef(ipfs, cid) // Unpin the data await ipfs.pin.rm(cid) @@ -70,14 +78,10 @@ module.exports = (common, options) => { await all(ipfs.repo.gc()) // The list of local blocks should no longer contain the hash - const refsAfterUnpinAndGc = await all(ipfs.refs.local()) - expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(cid.multihash) + await shouldNotHaveRef(ipfs, cid) }) it('should clean up removed MFS files', async () => { - // Get initial list of local blocks - const refsBeforeAdd = await all(ipfs.refs.local()) - // Add a file to MFS await ipfs.files.write('/test', uint8ArrayFromString('oranges'), { create: true }) const stats = await ipfs.files.stat('/test') @@ -85,17 +89,14 @@ module.exports = (common, options) => { // Get the list of local blocks after the add, should be bigger than // the initial list and contain hash - const refsAfterAdd = await all(ipfs.refs.local()) - expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length) - expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(stats.cid.multihash) + await shouldHaveRef(ipfs, stats.cid) // Run 
garbage collection await drain(ipfs.repo.gc()) // Get the list of local blocks after GC, should still contain the hash, // because the file is in MFS - const refsAfterGc = await all(ipfs.refs.local()) - expect(refsAfterGc.map(r => new CID(r.ref).multihash)).deep.includes(stats.cid.multihash) + await shouldHaveRef(ipfs, stats.cid) // Remove the file await ipfs.files.rm('/test') @@ -104,14 +105,10 @@ module.exports = (common, options) => { await drain(ipfs.repo.gc()) // The list of local blocks should no longer contain the hash - const refsAfterUnpinAndGc = await all(ipfs.refs.local()) - expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(stats.cid.multihash) + await shouldNotHaveRef(ipfs, stats.cid) }) it('should clean up block only after unpinned and removed from MFS', async () => { - // Get initial list of local blocks - const refsBeforeAdd = await all(ipfs.refs.local()) - // Add a file to MFS await ipfs.files.write('/test', uint8ArrayFromString('peaches'), { create: true }) const stats = await ipfs.files.stat('/test') @@ -122,22 +119,19 @@ module.exports = (common, options) => { const block = await ipfs.block.get(mfsFileCid) // Add the data to IPFS (which implicitly pins the data) - const addRes = await ipfs.add(block.data) + const addRes = await ipfs.add(block) const dataCid = addRes.cid // Get the list of local blocks after the add, should be bigger than // the initial list and contain the data hash - const refsAfterAdd = await all(ipfs.refs.local()) - expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length) - expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(dataCid.multihash) + await shouldHaveRef(ipfs, dataCid) // Run garbage collection await drain(ipfs.repo.gc()) // Get the list of local blocks after GC, should still contain the hash, // because the file is pinned and in MFS - const refsAfterGc = await all(ipfs.refs.local()) - expect(refsAfterGc.map(r => new 
CID(r.ref).multihash)).deep.includes(dataCid.multihash) + await shouldHaveRef(ipfs, dataCid) // Remove the file await ipfs.files.rm('/test') @@ -147,9 +141,8 @@ module.exports = (common, options) => { // Get the list of local blocks after GC, should still contain the hash, // because the file is still pinned - const refsAfterRmAndGc = await all(ipfs.refs.local()) - expect(refsAfterRmAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(mfsFileCid.multihash) - expect(refsAfterRmAndGc.map(r => new CID(r.ref).multihash)).deep.includes(dataCid.multihash) + await shouldNotHaveRef(ipfs, mfsFileCid) + await shouldHaveRef(ipfs, dataCid) // Unpin the data await ipfs.pin.rm(dataCid) @@ -158,15 +151,11 @@ module.exports = (common, options) => { await drain(ipfs.repo.gc()) // The list of local blocks should no longer contain the hashes - const refsAfterUnpinAndGc = await all(ipfs.refs.local()) - expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(mfsFileCid.multihash) - expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(dataCid.multihash) + await shouldNotHaveRef(ipfs, mfsFileCid) + await shouldNotHaveRef(ipfs, dataCid) }) it('should clean up indirectly pinned data after recursive pin removal', async () => { - // Get initial list of local blocks - const refsBeforeAdd = await all(ipfs.refs.local()) - // Add some data const addRes = await ipfs.add(uint8ArrayFromString('pears')) const dataCid = addRes.cid @@ -175,11 +164,14 @@ module.exports = (common, options) => { await ipfs.pin.rm(dataCid) // Create a link to the data from an object - const obj = await new DAGNode(uint8ArrayFromString('fruit'), [{ - Name: 'p', - Hash: dataCid, - Tsize: addRes.size - }]) + const obj = { + Data: uint8ArrayFromString('fruit'), + Links: [{ + Name: 'p', + Hash: dataCid, + Tsize: addRes.size + }] + } // Put the object into IPFS const objCid = await ipfs.object.put(obj) @@ -189,10 +181,8 @@ module.exports = (common, options) => { // Get 
the list of local blocks after the add, should be bigger than // the initial list and contain data and object hash - const refsAfterAdd = await all(ipfs.refs.local()) - expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length) - expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(objCid.multihash) - expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(dataCid.multihash) + await shouldHaveRef(ipfs, objCid) + await shouldHaveRef(ipfs, dataCid) // Recursively pin the object await ipfs.pin.add(objCid, { recursive: true }) @@ -206,8 +196,8 @@ module.exports = (common, options) => { // Get the list of local blocks after GC, should still contain the data // hash, because the data is still (indirectly) pinned - const refsAfterGc = await all(ipfs.refs.local()) - expect(refsAfterGc.map(r => new CID(r.ref).multihash)).deep.includes(dataCid.multihash) + await shouldHaveRef(ipfs, objCid) + await shouldHaveRef(ipfs, dataCid) // Recursively unpin the object await ipfs.pin.rm(objCid.toString()) @@ -216,9 +206,8 @@ module.exports = (common, options) => { await drain(ipfs.repo.gc()) // The list of local blocks should no longer contain the hashes - const refsAfterUnpinAndGc = await all(ipfs.refs.local()) - expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(objCid.multihash) - expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(dataCid.multihash) + await shouldNotHaveRef(ipfs, objCid) + await shouldNotHaveRef(ipfs, dataCid) }) }) } diff --git a/packages/interface-ipfs-core/src/swarm/addrs.js b/packages/interface-ipfs-core/src/swarm/addrs.js index 810f80027e..ac5f6f41a2 100644 --- a/packages/interface-ipfs-core/src/swarm/addrs.js +++ b/packages/interface-ipfs-core/src/swarm/addrs.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const CID = require('cids') +const PeerId = require('peer-id') const { Multiaddr } = require('multiaddr') const { getDescribe, getIt, expect } = 
require('../utils/mocha') const { isWebWorker } = require('ipfs-utils/src/env') @@ -38,7 +38,7 @@ module.exports = (common, options) => { expect(peers).to.be.an('array') for (const peer of peers) { - expect(CID.isCID(new CID(peer.id))).to.be.true() + expect(PeerId.parse(peer.id)).to.be.ok() expect(peer).to.have.a.property('addrs').that.is.an('array') for (const ma of peer.addrs) { diff --git a/packages/interface-ipfs-core/src/swarm/peers.js b/packages/interface-ipfs-core/src/swarm/peers.js index 08b08c8f26..aab9ce9cbe 100644 --- a/packages/interface-ipfs-core/src/swarm/peers.js +++ b/packages/interface-ipfs-core/src/swarm/peers.js @@ -2,7 +2,7 @@ 'use strict' const { Multiaddr } = require('multiaddr') -const CID = require('cids') +const PeerId = require('peer-id') const delay = require('delay') const { isBrowser, isWebWorker } = require('ipfs-utils/src/env') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -44,7 +44,7 @@ module.exports = (common, options) => { expect(peer).to.have.a.property('addr') expect(Multiaddr.isMultiaddr(peer.addr)).to.equal(true) expect(peer).to.have.a.property('peer').that.is.a('string') - expect(CID.isCID(new CID(peer.peer))).to.equal(true) + expect(PeerId.parse(peer.peer)).to.be.ok() expect(peer).to.not.have.a.property('latency') /* TODO: These assertions must be uncommented as soon as diff --git a/packages/interface-ipfs-core/src/utils/blockstore-adapter.js b/packages/interface-ipfs-core/src/utils/blockstore-adapter.js new file mode 100644 index 0000000000..b635c2f123 --- /dev/null +++ b/packages/interface-ipfs-core/src/utils/blockstore-adapter.js @@ -0,0 +1,52 @@ +'use strict' + +const { BlockstoreAdapter } = require('interface-blockstore') +const raw = require('multiformats/codecs/raw') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const { sha256 } = require('multiformats/hashes/sha2') +const uint8ArrayToString = require('uint8arrays/to-string') + +const formats = { + [raw.code]: 
raw.name, + [dagPb.code]: dagPb.name, + [dagCbor.code]: dagCbor.name +} + +const hashes = { + [sha256.code]: sha256.name +} + +class IPFSBlockstore extends BlockstoreAdapter { + /** + * @param {import('ipfs-core-types').IPFS} ipfs + */ + constructor (ipfs) { + super() + + this.ipfs = ipfs + } + + /** + * @param {import('multiformats/cid').CID} cid + * @param {Uint8Array} buf + */ + async put (cid, buf) { + const c = await this.ipfs.block.put(buf, { + format: formats[cid.code], + mhtype: hashes[cid.multihash.code], + version: cid.version + }) + + if (uint8ArrayToString(c.multihash.bytes, 'base64') !== uint8ArrayToString(cid.multihash.bytes, 'base64')) { + throw new Error('Multihashes of stored blocks did not match') + } + } +} + +/** + * @param {import('ipfs-core-types').IPFS} ipfs + */ +module.exports = (ipfs) => { + return new IPFSBlockstore(ipfs) +} diff --git a/packages/interface-ipfs-core/src/utils/index.js b/packages/interface-ipfs-core/src/utils/index.js index 66b3394686..7046a117a5 100644 --- a/packages/interface-ipfs-core/src/utils/index.js +++ b/packages/interface-ipfs-core/src/utils/index.js @@ -1,5 +1,6 @@ 'use strict' +const { CID } = require('multiformats/cid') const fromString = require('uint8arrays/from-string') const loadFixture = require('aegir/utils/fixtures') @@ -7,7 +8,7 @@ const ONE_MEG = Math.pow(2, 20) exports.fixtures = Object.freeze({ directory: Object.freeze({ - cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + cid: CID.parse('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'), files: Object.freeze({ 'pp.txt': loadFixture('test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), 'holmes.txt': loadFixture('test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), @@ -18,15 +19,15 @@ exports.fixtures = Object.freeze({ }) }), smallFile: Object.freeze({ - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + cid: CID.parse('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'), data: fromString('Plz add me!\n') }), bigFile:
Object.freeze({ - cid: 'QmcKEs7mbxbGPPc2zo77E6CPwgaSbY4SmD2MFh16AqaR9e', + cid: CID.parse('QmcKEs7mbxbGPPc2zo77E6CPwgaSbY4SmD2MFh16AqaR9e'), data: Uint8Array.from(new Array(ONE_MEG * 15).fill(0)) }), emptyFile: Object.freeze({ - cid: 'QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH', + cid: CID.parse('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH'), data: new Uint8Array(0) }) }) diff --git a/packages/interface-ipfs-core/src/utils/mocha.js b/packages/interface-ipfs-core/src/utils/mocha.js index e781c8c88f..de96e039a2 100644 --- a/packages/interface-ipfs-core/src/utils/mocha.js +++ b/packages/interface-ipfs-core/src/utils/mocha.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') +const { expect } = require('aegir/utils/chai') -// Do not reorder these statements - https://github.com/chaijs/chai/issues/1298 -chai.use(require('chai-as-promised')) -chai.use(require('dirty-chai')) -chai.use(require('chai-subset')) - -module.exports.expect = chai.expect +module.exports.expect = expect const isObject = (o) => Object.prototype.toString.call(o) === '[object Object]' diff --git a/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js b/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js index 7fab114fb4..4b919e1f53 100644 --- a/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js +++ b/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js @@ -1,6 +1,6 @@ 'use strict' -module.exports = function traverseLeafNodes (ipfs, cid) { +module.exports = async function * traverseLeafNodes (ipfs, cid) { async function * traverse (cid) { const { value: node } = await ipfs.dag.get(cid) @@ -13,8 +13,10 @@ module.exports = function traverseLeafNodes (ipfs, cid) { return } - node.Links.forEach(link => traverse(link.Hash)) + for (const link of node.Links) { + yield * traverse(link.Hash) + } } - return traverse(cid) + yield * traverse(cid) } diff --git a/packages/ipfs-cli/package.json b/packages/ipfs-cli/package.json index 
c0aa2614d7..13a8fc6192 100644 --- a/packages/ipfs-cli/package.json +++ b/packages/ipfs-cli/package.json @@ -26,13 +26,13 @@ "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", "clean": "rimraf ./dist", - "dep-check": "aegir dep-check -i cid-tool -i ipfs-core-types", + "dep-check": "aegir dep-check -i ipfs-core-types", "build": "aegir build --no-bundle" }, "dependencies": { + "@ipld/dag-cbor": "^6.0.5", + "@ipld/dag-pb": "^2.1.3", "byteman": "^1.3.5", - "cid-tool": "^3.0.0", - "cids": "^1.1.6", "debug": "^4.1.1", "err-code": "^3.0.1", "execa": "^5.0.0", @@ -42,39 +42,37 @@ "ipfs-core-utils": "^0.8.3", "ipfs-daemon": "^0.7.2", "ipfs-http-client": "^50.1.2", - "ipfs-repo": "^9.1.6", - "ipfs-utils": "^8.1.2", - "ipld-dag-cbor": "^1.0.0", - "ipld-dag-pb": "^0.22.1", + "ipfs-repo": "^11.0.0", + "ipfs-utils": "^8.1.4", "it-all": "^1.0.4", "it-concat": "^2.0.0", "it-first": "^1.0.4", - "it-glob": "0.0.11", + "it-glob": "0.0.13", "it-pipe": "^1.1.0", + "it-split": "^0.0.1", "jsondiffpatch": "^0.4.1", - "libp2p-crypto": "^0.19.3", - "mafmt": "^9.0.0", - "multiaddr": "^9.0.1", - "multiaddr-to-uri": "^7.0.0", - "multibase": "^4.0.2", - "multihashing-async": "^2.1.2", + "libp2p-crypto": "^0.19.6", + "mafmt": "^10.0.0", + "multiaddr": "^10.0.0", + "multiaddr-to-uri": "^8.0.0", + "multiformats": "^9.4.1", "parse-duration": "^1.0.0", "pretty-bytes": "^5.4.1", "progress": "^2.0.3", "stream-to-it": "^0.2.2", - "streaming-iterables": "^5.0.2", - "uint8arrays": "^2.1.3", + "streaming-iterables": "^6.0.0", + "uint8arrays": "^2.1.6", "yargs": "^16.0.3" }, "devDependencies": { "@types/progress": "^2.0.3", "@types/yargs": "^16.0.0", - "aegir": "^33.0.0", + "aegir": "^34.0.2", "nanoid": "^3.1.12", "ncp": "^2.0.0", - "peer-id": "^0.14.1", + "peer-id": "^0.15.1", "rimraf": "^3.0.2", - "sinon": "^10.0.1", + "sinon": "^11.1.1", "string-argv": "^0.3.1", "temp-write": "^4.0.0" } diff --git a/packages/ipfs-cli/src/commands/add.js 
b/packages/ipfs-cli/src/commands/add.js index fa914cff09..0930e44ef1 100644 --- a/packages/ipfs-cli/src/commands/add.js +++ b/packages/ipfs-cli/src/commands/add.js @@ -6,15 +6,12 @@ const { promisify } = require('util') const getFolderSize = promisify(require('get-folder-size')) // @ts-ignore no types const byteman = require('byteman') -const mh = require('multihashing-async').multihash -const multibase = require('multibase') const { createProgressBar, coerceMtime, coerceMtimeNsecs, stripControlCharacters } = require('../utils') -const { cidToString } = require('ipfs-core-utils/src/cid') const globSource = require('ipfs-utils/src/files/glob-source') const { default: parseDuration } = require('parse-duration') @@ -95,11 +92,10 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, hash: { type: 'string', - choices: Object.keys(mh.names), describe: 'Hash function to use. Will set CID version to 1 if used. 
(experimental)', default: 'sha2-256' }, @@ -171,10 +167,10 @@ module.exports = { * @param {import('../types').Context} argv.ctx * @param {boolean} argv.trickle * @param {number} argv.shardSplitThreshold - * @param {import('cids').CIDVersion} argv.cidVersion + * @param {import('multiformats/cid').CIDVersion} argv.cidVersion * @param {boolean} argv.rawLeaves * @param {boolean} argv.onlyHash - * @param {import('multihashes').HashName} argv.hash + * @param {string} argv.hash * @param {boolean} argv.wrapWithDirectory * @param {boolean} argv.pin * @param {string} argv.chunker @@ -194,7 +190,7 @@ module.exports = { * @param {boolean} argv.preserveMode * @param {boolean} argv.preserveMtime * @param {number} argv.mode - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {boolean} argv.enableShardingExperiment */ async handler ({ @@ -305,6 +301,7 @@ module.exports = { }] // Pipe to ipfs.add tagging with mode and mtime let finalCid + const base = await ipfs.bases.getBase(cidBase) try { for await (const { cid, path } of ipfs.addAll(source, options)) { @@ -318,7 +315,7 @@ module.exports = { } const pathStr = stripControlCharacters(path) - const cidStr = cidToString(cid, { base: cidBase }) + const cidStr = cid.toString(base.encoder) let message = cidStr if (!quiet) { @@ -342,7 +339,7 @@ module.exports = { } if (quieter && finalCid) { - log(cidToString(finalCid, { base: cidBase })) + log(finalCid.toString(base.encoder)) } } } diff --git a/packages/ipfs-cli/src/commands/bitswap/stat.js b/packages/ipfs-cli/src/commands/bitswap/stat.js index 5c74614597..f7b06595a5 100644 --- a/packages/ipfs-cli/src/commands/bitswap/stat.js +++ b/packages/ipfs-cli/src/commands/bitswap/stat.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const prettyBytes = require('pretty-bytes') const { default: parseDuration } = require('parse-duration') @@ -14,7 +12,7 @@ module.exports = 
{ 'cid-base': { describe: 'Number base to display CIDs in. Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, human: { type: 'boolean', @@ -30,7 +28,7 @@ module.exports = { * @param {object} argv * @param {import('../../types').Context} argv.ctx * @param {boolean} argv.human - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx, cidBase, human, timeout }) { @@ -54,7 +52,9 @@ module.exports = { output.dupDataReceived = prettyBytes(Number(stats.dupDataReceived)).toUpperCase() output.wantlist = `[${stats.wantlist.length} keys]` } else { - const wantlist = stats.wantlist.map(cid => cidToString(cid, { base: cidBase, upgrade: false })) + const base = await ipfs.bases.getBase(cidBase) + + const wantlist = stats.wantlist.map(cid => cid.toString(base.encoder)) output.wantlist = `[${wantlist.length} keys] ${wantlist.join('\n ')}` } diff --git a/packages/ipfs-cli/src/commands/bitswap/unwant.js b/packages/ipfs-cli/src/commands/bitswap/unwant.js index c48d53d01c..99c62ca088 100644 --- a/packages/ipfs-cli/src/commands/bitswap/unwant.js +++ b/packages/ipfs-cli/src/commands/bitswap/unwant.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') const { coerceCID } = require('../../utils') @@ -20,7 +18,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in. 
Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -31,15 +29,16 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.key - * @param {import('multibase').BaseName} argv.cidBase + * @param {import('multiformats/cid').CID} argv.key + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx, key, cidBase, timeout }) { const { ipfs, print } = ctx + const base = await ipfs.bases.getBase(cidBase) await ipfs.bitswap.unwant(key, { timeout }) - print(`Key ${cidToString(key, { base: cidBase, upgrade: false })} removed from wantlist`) + print(`Key ${key.toString(base.encoder)} removed from wantlist`) } } diff --git a/packages/ipfs-cli/src/commands/bitswap/wantlist.js b/packages/ipfs-cli/src/commands/bitswap/wantlist.js index 397804c732..3db326ccc0 100644 --- a/packages/ipfs-cli/src/commands/bitswap/wantlist.js +++ b/packages/ipfs-cli/src/commands/bitswap/wantlist.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') module.exports = { @@ -18,7 +16,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in. 
Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -30,11 +28,12 @@ module.exports = { * @param {object} argv * @param {import('../../types').Context} argv.ctx * @param {string} argv.peer - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx, peer, cidBase, timeout }) { const { ipfs, print } = ctx + const base = await ipfs.bases.getBase(cidBase) let list if (peer) { @@ -47,6 +46,6 @@ module.exports = { }) } - list.forEach(cid => print(cidToString(cid, { base: cidBase, upgrade: false }))) + list.forEach(cid => print(cid.toString(base.encoder))) } } diff --git a/packages/ipfs-cli/src/commands/block/get.js b/packages/ipfs-cli/src/commands/block/get.js index 78af009a9d..fe84a8543a 100644 --- a/packages/ipfs-cli/src/commands/block/get.js +++ b/packages/ipfs-cli/src/commands/block/get.js @@ -23,7 +23,7 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {string} argv.key + * @param {import('multiformats/cid').CID} argv.key * @param {number} argv.timeout */ async handler ({ ctx, key, timeout }) { @@ -32,7 +32,7 @@ module.exports = { timeout }) if (block) { - print(uint8ArrayToString(block.data), false) + print(uint8ArrayToString(block), false) } else { print('Block was unwanted before it could be remotely retrieved') } diff --git a/packages/ipfs-cli/src/commands/block/put.js b/packages/ipfs-cli/src/commands/block/put.js index 1b23dbd1f6..8d8d1c0809 100644 --- a/packages/ipfs-cli/src/commands/block/put.js +++ b/packages/ipfs-cli/src/commands/block/put.js @@ -1,9 +1,7 @@ 'use strict' const fs = require('fs') -const multibase = require('multibase') const concat = require('it-concat') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') 
module.exports = { @@ -33,7 +31,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, pin: { describe: 'Pin this block recursively', @@ -50,12 +48,12 @@ module.exports = { * @param {object} argv * @param {import('../../types').Context} argv.ctx * @param {string} argv.block - * @param {import('multicodec').CodecName} argv.format - * @param {import('multihashes').HashName} argv.mhtype + * @param {string} argv.format + * @param {string} argv.mhtype * @param {number} argv.mhlen - * @param {import('cids').CIDVersion} argv.version + * @param {import('multiformats/cid').CIDVersion} argv.version * @param {boolean} argv.pin - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print, getStdin }, block, timeout, format, mhtype, mhlen, version, cidBase, pin }) { @@ -67,14 +65,15 @@ module.exports = { data = (await concat(getStdin(), { type: 'buffer' })).slice() } - const { cid } = await ipfs.block.put(data, { + const cid = await ipfs.block.put(data, { timeout, format, mhtype, - mhlen, version, pin }) - print(cidToString(cid, { base: cidBase })) + const base = await ipfs.bases.getBase(cidBase) + + print(cid.toString(base.encoder)) } } diff --git a/packages/ipfs-cli/src/commands/block/rm.js b/packages/ipfs-cli/src/commands/block/rm.js index 1fefce3e5e..817ca35193 100644 --- a/packages/ipfs-cli/src/commands/block/rm.js +++ b/packages/ipfs-cli/src/commands/block/rm.js @@ -34,7 +34,7 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')[]} argv.hash + * @param {import('multiformats/cid').CID[]} argv.hash * @param {boolean} argv.force * @param {boolean} argv.quiet * @param {number} argv.timeout diff --git a/packages/ipfs-cli/src/commands/block/stat.js b/packages/ipfs-cli/src/commands/block/stat.js index 
5b6ac98497..f7eba4fae3 100644 --- a/packages/ipfs-cli/src/commands/block/stat.js +++ b/packages/ipfs-cli/src/commands/block/stat.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') const { coerceCID } = require('../../utils') @@ -18,7 +16,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -29,8 +27,8 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.key - * @param {import('multibase').BaseName} argv.cidBase + * @param {import('multiformats/cid').CID} argv.key + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx, key, cidBase, timeout }) { @@ -38,7 +36,8 @@ module.exports = { const stats = await ipfs.block.stat(key, { timeout }) - print('Key: ' + cidToString(stats.cid, { base: cidBase })) + const base = await ipfs.bases.getBase(cidBase) + print('Key: ' + stats.cid.toString(base.encoder)) print('Size: ' + stats.size) } } diff --git a/packages/ipfs-cli/src/commands/cid.js b/packages/ipfs-cli/src/commands/cid.js index e2a7ab88cf..a854694f38 100644 --- a/packages/ipfs-cli/src/commands/cid.js +++ b/packages/ipfs-cli/src/commands/cid.js @@ -1,11 +1,5 @@ 'use strict' -const path = require('path') - -const cidCommandsPath = path.join( - path.dirname(require.resolve('cid-tool')), 'cli', 'commands' -) - module.exports = { command: 'cid ', @@ -15,7 +9,6 @@ module.exports = { * @param {import('yargs').Argv} yargs */ builder (yargs) { - return yargs - .commandDir(cidCommandsPath) + return yargs.commandDir('cid') } } diff --git a/packages/ipfs-cli/src/commands/cid/base32.js b/packages/ipfs-cli/src/commands/cid/base32.js new file mode 100644 index 0000000000..31cc335be7 --- /dev/null 
+++ b/packages/ipfs-cli/src/commands/cid/base32.js @@ -0,0 +1,36 @@ +'use strict' + +const split = require('it-split') +const { CID } = require('multiformats/cid') +const { base32 } = require('multiformats/bases/base32') + +module.exports = { + command: 'base32 [cids...]', + + describe: 'Convert CIDs to base 32 CID version 1.', + + /** + * @param {object} argv + * @param {import('../../types').Context} argv.ctx + * @param {string[]} [argv.cids] + */ + async handler ({ ctx: { print, getStdin }, cids }) { + let input + + if (cids && cids.length) { + input = cids + } else { + input = split(getStdin()) + } + + for await (const data of input) { + const input = data.toString().trim() + + if (!input) { + continue + } + + print(CID.parse(input).toV1().toString(base32.encoder)) + } + } +} diff --git a/packages/ipfs-cli/src/commands/cid/bases.js b/packages/ipfs-cli/src/commands/cid/bases.js new file mode 100644 index 0000000000..0a71e08300 --- /dev/null +++ b/packages/ipfs-cli/src/commands/cid/bases.js @@ -0,0 +1,40 @@ +'use strict' + +module.exports = { + command: 'bases', + + describe: 'List available multibase encoding names.', + + builder: { + prefix: { + describe: 'Display the single letter encoding codes as well as the encoding name.', + type: 'boolean', + default: false + }, + numeric: { + describe: 'Display the numeric encoding code as well as the encoding name', + type: 'boolean', + default: false + } + }, + + /** + * @param {object} argv + * @param {import('../../types').Context} argv.ctx + * @param {boolean} [argv.prefix] + * @param {boolean} [argv.numeric] + */ + handler ({ ctx: { ipfs, print }, prefix, numeric }) { + for (const base of ipfs.bases.listBases()) { + if (prefix && numeric) { + print(`${base.prefix}\t${base.prefix.charCodeAt(0)}\t${base.name}`) + } else if (prefix) { + print(`${base.prefix}\t${base.name}`) + } else if (numeric) { + print(`${base.prefix.charCodeAt(0)}\t${base.name}`) + } else { + print(base.name) + } + } + } +} diff --git 
a/packages/ipfs-cli/src/commands/cid/codecs.js b/packages/ipfs-cli/src/commands/cid/codecs.js new file mode 100644 index 0000000000..0a7a1c9a32 --- /dev/null +++ b/packages/ipfs-cli/src/commands/cid/codecs.js @@ -0,0 +1,30 @@ +'use strict' + +module.exports = { + command: 'codecs', + + describe: 'List available CID codec names.', + + builder: { + numeric: { + describe: 'Display the numeric code as well as the codec name', + type: 'boolean', + default: false + } + }, + + /** + * @param {object} argv + * @param {import('../../types').Context} argv.ctx + * @param {boolean} [argv.numeric] + */ + handler ({ ctx: { ipfs, print }, numeric }) { + for (const codec of ipfs.codecs.listCodecs()) { + if (numeric) { + print(`${codec.code}\t${codec.name}`) + } else { + print(codec.name) + } + } + } +} diff --git a/packages/ipfs-cli/src/commands/cid/format.js b/packages/ipfs-cli/src/commands/cid/format.js new file mode 100644 index 0000000000..21899c89a9 --- /dev/null +++ b/packages/ipfs-cli/src/commands/cid/format.js @@ -0,0 +1,226 @@ +'use strict' + +const split = require('it-split') +const { CID } = require('multiformats/cid') + +module.exports = { + command: 'format [cids...]', + + describe: 'Format and convert a CID in various useful ways.', + + builder: { + format: { + describe: `Printf style format string: + +%% literal % +%b multibase name +%B multibase code +%v version string +%V version number +%c codec name +%C codec code +%h multihash name +%H multihash code +%L hash digest length +%m multihash encoded in base %b (with multibase prefix) +%M multihash encoded in base %b without multibase prefix +%d hash digest encoded in base %b (with multibase prefix) +%D hash digest encoded in base %b without multibase prefix +%s cid string encoded in base %b (1) +%S cid string encoded in base %b without multibase prefix +%P cid prefix: %v-%c-%h-%L + +(1) For CID version 0 the multibase must be base58btc and no prefix is used. 
For Cid version 1 the multibase prefix is included.`, + alias: 'f', + type: 'string', + default: '%s' + }, + 'cid-version': { + describe: 'CID version to convert to.', + alias: 'v', + type: 'number' + }, + base: { + describe: 'Multibase to display output in.', + alias: 'b', + type: 'string' + } + }, + + /** + * @param {object} argv + * @param {import('../../types').Context} argv.ctx + * @param {string[]} [argv.cids] + * @param {string} [argv.format] + * @param {import('multiformats/cid').CIDVersion} [argv.cidVersion] + * @param {string} [argv.base] + */ + async handler ({ ctx: { ipfs, print, getStdin }, cids, format, cidVersion, base }) { + let input + + if (cids && cids.length) { + input = cids + } else { + input = split(getStdin()) + } + + let formatStr = format || '%s' + + if (formatStr === 'prefix') { + formatStr = '%P' + } + + if (typeof formatStr !== 'string' || formatStr.indexOf('%') === -1) { + throw new Error(`invalid format string: ${formatStr}`) + } + + for await (const data of input) { + const str = data.toString().trim() + + if (!str) { + continue + } + + let cid = CID.parse(str) + + if (cidVersion != null && cid.version !== cidVersion) { + if (cidVersion === 0) { + cid = cid.toV0() + } else if (cidVersion === 1) { + cid = cid.toV1() + } else { + throw new Error(`invalid cid version: ${cidVersion}`) + } + } + + let cidBase = findBase(str, ipfs) + + if (base) { + const foundBase = ipfs.bases.listBases().find(b => b.name === base) + + if (!foundBase) { + throw new Error(`invalid base prefix: ${str.substring(0, 1)}`) + } + + cidBase = foundBase + } + + print(formatStr.replace(/%([a-zA-Z%])/g, replacer(cid, cidBase, ipfs))) + } + } +} + +/** + * @param {CID} cid + * @param {import('multiformats/bases/interface').MultibaseCodec} base + * @param {import('ipfs-core-types').IPFS} ipfs + * @returns {(match: any, specifier: string) => string} + */ +function replacer (cid, base, ipfs) { + /** + * @param {*} match + * @param {string} specifier + */ + const replace 
= (match, specifier) => { + switch (specifier) { + case '%': + return '%' + case 'b': // base name + return base.name + case 'B': // base code + return base.prefix + case 'v': // version string + return `cidv${cid.version}` + case 'V': // version num + return cid.version.toString() + case 'c': // codec name + return findCodec(cid, ipfs).name + case 'C': // codec code + return cid.code + case 'h': // hash fun name + return findHasher(cid, ipfs).name + case 'H': // hash fun code + return findHasher(cid, ipfs).code + case 'L': // hash length + return cid.multihash.size.toString() + case 'm': // multihash encoded in base %b + return base.encoder.encode(cid.multihash.bytes) + case 'M': // multihash encoded in base %b without base prefix + return base.encoder.encode(cid.multihash.bytes).substring(1) + case 'd': // hash digest encoded in base %b + return base.encoder.encode(cid.multihash.digest) + case 'D': // hash digest encoded in base %b without base prefix + return base.encoder.encode(cid.multihash.digest).substring(1) + case 's': // cid string encoded in base %b + return base.encoder.encode(cid.bytes).slice(cid.version === 0 && base.name === 'base58btc' ? 
1 : 0) + case 'S': // cid string without base prefix + return base.encoder.encode(cid.bytes).slice(1) + case 'P': // prefix + return prefix(cid, ipfs) + + default: + throw new Error(`unrecognized specifier in format string: ${specifier}`) + } + } + + return replace +} + +/** + * @param {string} str + * @param {import('ipfs-core-types').IPFS} ipfs + */ +function findBase (str, ipfs) { + if (CID.parse(str).version === 0) { + // force a match for base58btc for CIDv0, assuming it's configured + str = `z${str}` + } + + const prefix = str.substring(0, 1) + const base = ipfs.bases.listBases().find(b => b.prefix === prefix) + + if (!base) { + throw new Error(`invalid base prefix: ${str.substring(0, 1)}`) + } + + return base +} + +/** + * @param {CID} cid + * @param {import('ipfs-core-types').IPFS} ipfs + */ +function findCodec (cid, ipfs) { + const codec = ipfs.codecs.listCodecs().find(c => c.code === cid.code) + + if (!codec) { + throw new Error(`invalid codec: ${cid.code}`) + } + + return codec +} + +/** + * @param {CID} cid + * @param {import('ipfs-core-types').IPFS} ipfs + */ +function findHasher (cid, ipfs) { + const codec = ipfs.hashers.listHashers().find(h => h.code === cid.multihash.code) + + if (!codec) { + throw new Error(`invalid codec: ${cid.code}`) + } + + return codec +} + +/** + * @param {CID} cid + * @param {import('ipfs-core-types').IPFS} ipfs + */ +function prefix (cid, ipfs) { + const hasher = findHasher(cid, ipfs) + const codec = findCodec(cid, ipfs) + + return `cidv${cid.version}-${codec.name}-${hasher.name}-${cid.multihash.size}` +} diff --git a/packages/ipfs-cli/src/commands/cid/hashes.js b/packages/ipfs-cli/src/commands/cid/hashes.js new file mode 100644 index 0000000000..ff8e950a3c --- /dev/null +++ b/packages/ipfs-cli/src/commands/cid/hashes.js @@ -0,0 +1,30 @@ +'use strict' + +module.exports = { + command: 'hashes', + + describe: 'List available multihash hashing algorithm names.', + + builder: { + numeric: { + describe: 'Display the numeric code 
as well as the hashing algorithm name', + type: 'boolean', + default: false + } + }, + + /** + * @param {object} argv + * @param {import('../../types').Context} argv.ctx + * @param {boolean} [argv.numeric] + */ + handler ({ ctx: { ipfs, print }, numeric }) { + for (const codec of ipfs.hashers.listHashers()) { + if (numeric) { + print(`${codec.code}\t${codec.name}`) + } else { + print(codec.name) + } + } + } +} diff --git a/packages/ipfs-cli/src/commands/dag/get.js b/packages/ipfs-cli/src/commands/dag/get.js index c8c1031010..c0a5375974 100644 --- a/packages/ipfs-cli/src/commands/dag/get.js +++ b/packages/ipfs-cli/src/commands/dag/get.js @@ -3,13 +3,14 @@ const { default: parseDuration } = require('parse-duration') const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') const uint8ArrayToString = require('uint8arrays/to-string') -const { cidToString } = require('ipfs-core-utils/src/cid') const { stripControlCharacters, makeEntriesPrintable, escapeControlCharacters } = require('../../utils') -const multibase = require('multibase') +const dagPB = require('@ipld/dag-pb') +const dagCBOR = require('@ipld/dag-cbor') +const raw = require('multiformats/codecs/raw') module.exports = { command: 'get ', @@ -24,7 +25,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, 'data-enc': { describe: 'String encoding to display data in.', @@ -42,7 +43,7 @@ module.exports = { * @param {object} argv * @param {import('../../types').Context} argv.ctx * @param {string} argv.cidpath - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {'base16' | 'base64' | 'base58btc'} argv.dataEnc * @param {boolean} argv.localResolve * @param {number} argv.timeout @@ -74,9 +75,10 @@ module.exports = { } const node = result.value + const base = await ipfs.bases.getBase(cidBase) - if (cid.codec === 'dag-pb') { - /** @type 
{import('ipld-dag-pb').DAGNode} */ + if (cid.code === dagPB.code) { + /** @type {import('@ipld/dag-pb').PBNode} */ const dagNode = node print(JSON.stringify({ @@ -84,13 +86,13 @@ module.exports = { links: (dagNode.Links || []).map(link => ({ Name: stripControlCharacters(link.Name), Size: link.Tsize, - Cid: { '/': cidToString(link.Hash, { base: cidBase }) } + Cid: { '/': link.Hash.toString(base.encoder) } })) })) - } else if (cid.codec === 'raw') { + } else if (cid.code === raw.code) { print(uint8ArrayToString(node, dataEnc)) - } else if (cid.codec === 'dag-cbor') { - print(JSON.stringify(makeEntriesPrintable(node, cidBase))) + } else if (cid.code === dagCBOR.code) { + print(JSON.stringify(makeEntriesPrintable(node, base))) } else { print(escapeControlCharacters(node.toString())) } diff --git a/packages/ipfs-cli/src/commands/dag/put.js b/packages/ipfs-cli/src/commands/dag/put.js index fd4c1cb81a..0d686f1868 100644 --- a/packages/ipfs-cli/src/commands/dag/put.js +++ b/packages/ipfs-cli/src/commands/dag/put.js @@ -1,12 +1,9 @@ 'use strict' -const mh = require('multihashing-async').multihash -const multibase = require('multibase') -const dagCBOR = require('ipld-dag-cbor') -const dagPB = require('ipld-dag-pb') +const dagCBOR = require('@ipld/dag-cbor') +const dagPB = require('@ipld/dag-pb') const concat = require('it-concat') -const CID = require('cids') -const { cidToString } = require('ipfs-core-utils/src/cid') +const { CID } = require('multiformats/cid') const { default: parseDuration } = require('parse-duration') /** @@ -18,8 +15,8 @@ const { default: parseDuration } = require('parse-duration') */ const inputDecoders = { json: (buf) => JSON.parse(buf.toString()), - cbor: (buf) => dagCBOR.util.deserialize(buf), - protobuf: (buf) => dagPB.util.deserialize(buf), + cbor: (buf) => dagCBOR.decode(buf), + protobuf: (buf) => dagPB.decode(buf), raw: (buf) => buf } @@ -66,8 +63,7 @@ module.exports = { type: 'string', alias: 'hash', default: 'sha2-256', - describe: 'Hash 
function to use', - choices: Object.keys(mh.names) + describe: 'Hash function to use' }, 'cid-version': { type: 'integer', @@ -77,7 +73,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, preload: { type: 'boolean', @@ -101,10 +97,10 @@ module.exports = { * @param {string} argv.data * @param {'dag-cbor' | 'dag-pb' | 'raw' | 'cbor' | 'protobuf'} argv.format * @param {'json' | 'cbor' | 'raw' | 'protobuf'} argv.inputEncoding - * @param {import('cids').CIDVersion} argv.cidVersion + * @param {import('multiformats/cid').CIDVersion} argv.cidVersion * @param {boolean} argv.pin - * @param {import('multihashes').HashName} argv.hashAlg - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.hashAlg + * @param {string} argv.cidBase * @param {boolean} argv.preload * @param {boolean} argv.onlyHash * @param {number} argv.timeout @@ -149,8 +145,9 @@ module.exports = { pin, timeout }) + const base = await ipfs.bases.getBase(cidBase) - print(cidToString(cid, { base: cidBase })) + print(cid.toString(base.encoder)) } } @@ -169,7 +166,7 @@ function objectSlashToCID (obj) { if (typeof obj['/'] !== 'string') { throw new Error('link should have been a string') } - return new CID(obj['/']) // throws if not a CID - consistent with go-ipfs + return CID.parse(obj['/']) // throws if not a CID - consistent with go-ipfs } return keys.reduce((obj, key) => { diff --git a/packages/ipfs-cli/src/commands/dht/find-providers.js b/packages/ipfs-cli/src/commands/dht/find-providers.js index 0150b430f0..d73c6893f1 100644 --- a/packages/ipfs-cli/src/commands/dht/find-providers.js +++ b/packages/ipfs-cli/src/commands/dht/find-providers.js @@ -28,7 +28,7 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.key + * @param {import('multiformats/cid').CID} argv.key * @param {number} argv.numProviders 
* @param {number} argv.timeout */ diff --git a/packages/ipfs-cli/src/commands/dht/get.js b/packages/ipfs-cli/src/commands/dht/get.js index 4734c7d769..5f20acff10 100644 --- a/packages/ipfs-cli/src/commands/dht/get.js +++ b/packages/ipfs-cli/src/commands/dht/get.js @@ -23,7 +23,7 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.key + * @param {import('multiformats/cid').CID} argv.key * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print }, key, timeout }) { diff --git a/packages/ipfs-cli/src/commands/dht/provide.js b/packages/ipfs-cli/src/commands/dht/provide.js index 63e747053e..ac9c7ba3cc 100644 --- a/packages/ipfs-cli/src/commands/dht/provide.js +++ b/packages/ipfs-cli/src/commands/dht/provide.js @@ -28,7 +28,7 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.key + * @param {import('multiformats/cid').CID} argv.key * @param {boolean} argv.recursive * @param {number} argv.timeout */ diff --git a/packages/ipfs-cli/src/commands/files/chmod.js b/packages/ipfs-cli/src/commands/files/chmod.js index f45d03454a..338b55ebb6 100644 --- a/packages/ipfs-cli/src/commands/files/chmod.js +++ b/packages/ipfs-cli/src/commands/files/chmod.js @@ -56,7 +56,7 @@ module.exports = { * @param {string} argv.path * @param {number} argv.mode * @param {boolean} argv.recursive - * @param {import('multihashes').HashName} argv.hashAlg + * @param {string} argv.hashAlg * @param {boolean} argv.flush * @param {number} argv.shardSplitThreshold * @param {number} argv.timeout diff --git a/packages/ipfs-cli/src/commands/files/cp.js b/packages/ipfs-cli/src/commands/files/cp.js index 8d5ba3dc56..9ec249f98d 100644 --- a/packages/ipfs-cli/src/commands/files/cp.js +++ b/packages/ipfs-cli/src/commands/files/cp.js @@ -48,7 +48,7 @@ module.exports = { * @param {string} argv.source * @param {string} argv.dest * @param {boolean} 
argv.parents - * @param {import('multihashes').HashName} argv.hashAlg + * @param {string} argv.hashAlg * @param {boolean} argv.flush * @param {number} argv.shardSplitThreshold * @param {number} argv.timeout diff --git a/packages/ipfs-cli/src/commands/files/flush.js b/packages/ipfs-cli/src/commands/files/flush.js index 826fde50f3..79cf5dc5fb 100644 --- a/packages/ipfs-cli/src/commands/files/flush.js +++ b/packages/ipfs-cli/src/commands/files/flush.js @@ -9,7 +9,8 @@ module.exports = { builder: { 'cid-base': { - describe: 'CID base to use.' + describe: 'CID base to use', + default: 'base58btc' }, timeout: { type: 'string', @@ -21,7 +22,7 @@ module.exports = { * @param {object} argv * @param {import('../../types').Context} argv.ctx * @param {string} argv.path - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ @@ -30,16 +31,14 @@ module.exports = { cidBase, timeout }) { - let cid = await ipfs.files.flush(path || '/', { + const cid = await ipfs.files.flush(path || '/', { timeout }) - if (cidBase && cidBase !== 'base58btc' && cid.version === 0) { - cid = cid.toV1() - } + const base = await ipfs.bases.getBase(cidBase) print(JSON.stringify({ - Cid: cid.toString(cidBase) + Cid: cid.toString(base.encoder) })) } } diff --git a/packages/ipfs-cli/src/commands/files/ls.js b/packages/ipfs-cli/src/commands/files/ls.js index f3307bc643..91f73bf837 100644 --- a/packages/ipfs-cli/src/commands/files/ls.js +++ b/packages/ipfs-cli/src/commands/files/ls.js @@ -22,7 +22,8 @@ module.exports = { describe: 'Use long listing format.' }, 'cid-base': { - describe: 'CID base to use.' 
+ describe: 'CID base to use.', + default: 'base58btc' }, timeout: { type: 'string', @@ -35,7 +36,7 @@ module.exports = { * @param {import('../../types').Context} argv.ctx * @param {string} argv.path * @param {boolean} argv.long - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ @@ -45,6 +46,8 @@ module.exports = { cidBase, timeout }) { + const base = await ipfs.bases.getBase(cidBase) + /** * @param {import('ipfs-core-types/src/files').MFSEntry} file */ @@ -52,7 +55,7 @@ module.exports = { const name = stripControlCharacters(file.name) if (long) { - print(`${file.mode ? formatMode(file.mode, file.type === 'directory') : ''}\t${file.mtime ? formatMtime(file.mtime) : ''}\t${name}\t${file.cid.toString(cidBase)}\t${file.size}`) + print(`${file.mode ? formatMode(file.mode, file.type === 'directory') : ''}\t${file.mtime ? formatMtime(file.mtime) : ''}\t${name}\t${file.cid.toString(base.encoder)}\t${file.size}`) } else { print(name) } diff --git a/packages/ipfs-cli/src/commands/files/mkdir.js b/packages/ipfs-cli/src/commands/files/mkdir.js index 278ff855a0..a8ab03c57a 100644 --- a/packages/ipfs-cli/src/commands/files/mkdir.js +++ b/packages/ipfs-cli/src/commands/files/mkdir.js @@ -72,8 +72,8 @@ module.exports = { * @param {import('../../types').Context} argv.ctx * @param {string} argv.path * @param {boolean} argv.parents - * @param {import('cids').CIDVersion} argv.cidVersion - * @param {import('multihashes').HashName} argv.hashAlg + * @param {import('multiformats/cid').CIDVersion} argv.cidVersion + * @param {string} argv.hashAlg * @param {boolean} argv.flush * @param {number} argv.shardSplitThreshold * @param {number} argv.mode diff --git a/packages/ipfs-cli/src/commands/files/mv.js b/packages/ipfs-cli/src/commands/files/mv.js index 510d89c031..3bf58e8a5c 100644 --- a/packages/ipfs-cli/src/commands/files/mv.js +++ b/packages/ipfs-cli/src/commands/files/mv.js @@ -54,8 +54,8 @@ 
module.exports = { * @param {string} argv.source * @param {string} argv.dest * @param {boolean} argv.parents - * @param {import('cids').CIDVersion} argv.cidVersion - * @param {import('multihashes').HashName} argv.hashAlg + * @param {import('multiformats/cid').CIDVersion} argv.cidVersion + * @param {string} argv.hashAlg * @param {boolean} argv.flush * @param {number} argv.shardSplitThreshold * @param {number} argv.timeout diff --git a/packages/ipfs-cli/src/commands/files/stat.js b/packages/ipfs-cli/src/commands/files/stat.js index 975da6240c..5598e6221a 100644 --- a/packages/ipfs-cli/src/commands/files/stat.js +++ b/packages/ipfs-cli/src/commands/files/stat.js @@ -47,7 +47,8 @@ Mtime: `, describe: 'Compute the amount of the dag that is local, and if possible the total size' }, 'cid-base': { - describe: 'CID base to use.' + describe: 'CID base to use.', + default: 'base58btc' }, timeout: { type: 'string', @@ -63,10 +64,10 @@ Mtime: `, * @param {boolean} argv.hash * @param {boolean} argv.size * @param {boolean} argv.withLocal - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ - handler ({ + async handler ({ ctx: { ipfs, print }, path, format, @@ -76,28 +77,28 @@ Mtime: `, cidBase, timeout }) { - return ipfs.files.stat(path, { + const stats = await ipfs.files.stat(path, { withLocal, timeout }) - .then((stats) => { - if (hash) { - return print(stats.cid.toString(cidBase)) - } + const base = await ipfs.bases.getBase(cidBase) - if (size) { - return print(`${stats.size}`) - } + if (hash) { + return print(stats.cid.toString(base.encoder)) + } + + if (size) { + return print(`${stats.size}`) + } - print(format - .replace('', stats.cid.toString(cidBase)) - .replace('', `${stats.size}`) - .replace('', `${stats.cumulativeSize}`) - .replace('', `${stats.blocks}`) - .replace('', stats.type) - .replace('', stats.mode ? formatMode(stats.mode, stats.type === 'directory') : '') - .replace('', stats.mtime ? 
formatMtime(stats.mtime) : '') - ) - }) + print(format + .replace('', stats.cid.toString(base.encoder)) + .replace('', `${stats.size}`) + .replace('', `${stats.cumulativeSize}`) + .replace('', `${stats.blocks}`) + .replace('', stats.type) + .replace('', stats.mode ? formatMode(stats.mode, stats.type === 'directory') : '') + .replace('', stats.mtime ? formatMtime(stats.mtime) : '') + ) } } diff --git a/packages/ipfs-cli/src/commands/files/touch.js b/packages/ipfs-cli/src/commands/files/touch.js index 0478b9dd3c..df63c46204 100644 --- a/packages/ipfs-cli/src/commands/files/touch.js +++ b/packages/ipfs-cli/src/commands/files/touch.js @@ -60,8 +60,8 @@ module.exports = { * @param {import('../../types').Context} argv.ctx * @param {string} argv.path * @param {boolean} argv.flush - * @param {import('cids').CIDVersion} argv.cidVersion - * @param {import('multihashes').HashName} argv.hashAlg + * @param {import('multiformats/cid').CIDVersion} argv.cidVersion + * @param {string} argv.hashAlg * @param {number} argv.shardSplitThreshold * @param {number} argv.mtime * @param {number} argv.mtimeNsecs diff --git a/packages/ipfs-cli/src/commands/files/write.js b/packages/ipfs-cli/src/commands/files/write.js index bbadaba144..e19c91374d 100644 --- a/packages/ipfs-cli/src/commands/files/write.js +++ b/packages/ipfs-cli/src/commands/files/write.js @@ -117,8 +117,8 @@ module.exports = { * @param {boolean} argv.truncate * @param {boolean} argv.rawLeaves * @param {boolean} argv.reduceSingleLeafToSelf - * @param {import('cids').CIDVersion} argv.cidVersion - * @param {import('multihashes').HashName} argv.hashAlg + * @param {import('multiformats/cid').CIDVersion} argv.cidVersion + * @param {string} argv.hashAlg * @param {boolean} argv.parents * @param {'trickle' | 'balanced'} argv.strategy * @param {boolean} argv.flush diff --git a/packages/ipfs-cli/src/commands/init.js b/packages/ipfs-cli/src/commands/init.js index f28340504b..f3544c67dc 100644 --- a/packages/ipfs-cli/src/commands/init.js 
+++ b/packages/ipfs-cli/src/commands/init.js @@ -83,11 +83,10 @@ module.exports = { // Required inline to reduce startup time const IPFS = require('ipfs-core') - const Repo = require('ipfs-repo') try { await IPFS.create({ - repo: new Repo(repoPath), + repo: repoPath, init: { algorithm: argv.algorithm, bits: argv.bits, diff --git a/packages/ipfs-cli/src/commands/ls.js b/packages/ipfs-cli/src/commands/ls.js index 1305ade633..268d658489 100644 --- a/packages/ipfs-cli/src/commands/ls.js +++ b/packages/ipfs-cli/src/commands/ls.js @@ -1,8 +1,6 @@ 'use strict' -const multibase = require('multibase') const { rightpad, stripControlCharacters } = require('../utils') -const { cidToString } = require('ipfs-core-utils/src/cid') const formatMode = require('ipfs-core-utils/src/files/format-mode') const formatMtime = require('ipfs-core-utils/src/files/format-mtime') const { default: parseDuration } = require('parse-duration') @@ -33,7 +31,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -47,7 +45,7 @@ module.exports = { * @param {string} argv.key * @param {boolean} argv.recursive * @param {boolean} argv.headers - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print }, key, recursive, headers, cidBase, timeout }) { @@ -99,10 +97,12 @@ module.exports = { ) } + const base = await ipfs.bases.getBase(cidBase) + for await (const link of ipfs.ls(key, { recursive, timeout })) { const mode = link.mode != null ? formatMode(link.mode, link.type === 'dir') : '' const mtime = link.mtime != null ? formatMtime(link.mtime) : '-' - const cid = cidToString(link.cid, { base: cidBase }) + const cid = link.cid.toString(base.encoder) const size = link.size ? String(link.size) : '-' const name = stripControlCharacters(link.type === 'dir' ? 
`${link.name || ''}/` : link.name) diff --git a/packages/ipfs-cli/src/commands/name/publish.js b/packages/ipfs-cli/src/commands/name/publish.js index 645bd963b2..15cede030f 100644 --- a/packages/ipfs-cli/src/commands/name/publish.js +++ b/packages/ipfs-cli/src/commands/name/publish.js @@ -46,7 +46,7 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.ipfsPath + * @param {import('multiformats/cid').CID} argv.ipfsPath * @param {boolean} argv.resolve * @param {string} argv.lifetime * @param {string} argv.key diff --git a/packages/ipfs-cli/src/commands/object/data.js b/packages/ipfs-cli/src/commands/object/data.js index f0e3334a1b..f6783159e9 100644 --- a/packages/ipfs-cli/src/commands/object/data.js +++ b/packages/ipfs-cli/src/commands/object/data.js @@ -22,7 +22,7 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.key + * @param {import('multiformats/cid').CID} argv.key * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print }, key, timeout }) { diff --git a/packages/ipfs-cli/src/commands/object/get.js b/packages/ipfs-cli/src/commands/object/get.js index c54a0edf04..5902c5e2f9 100644 --- a/packages/ipfs-cli/src/commands/object/get.js +++ b/packages/ipfs-cli/src/commands/object/get.js @@ -1,7 +1,6 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') +const dagPB = require('@ipld/dag-pb') const { default: parseDuration } = require('parse-duration') const uint8ArrayToString = require('uint8arrays/to-string') const { @@ -26,7 +25,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in. 
Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -37,15 +36,15 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.key + * @param {import('multiformats/cid').CID} argv.key * @param {'base64' | 'text' | 'hex'} argv.dataEncoding - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print }, key, dataEncoding, cidBase, timeout }) { const node = await ipfs.object.get(key, { timeout }) - /** @type {import('multibase').BaseName | 'utf8' | 'utf-8' | 'ascii' | undefined} */ + /** @type {string | undefined} */ let encoding if (dataEncoding === 'base64') { @@ -60,15 +59,19 @@ module.exports = { encoding = 'base16' } + const buf = dagPB.encode(node) + const base = await ipfs.bases.getBase(cidBase) + const answer = { - Data: uint8ArrayToString(node.Data, encoding), - Hash: cidToString(key, { base: cidBase, upgrade: false }), - Size: node.size, + // @ts-ignore encoding type is wrong + Data: node.Data ? 
uint8ArrayToString(node.Data, encoding) : '', + Hash: key.toString(base.encoder), + Size: buf.length, Links: node.Links.map((l) => { return { Name: stripControlCharacters(l.Name), Size: l.Tsize, - Hash: cidToString(l.Hash, { base: cidBase, upgrade: false }) + Hash: l.Hash.toString(base.encoder) } }) } diff --git a/packages/ipfs-cli/src/commands/object/links.js b/packages/ipfs-cli/src/commands/object/links.js index cd1adbdc15..fb02cfdc20 100644 --- a/packages/ipfs-cli/src/commands/object/links.js +++ b/packages/ipfs-cli/src/commands/object/links.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') const { stripControlCharacters, @@ -21,7 +19,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in. Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -32,15 +30,16 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.key - * @param {import('multibase').BaseName} argv.cidBase + * @param {import('multiformats/cid').CID} argv.key + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print }, key, cidBase, timeout }) { const links = await ipfs.object.links(key, { timeout }) + const base = await ipfs.bases.getBase(cidBase) links.forEach((link) => { - const cidStr = cidToString(link.Hash, { base: cidBase, upgrade: false }) + const cidStr = link.Hash.toString(base.encoder) print(`${cidStr} ${link.Tsize} ${stripControlCharacters(link.Name)}`) }) } diff --git a/packages/ipfs-cli/src/commands/object/new.js b/packages/ipfs-cli/src/commands/object/new.js index 3758cd5ff1..0c8d87f91a 100644 --- a/packages/ipfs-cli/src/commands/object/new.js +++ 
b/packages/ipfs-cli/src/commands/object/new.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') module.exports = { @@ -13,7 +11,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in. Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -25,7 +23,7 @@ module.exports = { * @param {object} argv * @param {import('../../types').Context} argv.ctx * @param {'unixfs-dir'} argv.template - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print }, template, cidBase, timeout }) { @@ -33,6 +31,7 @@ module.exports = { template, timeout }) - print(cidToString(cid, { base: cidBase, upgrade: false })) + const base = await ipfs.bases.getBase(cidBase) + print(cid.toString(base.encoder)) } } diff --git a/packages/ipfs-cli/src/commands/object/patch/add-link.js b/packages/ipfs-cli/src/commands/object/patch/add-link.js index 2c27d62d85..9176346dc2 100644 --- a/packages/ipfs-cli/src/commands/object/patch/add-link.js +++ b/packages/ipfs-cli/src/commands/object/patch/add-link.js @@ -1,9 +1,6 @@ 'use strict' -const dagPB = require('ipld-dag-pb') -const DAGLink = dagPB.DAGLink -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') +const dagPB = require('@ipld/dag-pb') const { default: parseDuration } = require('parse-duration') const { coerceCID } = require('../../../utils') @@ -24,12 +21,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in. 
Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) - }, - 'cid-version': { - describe: 'The CID version of the DAGNode to link to', - type: 'number', - default: 0 + default: 'base58btc' }, timeout: { type: 'string', @@ -40,20 +32,21 @@ module.exports = { /** * @param {object} argv * @param {import('../../../types').Context} argv.ctx - * @param {import('cids')} argv.root + * @param {import('multiformats/cid').CID} argv.root * @param {string} argv.name - * @param {import('cids')} argv.ref - * @param {import('multibase').BaseName} argv.cidBase - * @param {import('cids').CIDVersion} argv.cidVersion + * @param {import('multiformats/cid').CID} argv.ref + * @param {string} argv.cidBase * @param {number} argv.timeout */ - async handler ({ ctx: { ipfs, print }, root, name, ref, cidBase, cidVersion, timeout }) { + async handler ({ ctx: { ipfs, print }, root, name, ref, cidBase, timeout }) { const nodeA = await ipfs.object.get(ref, { timeout }) - const result = await dagPB.util.cid(dagPB.util.serialize(nodeA), { - cidVersion - }) - const link = new DAGLink(name, nodeA.size, result) - const cid = await ipfs.object.patch.addLink(root, link, { timeout }) - print(cidToString(cid, { base: cidBase, upgrade: false })) + const block = dagPB.encode(nodeA) + const cid = await ipfs.object.patch.addLink(root, { + Name: name, + Hash: ref, + Tsize: block.length + }, { timeout }) + const base = await ipfs.bases.getBase(cidBase) + print(cid.toString(base.encoder)) } } diff --git a/packages/ipfs-cli/src/commands/object/patch/append-data.js b/packages/ipfs-cli/src/commands/object/patch/append-data.js index b5063c5891..4da21c79d3 100644 --- a/packages/ipfs-cli/src/commands/object/patch/append-data.js +++ b/packages/ipfs-cli/src/commands/object/patch/append-data.js @@ -2,8 +2,6 @@ const concat = require('it-concat') const fs = require('fs') -const multibase = require('multibase') -const { cidToString } = 
require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') const { coerceCID } = require('../../../utils') @@ -20,7 +18,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in. Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -31,9 +29,9 @@ module.exports = { /** * @param {object} argv * @param {import('../../../types').Context} argv.ctx - * @param {import('cids')} argv.root + * @param {import('multiformats/cid').CID} argv.root * @param {string} argv.data - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print, getStdin }, root, data, cidBase, timeout }) { @@ -48,7 +46,8 @@ module.exports = { const cid = await ipfs.object.patch.appendData(root, buf, { timeout }) + const base = await ipfs.bases.getBase(cidBase) - print(cidToString(cid, { base: cidBase, upgrade: false })) + print(cid.toString(base.encoder)) } } diff --git a/packages/ipfs-cli/src/commands/object/patch/rm-link.js b/packages/ipfs-cli/src/commands/object/patch/rm-link.js index 7ec1137cc0..0fddde0863 100644 --- a/packages/ipfs-cli/src/commands/object/patch/rm-link.js +++ b/packages/ipfs-cli/src/commands/object/patch/rm-link.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') const { coerceCID } = require('../../../utils') @@ -18,7 +16,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in. 
Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -29,16 +27,17 @@ module.exports = { /** * @param {object} argv * @param {import('../../../types').Context} argv.ctx - * @param {import('cids')} argv.root + * @param {import('multiformats/cid').CID} argv.root * @param {string} argv.link - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print }, root, link, cidBase, timeout }) { const cid = await ipfs.object.patch.rmLink(root, link, { timeout }) + const base = await ipfs.bases.getBase(cidBase) - print(cidToString(cid, { base: cidBase, upgrade: false })) + print(cid.toString(base.encoder)) } } diff --git a/packages/ipfs-cli/src/commands/object/patch/set-data.js b/packages/ipfs-cli/src/commands/object/patch/set-data.js index 453bc5547f..cf7faea973 100644 --- a/packages/ipfs-cli/src/commands/object/patch/set-data.js +++ b/packages/ipfs-cli/src/commands/object/patch/set-data.js @@ -2,8 +2,6 @@ const fs = require('fs') const concat = require('it-concat') -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') const { coerceCID } = require('../../../utils') @@ -20,7 +18,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in. 
Note: specifying a CID base for v0 CIDs will have no effect.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -31,9 +29,9 @@ module.exports = { /** * @param {object} argv * @param {import('../../../types').Context} argv.ctx - * @param {import('cids')} argv.root + * @param {import('multiformats/cid').CID} argv.root * @param {string} argv.data - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print, getStdin }, root, data, cidBase, timeout }) { @@ -49,6 +47,8 @@ module.exports = { timeout }) - print(cidToString(cid, { base: cidBase, upgrade: false })) + const base = await ipfs.bases.getBase(cidBase) + + print(cid.toString(base.encoder)) } } diff --git a/packages/ipfs-cli/src/commands/object/put.js b/packages/ipfs-cli/src/commands/object/put.js index b9d6e2a46b..a9016dbbb0 100644 --- a/packages/ipfs-cli/src/commands/object/put.js +++ b/packages/ipfs-cli/src/commands/object/put.js @@ -1,10 +1,10 @@ 'use strict' -const fs = require('fs') const concat = require('it-concat') -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') +const dagPB = require('@ipld/dag-pb') const { default: parseDuration } = require('parse-duration') +const uint8arrayToString = require('uint8arrays/to-string') +const uint8arrayFromString = require('uint8arrays/from-string') module.exports = { command: 'put [data]', @@ -14,12 +14,13 @@ module.exports = { builder: { 'input-enc': { type: 'string', + choices: ['json', 'protobuf'], default: 'json' }, 'cid-base': { - describe: 'Number base to display CIDs in. 
Note: specifying a CID base for v0 CIDs will have no effect.', + describe: 'Number base to display CIDs in', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -31,20 +32,30 @@ module.exports = { * @param {object} argv * @param {import('../../types').Context} argv.ctx * @param {string} argv.data - * @param {import('ipfs-core-types/src/object').PutEncoding} argv.inputEnc - * @param {import('multibase').BaseName} argv.cidBase + * @param {'json' | 'protobuf'} argv.inputEnc + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print, getStdin }, data, inputEnc, cidBase, timeout }) { let buf if (data) { - buf = fs.readFileSync(data) + buf = uint8arrayFromString(data) } else { buf = (await concat(getStdin(), { type: 'buffer' })).slice() } - const cid = await ipfs.object.put(buf, { enc: inputEnc, timeout }) - print(`added ${cidToString(cid, { base: cidBase, upgrade: false })}`) + let node + + if (inputEnc === 'protobuf') { + node = dagPB.decode(buf) + } else { + node = JSON.parse(uint8arrayToString(buf)) + } + + const base = await ipfs.bases.getBase(cidBase) + + const cid = await ipfs.object.put(node, { timeout }) + print(`added ${cid.toString(base.encoder)}`) } } diff --git a/packages/ipfs-cli/src/commands/object/stat.js b/packages/ipfs-cli/src/commands/object/stat.js index 8c061d521e..077a915469 100644 --- a/packages/ipfs-cli/src/commands/object/stat.js +++ b/packages/ipfs-cli/src/commands/object/stat.js @@ -22,7 +22,7 @@ module.exports = { /** * @param {object} argv * @param {import('../../types').Context} argv.ctx - * @param {import('cids')} argv.key + * @param {import('multiformats/cid').CID} argv.key * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print }, key, timeout }) { diff --git a/packages/ipfs-cli/src/commands/pin/add.js b/packages/ipfs-cli/src/commands/pin/add.js index 68f1bac588..dc07981176 100644 --- 
a/packages/ipfs-cli/src/commands/pin/add.js +++ b/packages/ipfs-cli/src/commands/pin/add.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') module.exports = { @@ -19,7 +17,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -61,7 +59,7 @@ module.exports = { * @param {import('../../types').Context} argv.ctx * @param {string[]} argv.ipfsPath * @param {boolean} argv.recursive - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout * @param {Record} argv.metadata * @param {Record} argv.metadataJson @@ -69,13 +67,14 @@ module.exports = { async handler ({ ctx, ipfsPath, recursive, cidBase, timeout, metadata, metadataJson }) { const { ipfs, print } = ctx const type = recursive ? 
'recursive' : 'direct' + const base = await ipfs.bases.getBase(cidBase) if (metadataJson) { metadata = metadataJson } for await (const res of ipfs.pin.addAll(ipfsPath.map(path => ({ path, recursive, metadata })), { timeout })) { - print(`pinned ${cidToString(res, { base: cidBase })} ${type}ly`) + print(`pinned ${res.toString(base.encoder)} ${type}ly`) } } } diff --git a/packages/ipfs-cli/src/commands/pin/ls.js b/packages/ipfs-cli/src/commands/pin/ls.js index e98fef0e84..666f1bf09e 100644 --- a/packages/ipfs-cli/src/commands/pin/ls.js +++ b/packages/ipfs-cli/src/commands/pin/ls.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') const { makeEntriesPrintable @@ -30,7 +28,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -44,20 +42,21 @@ module.exports = { * @param {string[]} argv.ipfsPath * @param {'direct' | 'indirect' | 'recursive' | 'all'} argv.type * @param {boolean} argv.quiet - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { ipfs, print }, ipfsPath, type, quiet, cidBase, timeout }) { + const base = await ipfs.bases.getBase(cidBase) /** * @param {import('ipfs-core-types/src/pin').LsResult} res */ const printPin = res => { - let line = cidToString(res.cid, { base: cidBase }) + let line = res.cid.toString(base.encoder) if (!quiet) { line += ` ${res.type}` if (res.metadata) { - line += ` ${JSON.stringify(makeEntriesPrintable(res.metadata))}` + line += ` ${JSON.stringify(makeEntriesPrintable(res.metadata, base))}` } } print(line) diff --git a/packages/ipfs-cli/src/commands/pin/rm.js b/packages/ipfs-cli/src/commands/pin/rm.js index 729ed8d0f7..5a54f47b64 100644 --- 
a/packages/ipfs-cli/src/commands/pin/rm.js +++ b/packages/ipfs-cli/src/commands/pin/rm.js @@ -1,7 +1,5 @@ 'use strict' -const multibase = require('multibase') -const { cidToString } = require('ipfs-core-utils/src/cid') const { default: parseDuration } = require('parse-duration') module.exports = { @@ -19,7 +17,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -32,14 +30,15 @@ module.exports = { * @param {import('../../types').Context} argv.ctx * @param {string[]} argv.ipfsPath * @param {boolean} argv.recursive - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx, ipfsPath, timeout, recursive, cidBase }) { const { ipfs, print } = ctx + const base = await ipfs.bases.getBase(cidBase) for await (const res of ipfs.pin.rmAll(ipfsPath.map(path => ({ path, recursive })), { timeout })) { - print(`unpinned ${cidToString(res, { base: cidBase })}`) + print(`unpinned ${res.toString(base.encoder)}`) } } } diff --git a/packages/ipfs-cli/src/commands/refs-local.js b/packages/ipfs-cli/src/commands/refs-local.js index 5a7ff88397..4eb75a209a 100644 --- a/packages/ipfs-cli/src/commands/refs-local.js +++ b/packages/ipfs-cli/src/commands/refs-local.js @@ -2,7 +2,7 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { default: parseDuration } = require('parse-duration') -const multibase = require('multibase') +const { base32 } = require('multiformats/bases/base32') module.exports = { command: 'refs-local', @@ -37,7 +37,7 @@ module.exports = { print(err.toString(), true, true) } else { if (multihash) { - print(multibase.encoding('base32upper').encode(uint8ArrayFromString(ref))) + print(base32.encode(uint8ArrayFromString(ref)).toUpperCase()) } else { print(ref) } diff --git a/packages/ipfs-cli/src/commands/resolve.js 
b/packages/ipfs-cli/src/commands/resolve.js index 193e9f45fa..da40571c03 100644 --- a/packages/ipfs-cli/src/commands/resolve.js +++ b/packages/ipfs-cli/src/commands/resolve.js @@ -1,6 +1,5 @@ 'use strict' -const multibase = require('multibase') const { default: parseDuration } = require('parse-duration') const { stripControlCharacters @@ -20,7 +19,7 @@ module.exports = { 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', - choices: Object.keys(multibase.names) + default: 'base58btc' }, timeout: { type: 'string', @@ -33,7 +32,7 @@ module.exports = { * @param {import('../types').Context} argv.ctx * @param {string} argv.name * @param {boolean} argv.recursive - * @param {import('multibase').BaseName} argv.cidBase + * @param {string} argv.cidBase * @param {number} argv.timeout */ async handler ({ ctx: { print, ipfs }, name, recursive, cidBase, timeout }) { diff --git a/packages/ipfs-cli/src/types.d.ts b/packages/ipfs-cli/src/types.d.ts index 6f4130de13..5552d1121c 100644 --- a/packages/ipfs-cli/src/types.d.ts +++ b/packages/ipfs-cli/src/types.d.ts @@ -22,4 +22,4 @@ export interface Print { error: (msg: string, includeNewline?: boolean) => void isTTY: boolean columns: any -} \ No newline at end of file +} diff --git a/packages/ipfs-cli/src/utils.js b/packages/ipfs-cli/src/utils.js index 74702bd2a2..5887314aa0 100644 --- a/packages/ipfs-cli/src/utils.js +++ b/packages/ipfs-cli/src/utils.js @@ -8,9 +8,8 @@ const Progress = require('progress') // @ts-ignore no types const byteman = require('byteman') const IPFS = require('ipfs-core') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { Multiaddr } = require('multiaddr') -const { cidToString } = require('ipfs-core-utils/src/cid') const uint8ArrayFromString = require('uint8arrays/from-string') const getRepoPath = () => { @@ -229,10 +228,10 @@ const coerceCID = (value) => { } if (value.startsWith('/ipfs/')) { - return new CID(value.split('/')[2]) + return 
CID.parse(value.split('/')[2]) } - return new CID(value) + return CID.parse(value) } /** @@ -335,12 +334,12 @@ const escapeControlCharacters = (str) => { * CID properties * * @param {any} obj - all keys/values in this object will be have control characters stripped - * @param {import('cids').BaseNameOrCode} cidBase - any encountered CIDs will be stringified using this base + * @param {import('multiformats/bases/interface').MultibaseCodec} cidBase - any encountered CIDs will be stringified using this base * @returns {any} */ -const makeEntriesPrintable = (obj, cidBase = 'base58btc') => { - if (CID.isCID(obj)) { - return { '/': cidToString(obj, { base: cidBase }) } +const makeEntriesPrintable = (obj, cidBase) => { + if (obj instanceof CID) { + return { '/': obj.toString(cidBase.encoder) } } if (typeof obj === 'string') { diff --git a/packages/ipfs-cli/test/add.js b/packages/ipfs-cli/test/add.js index 9e9828bc52..97897c0db2 100644 --- a/packages/ipfs-cli/test/add.js +++ b/packages/ipfs-cli/test/add.js @@ -2,7 +2,9 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') const first = require('it-first') const cli = require('./utils/cli') const sinon = require('sinon') @@ -46,29 +48,34 @@ describe('add', () => { beforeEach(() => { ipfs = { - addAll: sinon.stub() + addAll: sinon.stub(), + bases: { + getBase: sinon.stub() + } } }) it('should add a file', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ - cid: new CID(cid), + cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --progress false README.md', { ipfs }) expect(out).to.equal(`added 
${cid} README.md\n`) }) it('should strip control characters from paths when add a file', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ - cid: new CID(cid), + cid, path: 'R\b\n\tEADME.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --progress false README.md', { ipfs }) expect(out).to.equal(`added ${cid} README.md\n`) @@ -78,29 +85,31 @@ describe('add', () => { const cid = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB' ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ - cid: new CID(cid), + cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add README.md', { ipfs }) expect(out).to.equal(`added ${cid} README.md\n`) }) it('add multiple', async () => { - const cid1 = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB' - const cid2 = 'QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o' + const cid1 = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid2 = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') ipfs.addAll.withArgs(matchIterable(), { ...defaultOptions, progress: sinon.match.func, wrapWithDirectory: true }).returns([{ - cid: new CID(cid1), + cid: cid1, path: 'README.md' }, { - cid: new CID(cid2), + cid: cid2, path: 'package.json' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add README.md package.json --wrap-with-directory', { ipfs }) expect(out).to.include(`added ${cid1} README.md\n`) @@ -108,7 +117,7 @@ describe('add', () => { }) it('add with cid-version=1', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB').toV1() + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB').toV1() ipfs.addAll.withArgs(matchIterable(), { ...defaultOptions, @@ 
-118,13 +127,14 @@ describe('add', () => { cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add README.md --cid-version=1', { ipfs }) - expect(out).to.equal(`added ${cid} README.md\n`) + expect(out).to.equal(`added ${cid.toString(base58btc)} README.md\n`) }) it('add with cid-version=1 and raw-leaves=false', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB').toV1() + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB').toV1() ipfs.addAll.withArgs(matchIterable(), { ...defaultOptions, @@ -134,13 +144,14 @@ describe('add', () => { cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add README.md --cid-version=1 --raw-leaves=false', { ipfs }) - expect(out).to.equal(`added ${cid} README.md\n`) + expect(out).to.equal(`added ${cid.toString(base58btc)} README.md\n`) }) it('add with cid-version=1 and raw-leaves=true', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB').toV1() + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB').toV1() ipfs.addAll.withArgs(matchIterable(), { ...defaultOptions, @@ -150,13 +161,14 @@ describe('add', () => { cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add README.md --cid-version=1 --raw-leaves=true', { ipfs }) - expect(out).to.equal(`added ${cid} README.md\n`) + expect(out).to.equal(`added ${cid.toString(base58btc)} README.md\n`) }) it('add from pipe', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(sinon.match([{ content: matchIterable(), @@ -166,6 +178,7 @@ describe('add', () => { cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const proc = cli('add', { ipfs, @@ -179,7 +192,7 
@@ describe('add', () => { }) it('add from pipe with mtime=100', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(sinon.match([{ content: matchIterable(), @@ -189,6 +202,7 @@ describe('add', () => { cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const proc = cli('add --mtime=100', { ipfs, @@ -202,67 +216,72 @@ describe('add', () => { }) it('add --quiet', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --quiet README.md', { ipfs }) expect(out).to.equal(`${cid}\n`) }) it('add --quiet (short option)', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add -q README.md', { ipfs }) expect(out).to.equal(`${cid}\n`) }) it('add --quieter', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --quieter README.md', { ipfs }) expect(out).to.equal(`${cid}\n`) }) it('add --quieter (short option)', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') 
ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add -Q README.md', { ipfs }) expect(out).to.equal(`${cid}\n`) }) it('add --silent', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --silent README.md', { ipfs }) expect(out).to.be.empty() }) it('add --only-hash outputs correct hash', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), { ...defaultOptions, @@ -271,13 +290,14 @@ describe('add', () => { cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --only-hash README.md', { ipfs }) expect(out).to.equal(`added ${cid} README.md\n`) }) it('add does not pin with --pin=false', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), { ...defaultOptions, @@ -286,18 +306,20 @@ describe('add', () => { cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --pin false README.md', { ipfs }) expect(out).to.equal(`added ${cid} README.md\n`) }) it('add with mtime', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ cid, path: 'README.md' }]) + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --mtime 5 README.md', { ipfs }) expect(out).to.equal(`added ${cid} README.md\n`) @@ -308,12 +330,13 @@ describe('add', () => { }) it('add with mtime-nsecs', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --mtime 5 --mtime-nsecs 100 README.md', { ipfs }) expect(out).to.equal(`added ${cid} README.md\n`) @@ -325,12 +348,13 @@ describe('add', () => { }) it('add with mode', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add --mode 0655 README.md', { ipfs }) expect(out).to.equal(`added ${cid} README.md\n`) @@ -342,7 +366,7 @@ describe('add', () => { HASH_ALGS.forEach((name) => { it(`add with hash=${name} and raw-leaves=false`, async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), { ...defaultOptions, @@ -352,6 +376,7 @@ describe('add', () => { cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`add README.md --hash=${name} --raw-leaves=false`, { ipfs }) expect(out).to.equal(`added ${cid} README.md\n`) @@ -359,19 +384,24 @@ describe('add', () => { }) it('should add and print CID encoded in specified base', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = 
CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB').toV1() - ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ + ipfs.addAll.withArgs(matchIterable(), { + ...defaultOptions, + rawLeaves: true, + cidVersion: 1 + }).returns([{ cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base64').returns(base64) - const out = await cli('add --cid-base=base64 README.md', { ipfs }) - expect(out).to.equal(`added ${cid.toV1().toString('base64')} README.md\n`) + const out = await cli('add --cid-base=base64 --cid-version=1 README.md', { ipfs }) + expect(out).to.equal(`added ${cid.toString(base64)} README.md\n`) }) it('should add with a timeout', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') ipfs.addAll.withArgs(matchIterable(), { ...defaultOptions, @@ -380,6 +410,7 @@ describe('add', () => { cid, path: 'README.md' }]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('add README.md --timeout=1s', { ipfs }) expect(out).to.equal(`added ${cid} README.md\n`) diff --git a/packages/ipfs-cli/test/bitswap.js b/packages/ipfs-cli/test/bitswap.js index 9d17519f3a..0bbe7f2241 100644 --- a/packages/ipfs-cli/test/bitswap.js +++ b/packages/ipfs-cli/test/bitswap.js @@ -2,14 +2,16 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') const cli = require('./utils/cli') const sinon = require('sinon') describe('bitswap', () => { const peerId = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNA' - const key0 = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR' - const key1 = 'zb2rhafnd6kEUujnoMkozHnWXY7XpWttyVDWKXfChqA42VTDU' + const key0 = CID.parse('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR') + const key1 = 
CID.parse('zb2rhafnd6kEUujnoMkozHnWXY7XpWttyVDWKXfChqA42VTDU') let ipfs @@ -20,6 +22,9 @@ describe('bitswap', () => { wantlistForPeer: sinon.stub(), stat: sinon.stub(), unwant: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } }) @@ -31,25 +36,28 @@ describe('bitswap', () => { it('should return the wantlist', async () => { ipfs.bitswap.wantlist.withArgs(defaultOptions).resolves([ - new CID(key0), - new CID(key1) + key0, + key1 ]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('bitswap wantlist', { ipfs }) - expect(out).to.include(key0) - expect(out).to.include(key1) + expect(out).to.include(key0.toString(base58btc)) + expect(out).to.include(key1.toString(base58btc)) }) it('should get wantlist with CIDs encoded in specified base', async () => { ipfs.bitswap.wantlist.withArgs({ ...defaultOptions }).resolves([ - new CID(key0), - new CID(key1) + key0.toV1(), + key1.toV1() ]) + ipfs.bases.getBase.withArgs('base64').returns(base64) const out = await cli('bitswap wantlist --cid-base=base64', { ipfs }) - expect(out).to.include(new CID(key1).toBaseEncodedString('base64') + '\n') + expect(out).to.include(key0.toV1().toString(base64) + '\n') + expect(out).to.include(key1.toV1().toString(base64) + '\n') }) it('wantlist peerid', async () => { @@ -95,11 +103,12 @@ describe('bitswap', () => { dupDataReceived: BigInt(10), dataSent: BigInt(10), wantlist: [ - new CID(key0), - new CID(key1) + key0, + key1 ], peers: [] }) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('bitswap stat', { ipfs }) @@ -112,8 +121,8 @@ describe('bitswap', () => { expect(out).to.match(/dup blocks received:\s\d+$/m) expect(out).to.match(/dup data received:\s\d+$/m) expect(out).to.match(/wantlist\s\[\d+\skeys\]$/m) - expect(out).to.include(key0) - expect(out).to.include(key1) + expect(out).to.include(key0.toString(base58btc)) + expect(out).to.include(key1.toString(base58btc)) expect(out).to.match(/partners\s\[\d+\]$/m) }) @@ 
-127,11 +136,12 @@ describe('bitswap', () => { dupDataReceived: BigInt(10), dataSent: BigInt(10), wantlist: [ - new CID(key0), - new CID(key1) + key0, + key1 ], peers: [] }) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('bitswap stat --human', { ipfs }) @@ -144,8 +154,6 @@ describe('bitswap', () => { expect(out).to.match(/dup blocks received:\s\d+$/m) expect(out).to.match(/dup data received:\s+[\d.]+\s[PTGMK]?B$/m) expect(out).to.match(/wantlist\s\[\d+\skeys\]$/m) - expect(out).to.not.include(key0) - expect(out).to.not.include(key1) expect(out).to.match(/partners\s\[\d+\]$/m) }) @@ -159,14 +167,15 @@ describe('bitswap', () => { dupDataReceived: BigInt(10), dataSent: BigInt(10), wantlist: [ - new CID(key0), - new CID(key1) + key0.toV1(), + key1.toV1() ], peers: [] }) + ipfs.bases.getBase.withArgs('base64').returns(base64) const out = await cli('bitswap stat --cid-base=base64', { ipfs }) - expect(out).to.include(new CID(key1).toBaseEncodedString('base64')) + expect(out).to.include(key1.toV1().toString(base64)) }) it('should return bitswap stats with a timeout', async () => { @@ -182,11 +191,12 @@ describe('bitswap', () => { dupDataReceived: BigInt(10), dataSent: BigInt(10), wantlist: [ - new CID(key0), - new CID(key1) + key0, + key1 ], peers: [] }) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('bitswap stat --timeout=1s', { ipfs }) @@ -199,8 +209,8 @@ describe('bitswap', () => { expect(out).to.match(/dup blocks received:\s\d+$/m) expect(out).to.match(/dup data received:\s\d+$/m) expect(out).to.match(/wantlist\s\[\d+\skeys\]$/m) - expect(out).to.include(key0) - expect(out).to.include(key1) + expect(out).to.include(key0.toString(base58btc)) + expect(out).to.include(key1.toString(base58btc)) expect(out).to.match(/partners\s\[\d+\]$/m) }) }) @@ -211,16 +221,20 @@ describe('bitswap', () => { } it('should unwant a block', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + 
const out = await cli('bitswap unwant ' + key0, { ipfs }) expect(out).to.eql(`Key ${key0} removed from wantlist\n`) expect(ipfs.bitswap.unwant.called).to.be.true() }) it('should unwant a block with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const out = await cli(`bitswap unwant ${key0} --timeout=1s`, { ipfs }) expect(out).to.eql(`Key ${key0} removed from wantlist\n`) expect(ipfs.bitswap.unwant.called).to.be.true() - expect(ipfs.bitswap.unwant.getCall(0).args).to.deep.equal([new CID(key0), { + expect(ipfs.bitswap.unwant.getCall(0).args).to.deep.equal([key0, { ...defaultOptions, timeout: 1000 }]) diff --git a/packages/ipfs-cli/test/block.js b/packages/ipfs-cli/test/block.js index b03433eeb5..015b9b670b 100644 --- a/packages/ipfs-cli/test/block.js +++ b/packages/ipfs-cli/test/block.js @@ -2,13 +2,15 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') const cli = require('./utils/cli') const sinon = require('sinon') const uint8ArrayFromString = require('uint8arrays/from-string') describe('block', () => { - const cid = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const cid = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') let ipfs beforeEach(() => { @@ -18,6 +20,9 @@ describe('block', () => { put: sinon.stub(), rm: sinon.stub(), stat: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } }) @@ -26,16 +31,14 @@ describe('block', () => { const defaultOptions = { format: 'dag-pb', mhtype: 'sha2-256', - mhlen: undefined, version: 0, pin: false, timeout: undefined } it('should put a file', async () => { - ipfs.block.put.withArgs(sinon.match.any, defaultOptions).resolves({ - cid: new CID(cid) - }) + ipfs.block.put.withArgs(sinon.match.any, defaultOptions).resolves(cid) + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('block put README.md', { ipfs }) expect(out).to.eql(`${cid}\n`) @@ -46,30 +49,27 @@ describe('block', () => { ...defaultOptions, format: 'eth-block', mhtype: 'keccak-256' - }).resolves({ - cid: new CID(cid) - }) + }).resolves(cid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('block put --format eth-block --mhtype keccak-256 README.md', { ipfs }) expect(out).to.eql(`${cid}\n`) }) it('should put and print CID encoded in specified base', async () => { - ipfs.block.put.withArgs(sinon.match.any, defaultOptions).resolves({ - cid: new CID(cid) - }) + ipfs.block.put.withArgs(sinon.match.any, defaultOptions).resolves(cid.toV1()) + ipfs.bases.getBase.withArgs('base64').returns(base64) const out = await cli('block put README.md --cid-base=base64', { ipfs }) - expect(out).to.eql(`${cid.toV1().toString('base64')}\n`) + expect(out).to.eql(`${cid.toV1().toString(base64)}\n`) }) it('should put and pin the block', async () => { ipfs.block.put.withArgs(sinon.match.any, { ...defaultOptions, pin: true - }).resolves({ - cid: new CID(cid) - }) + }).resolves(cid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('block put README.md --pin', { ipfs }) expect(out).to.eql(`${cid}\n`) @@ -79,9 +79,8 @@ describe('block', () => { ipfs.block.put.withArgs(sinon.match.any, { ...defaultOptions, timeout: 1000 - }).resolves({ - cid: new CID(cid) - }) + }).resolves(cid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('block put --timeout=1s README.md', { ipfs }) expect(out).to.eql(`${cid}\n`) @@ -94,10 +93,10 @@ describe('block', () => { } it('should get a block', async () => { - ipfs.block.get.withArgs(cid, defaultOptions).resolves({ - cid, - data: uint8ArrayFromString('hello world\n') - }) + ipfs.block.get.withArgs(cid, defaultOptions).resolves( + uint8ArrayFromString('hello world\n') + ) + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`block get ${cid}`, { ipfs }) expect(out).to.eql('hello world\n') @@ -112,10 +111,10 @@ describe('block', () => { ipfs.block.get.withArgs(cid, { ...defaultOptions, timeout: 1000 - }).resolves({ - cid, - data: uint8ArrayFromString('hello world\n') - }) + }).resolves( + uint8ArrayFromString('hello world\n') + ) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`block get ${cid} --timeout=1s`, { ipfs }) expect(out).to.eql('hello world\n') @@ -132,6 +131,7 @@ describe('block', () => { cid, size: 12 }) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`block stat ${cid}`, { ipfs }) expect(out).to.eql([ @@ -141,11 +141,12 @@ describe('block', () => { }) it('should stat and print CID encoded in specified base', async () => { - const cid = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const cid = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp').toV1() ipfs.block.stat.withArgs(cid, defaultOptions).resolves({ cid, size: 12 }) + ipfs.bases.getBase.withArgs('base64').returns(base64) const out = await cli(`block stat ${cid} --cid-base=base64`, { ipfs }) expect(out).to.eql([ @@ -162,6 +163,7 @@ describe('block', () => { cid, size: 12 }) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`block stat ${cid} --timeout=1s`, { ipfs }) expect(out).to.eql([ @@ -179,7 +181,7 @@ describe('block', () => { } it('should remove a block', async () => { - const cid = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const cid = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') ipfs.block.rm.withArgs([cid], defaultOptions).returns([{ cid, error: false @@ -191,7 +193,7 @@ describe('block', () => { it('rm prints error when removing fails', async () => { const err = new Error('Yikes!') - const cid = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const cid = 
CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') ipfs.block.rm.withArgs([cid], defaultOptions).returns([{ cid, error: err @@ -202,7 +204,7 @@ describe('block', () => { }) it('rm quietly', async () => { - const cid = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const cid = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') ipfs.block.rm.withArgs([cid], { ...defaultOptions, quiet: true @@ -216,7 +218,7 @@ describe('block', () => { }) it('rm force', async () => { - const cid = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kh') + const cid = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kh') ipfs.block.rm.withArgs([cid], { ...defaultOptions, force: true @@ -230,7 +232,7 @@ describe('block', () => { }) it('fails to remove non-existent block', async () => { - const cid = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kh') + const cid = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kh') ipfs.block.rm.withArgs([cid]).returns([{ cid, error: new Error('block not found') diff --git a/packages/ipfs-cli/test/cat.js b/packages/ipfs-cli/test/cat.js index 29e61be0bf..67692c3107 100644 --- a/packages/ipfs-cli/test/cat.js +++ b/packages/ipfs-cli/test/cat.js @@ -2,7 +2,7 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const CID = require('cids') +const { CID } = require('multiformats/cid') const cli = require('./utils/cli') const sinon = require('sinon') const uint8ArrayFromString = require('uint8arrays/from-string') @@ -23,7 +23,7 @@ describe('cat', () => { }) it('should cat a file', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') const buf = uint8ArrayFromString('hello world') ipfs.cat.withArgs(cid.toString(), defaultOptions).returns([buf]) @@ -33,7 +33,7 @@ describe('cat', () => { }) it('cat part of a file using `count`', async () => { - const cid = new 
CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') const buf = uint8ArrayFromString('hello world') ipfs.cat.withArgs(cid.toString(), { @@ -47,7 +47,7 @@ describe('cat', () => { }) it('cat part of a file using `length`', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') const buf = uint8ArrayFromString('hello world') ipfs.cat.withArgs(cid.toString(), { @@ -71,7 +71,7 @@ describe('cat', () => { }) it('should cat a file with a timeout', async () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') const buf = uint8ArrayFromString('hello world') ipfs.cat.withArgs(cid.toString(), { diff --git a/packages/ipfs-cli/test/cid.js b/packages/ipfs-cli/test/cid.js new file mode 100644 index 0000000000..b3ce784d33 --- /dev/null +++ b/packages/ipfs-cli/test/cid.js @@ -0,0 +1,304 @@ +/* eslint-env mocha */ +'use strict' + +const { expect } = require('aegir/utils/chai') +const cli = require('./utils/cli') +const sinon = require('sinon') +const { base32 } = require('multiformats/bases/base32') +const { base58btc } = require('multiformats/bases/base58') +const raw = require('multiformats/codecs/raw') +const { sha256 } = require('multiformats/hashes/sha2') +const dagPb = require('@ipld/dag-pb') +const uint8ArrayFromString = require('uint8arrays/from-string') + +describe('cid', () => { + let ipfs + + beforeEach(() => { + ipfs = { + bases: { + listBases: sinon.stub(), + getBase: sinon.stub() + }, + codecs: { + listCodecs: sinon.stub(), + getCodec: sinon.stub() + }, + hashers: { + listHashers: sinon.stub(), + getHasher: sinon.stub() + } + } + }) + + describe('base32', () => { + it('should convert a cid to base32', async () => { + const out = await cli('cid base32 
QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354') + }) + + it('should convert a cid to base32 from stdin', async () => { + const out = await cli('cid base32', { + ipfs, + getStdin: function * () { + yield uint8ArrayFromString('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn\n') + } + }) + expect(out.trim()).to.equal('bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354') + }) + }) + + describe('bases', () => { + it('should list bases', async () => { + ipfs.bases.listBases.returns([base32]) + + const out = await cli('cid bases', { ipfs }) + expect(out.trim()).to.equal('base32') + }) + + it('should list bases with prefixes', async () => { + ipfs.bases.listBases.returns([base32]) + + const out = await cli('cid bases --prefix', { ipfs }) + expect(out.trim()).to.equal('b\tbase32') + }) + + it('should list bases with numeric code', async () => { + ipfs.bases.listBases.returns([base32]) + + const out = await cli('cid bases --numeric', { ipfs }) + expect(out.trim()).to.equal('98\tbase32') + }) + + it('should list bases with numeric code and prefix', async () => { + ipfs.bases.listBases.returns([base32]) + + const out = await cli('cid bases --numeric --prefix', { ipfs }) + expect(out.trim()).to.equal('b\t98\tbase32') + }) + }) + + describe('codecs', () => { + it('should list codecs', async () => { + ipfs.codecs.listCodecs.returns([raw]) + + const out = await cli('cid codecs', { ipfs }) + expect(out.trim()).to.equal('raw') + }) + + it('should list codecs with numeric code', async () => { + ipfs.codecs.listCodecs.returns([raw]) + + const out = await cli('cid codecs --numeric', { ipfs }) + expect(out.trim()).to.equal('85\traw') + }) + }) + + describe('format', () => { + it('should format cid', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await 
cli('cid format QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + }) + + it('should format base name', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%b" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('base58btc') + }) + + it('should format base prefix', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%B" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('z') + }) + + it('should format version string', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%v" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('cidv0') + }) + + it('should format version number', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%V" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('0') + }) + + it('should format codec name', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([dagPb]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%c" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('dag-pb') + }) + + it('should format codec code', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + 
ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%C" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('112') + }) + + it('should format multihash name', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%h" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('sha2-256') + }) + + it('should format multihash name', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%H" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('18') + }) + + it('should format multihash digest length', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%L" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('32') + }) + + it('should format multihash encoded in default base', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%m" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('zQmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + }) + + it('should format multihash encoded in base %b', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%m" -b base32 QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + 
expect(out.trim()).to.equal('bciqftfeehedf6klbt32bfaglxezl4uwfnwm4lftlmxqbcerz6cmlx3y') + }) + + // go-ipfs always converts to v1? + it.skip('should format multihash encoded in default base without multihash prefix', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%M" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('bciqftfeehedf6klbt32bfaglxezl4uwfnwm4lftlmxqbcerz6cmlx3y') + }) + + it('should format multihash encoded in base %b without multihash prefix', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%M" -b base32 QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('ciqftfeehedf6klbt32bfaglxezl4uwfnwm4lftlmxqbcerz6cmlx3y') + }) + + it('should format hash digest encoded in base %b with multihash prefix', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%d" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('z72gdmFAgRzYHkJzKiL8MgMMRW3BTSCGyDHroPxJbxMJn') + }) + + it('should format hash digest encoded in base %b without multihash prefix', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%D" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('72gdmFAgRzYHkJzKiL8MgMMRW3BTSCGyDHroPxJbxMJn') + }) + + it('should format cid in default base', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + 
ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%s" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + }) + + it('should format cid in specified base', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%s" -b base32 QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('bciqftfeehedf6klbt32bfaglxezl4uwfnwm4lftlmxqbcerz6cmlx3y') + }) + + it('should format cid in default base without multibase prefix', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%S" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + }) + + it('should format cid in specified base without multibase prefix', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([raw]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%S" -b base32 QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('ciqftfeehedf6klbt32bfaglxezl4uwfnwm4lftlmxqbcerz6cmlx3y') + }) + + it('should format cid prefix', async () => { + ipfs.bases.listBases.returns([base32, base58btc]) + ipfs.codecs.listCodecs.returns([dagPb]) + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid format -f "%P" QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', { ipfs }) + expect(out.trim()).to.equal('cidv0-dag-pb-sha2-256-32') + }) + }) + + describe('hashes', () => { + it('should list hashers', async () => { + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid hashes', 
{ ipfs }) + expect(out.trim()).to.equal('sha2-256') + }) + + it('should list hashers with numeric code', async () => { + ipfs.hashers.listHashers.returns([sha256]) + + const out = await cli('cid hashes --numeric', { ipfs }) + expect(out.trim()).to.equal('18\tsha2-256') + }) + }) +}) diff --git a/packages/ipfs-cli/test/commands.js b/packages/ipfs-cli/test/commands.js index 2c65b88861..0888528273 100644 --- a/packages/ipfs-cli/test/commands.js +++ b/packages/ipfs-cli/test/commands.js @@ -4,7 +4,7 @@ const { expect } = require('aegir/utils/chai') const cli = require('./utils/cli') -const commandCount = 110 +const commandCount = 115 describe('commands', () => { it('list the commands', async () => { diff --git a/packages/ipfs-cli/test/dag.js b/packages/ipfs-cli/test/dag.js index 041927c9e0..3388705167 100644 --- a/packages/ipfs-cli/test/dag.js +++ b/packages/ipfs-cli/test/dag.js @@ -3,17 +3,20 @@ const { expect } = require('aegir/utils/chai') const cli = require('./utils/cli') -const dagCBOR = require('ipld-dag-cbor') -const dagPB = require('ipld-dag-pb') +const dagCBOR = require('@ipld/dag-cbor') +const dagPB = require('@ipld/dag-pb') const sinon = require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const raw = require('multiformats/codecs/raw') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') describe('dag', () => { - const dagPbCid = new CID('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z') - const rawCid = new CID(1, 'raw', dagPbCid.multihash) - const dagCborCid = new CID(1, 'dag-cbor', dagPbCid.multihash) + const dagPbCid = CID.parse('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z') + const rawCid = CID.createV1(raw.code, dagPbCid.multihash) + const dagCborCid = CID.createV1(dagCBOR.code, dagPbCid.multihash) let ipfs beforeEach(() => { 
@@ -22,6 +25,9 @@ describe('dag', () => { get: sinon.stub(), resolve: sinon.stub(), put: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } }) @@ -39,6 +45,7 @@ describe('dag', () => { } ipfs.dag.get.withArgs(rawCid, defaultOptions).returns(result) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`dag get ${rawCid} --data-enc base16`, { ipfs }) @@ -58,10 +65,11 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagPbCid, defaultOptions).returns(result) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`dag get ${dagPbCid}`, { ipfs }) - expect(out).to.equal(`{"data":"AAED","links":[{"Name":"foo","Size":10,"Cid":{"/":"${dagCborCid.toString()}"}}]}\n`) + expect(out).to.equal(`{"data":"AAED","links":[{"Name":"foo","Size":10,"Cid":{"/":"${dagCborCid.toString(base58btc)}"}}]}\n`) }) it('should get a dag-pb node and specify data encoding', async () => { @@ -77,10 +85,11 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagPbCid, defaultOptions).returns(result) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`dag get ${dagPbCid} --data-enc base16`, { ipfs }) - expect(out).to.equal(`{"data":"000103","links":[{"Name":"foo","Size":10,"Cid":{"/":"${dagCborCid.toString()}"}}]}\n`) + expect(out).to.equal(`{"data":"000103","links":[{"Name":"foo","Size":10,"Cid":{"/":"${dagCborCid.toString(base58btc)}"}}]}\n`) }) it('should get a dag-pb node and specify CID encoding', async () => { @@ -96,10 +105,11 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagPbCid, defaultOptions).returns(result) + ipfs.bases.getBase.withArgs('base64').returns(base64) - const out = await cli(`dag get ${dagPbCid} --cid-base base16`, { ipfs }) + const out = await cli(`dag get ${dagPbCid} --cid-base base64`, { ipfs }) - expect(out).to.equal(`{"data":"AAED","links":[{"Name":"foo","Size":10,"Cid":{"/":"${dagCborCid.toString('base16')}"}}]}\n`) + 
expect(out).to.equal(`{"data":"AAED","links":[{"Name":"foo","Size":10,"Cid":{"/":"${dagCborCid.toString(base64)}"}}]}\n`) }) it('should get a dag-cbor node', async () => { @@ -110,6 +120,7 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagCborCid, defaultOptions).returns(result) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`dag get ${dagCborCid}`, { ipfs }) @@ -125,6 +136,7 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagCborCid, defaultOptions).returns(result) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`dag get ${dagCborCid}`, { ipfs }) @@ -140,10 +152,11 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagCborCid, defaultOptions).returns(result) + ipfs.bases.getBase.withArgs('base64').returns(base64) const out = await cli(`dag get ${dagCborCid} --cid-base=base64`, { ipfs }) - expect(out).to.equal(`{"foo":"bar","baz":{"/":"${rawCid.toString('base64')}"}}\n`) + expect(out).to.equal(`{"foo":"bar","baz":{"/":"${rawCid.toString(base64)}"}}\n`) }) it('should get a node with a deep path', async () => { @@ -222,10 +235,11 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagPbCid, defaultOptions).returns(result) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`dag get ${dagPbCid}`, { ipfs }) - expect(out).to.equal(`{"links":[{"Name":"foo.txt","Size":9000,"Cid":{"/":"${dagPbCid}"}}]}\n`) + expect(out).to.equal(`{"links":[{"Name":"foo.txt","Size":9000,"Cid":{"/":"${dagPbCid.toString(base58btc)}"}}]}\n`) }) it('should strip control characters from dag-cbor nodes', async () => { @@ -288,7 +302,7 @@ describe('dag', () => { it('resolves a cid ref', async () => { ipfs.dag.resolve.withArgs(dagPbCid.toString(), defaultOptions).returns([{ - value: new CID(dagPbCid) + value: dagPbCid }]) const out = await cli(`dag resolve ${dagPbCid}`, { ipfs }) @@ -297,7 +311,7 @@ describe('dag', () => { it('resolves an ipfs path', async () => { 
ipfs.dag.resolve.withArgs(`/ipfs/${dagPbCid}`, defaultOptions).returns([{ - value: new CID(dagPbCid) + value: dagPbCid }]) const out = await cli(`dag resolve /ipfs/${dagPbCid}`, { ipfs }) @@ -309,7 +323,7 @@ describe('dag', () => { ...defaultOptions, timeout: 1000 }).returns([{ - value: new CID(dagPbCid) + value: dagPbCid }]) const out = await cli(`dag resolve ${dagPbCid} --timeout=1s`, { ipfs }) @@ -329,14 +343,16 @@ describe('dag', () => { } it('puts json string', async () => { - ipfs.dag.put.withArgs({}, defaultOptions).resolves(new CID(dagCborCid)) + ipfs.dag.put.withArgs({}, defaultOptions).resolves(dagCborCid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put "{}"', { ipfs }) - expect(out).to.equal(`${dagCborCid}\n`) + expect(out).to.equal(`${dagCborCid.toString(base58btc)}\n`) }) it('puts piped json string', async () => { - ipfs.dag.put.withArgs({}, defaultOptions).resolves(new CID(dagCborCid)) + ipfs.dag.put.withArgs({}, defaultOptions).resolves(dagCborCid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put', { getStdin: function * () { @@ -344,26 +360,28 @@ describe('dag', () => { }, ipfs }) - expect(out).to.equal(`${dagCborCid}\n`) + expect(out).to.equal(`${dagCborCid.toString(base58btc)}\n`) }) it('puts piped cbor node', async () => { - ipfs.dag.put.withArgs({}, defaultOptions).resolves(new CID(dagCborCid)) + ipfs.dag.put.withArgs({}, defaultOptions).resolves(dagCborCid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put --input-encoding cbor', { getStdin: function * () { - yield dagCBOR.util.serialize({}) + yield dagCBOR.encode({}) }, ipfs }) - expect(out).to.equal(`${dagCborCid}\n`) + expect(out).to.equal(`${dagCborCid.toString(base58btc)}\n`) }) it('puts piped raw node', async () => { ipfs.dag.put.withArgs(Buffer.alloc(10), { ...defaultOptions, format: 'raw' - }).resolves(new CID(rawCid)) + }).resolves(rawCid) + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put --input-encoding raw --format raw', { getStdin: function * () { @@ -371,23 +389,24 @@ describe('dag', () => { }, ipfs }) - expect(out).to.equal(`${rawCid}\n`) + expect(out).to.equal(`${rawCid.toString(base58btc)}\n`) }) it('puts piped protobuf node', async () => { - ipfs.dag.put.withArgs(dagPB.util.deserialize(dagPB.util.serialize({})), { + ipfs.dag.put.withArgs(dagPB.decode(dagPB.encode({ Links: [] })), { ...defaultOptions, format: 'dag-pb', version: 0 - }).resolves(new CID(dagPbCid)) + }).resolves(dagPbCid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put --input-encoding protobuf --format protobuf', { getStdin: function * () { - yield dagPB.util.serialize({}) + yield dagPB.encode({ Links: [] }) }, ipfs }) - expect(out).to.equal(`${dagPbCid}\n`) + expect(out).to.equal(`${dagPbCid.toString(base58btc)}\n`) }) it('puts protobuf node as json', async () => { @@ -395,66 +414,72 @@ describe('dag', () => { ...defaultOptions, format: 'dag-pb', version: 0 - }).resolves(new CID(dagPbCid)) + }).resolves(dagPbCid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put --format protobuf \'{"Links":[]}\'', { ipfs }) - expect(out).to.equal(`${dagPbCid}\n`) + expect(out).to.equal(`${dagPbCid.toString(base58btc)}\n`) }) it('puts piped protobuf node with cid-v1', async () => { - ipfs.dag.put.withArgs(dagPB.util.deserialize(dagPB.util.serialize({})), { + ipfs.dag.put.withArgs(dagPB.decode(dagPB.encode({ Links: [] })), { ...defaultOptions, format: 'dag-pb', version: 1 - }).resolves(new CID(dagPbCid)) + }).resolves(dagPbCid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put --input-encoding protobuf --format protobuf --cid-version=1', { getStdin: function * () { - yield dagPB.util.serialize({}) + yield dagPB.encode({ Links: [] }) }, ipfs }) - 
expect(out).to.equal(`${dagPbCid}\n`) + expect(out).to.equal(`${dagPbCid.toString(base58btc)}\n`) }) it('puts json string with esoteric hashing algorithm', async () => { ipfs.dag.put.withArgs({}, { ...defaultOptions, hashAlg: 'blake2s-40' - }).resolves(new CID(dagCborCid)) + }).resolves(dagCborCid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put --hash-alg blake2s-40 "{}"', { ipfs }) - expect(out).to.equal(`${dagCborCid}\n`) + expect(out).to.equal(`${dagCborCid.toString(base58btc)}\n`) }) it('puts json string with cid base', async () => { ipfs.dag.put.withArgs({}, defaultOptions).resolves(dagCborCid) + ipfs.bases.getBase.withArgs('base64').returns(base64) const out = await cli('dag put --cid-base base64 "{}"', { ipfs }) - expect(out).to.equal(`${dagCborCid.toV1().toString('base64')}\n`) + expect(out).to.equal(`${dagCborCid.toV1().toString(base64)}\n`) }) it('pins node after putting', async () => { ipfs.dag.put.withArgs({ hello: 'world' }, { ...defaultOptions, pin: true - }).resolves(new CID(dagCborCid)) + }).resolves(dagCborCid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put --pin \'{"hello":"world"}\'', { ipfs }) - expect(out).to.equal(`${dagCborCid}\n`) + expect(out).to.equal(`${dagCborCid.toString(base58btc)}\n`) }) it('puts json string with a timeout', async () => { ipfs.dag.put.withArgs({}, { ...defaultOptions, timeout: 1000 - }).resolves(new CID(dagCborCid)) + }).resolves(dagCborCid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli('dag put "{}" --timeout=1s', { ipfs }) - expect(out).to.equal(`${dagCborCid}\n`) + expect(out).to.equal(`${dagCborCid.toString(base58btc)}\n`) }) }) }) diff --git a/packages/ipfs-cli/test/dht.js b/packages/ipfs-cli/test/dht.js index 46f4027c29..08641e92b3 100644 --- a/packages/ipfs-cli/test/dht.js +++ b/packages/ipfs-cli/test/dht.js @@ -7,7 +7,7 @@ const cli = require('./utils/cli') const sinon = 
require('sinon') const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') -const CID = require('cids') +const { CID } = require('multiformats/cid') describe('dht', () => { let ipfs @@ -60,7 +60,7 @@ describe('dht', () => { } it('should be able to get a value from the dht', async () => { - const key = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const key = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') const value = uint8ArrayFromString('testvalue') ipfs.dht.get.withArgs(key.bytes, defaultOptions).resolves(value) @@ -72,7 +72,7 @@ describe('dht', () => { }) it('should be able to get a value from the dht with a timeout', async () => { - const key = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const key = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') const value = uint8ArrayFromString('testvalue') ipfs.dht.get.withArgs(key.bytes, { @@ -94,7 +94,7 @@ describe('dht', () => { } it('should be able to provide data', async () => { - const key = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const key = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') await cli(`dht provide ${key}`, { ipfs @@ -103,7 +103,7 @@ describe('dht', () => { }) it('should be able to provide data recursively', async () => { - const key = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const key = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') await cli(`dht provide ${key} --recursive`, { ipfs @@ -115,7 +115,7 @@ describe('dht', () => { }) it('should be able to provide data recursively (short option)', async () => { - const key = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const key = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') await cli(`dht provide ${key} -r`, { ipfs @@ -127,7 +127,7 @@ describe('dht', () => { }) it('should be able to provide data with a timeout', async () => { - const key = new 
CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const key = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') await cli(`dht provide ${key} --timeout=1s`, { ipfs @@ -144,7 +144,7 @@ describe('dht', () => { numProviders: 20, timeout: undefined } - const key = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const key = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') const prov = { id: 'prov-id' } diff --git a/packages/ipfs-cli/test/files/flush.js b/packages/ipfs-cli/test/files/flush.js index 8212ab7e49..cebbf3ecbc 100644 --- a/packages/ipfs-cli/test/files/flush.js +++ b/packages/ipfs-cli/test/files/flush.js @@ -4,8 +4,10 @@ const { expect } = require('aegir/utils/chai') const cli = require('../utils/cli') const sinon = require('sinon') -const CID = require('cids') -const cid = new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') +const cid = CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') const defaultOptions = { timeout: undefined @@ -21,6 +23,9 @@ describe('flush', () => { ipfs = { files: { flush: sinon.stub().resolves(cid) + }, + bases: { + getBase: sinon.stub() } } print = (msg = '', newline = true) => { @@ -29,6 +34,8 @@ describe('flush', () => { }) it('should flush a path', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files flush ${path}`, { ipfs, print }) expect(ipfs.files.flush.callCount).to.equal(1) @@ -40,6 +47,8 @@ describe('flush', () => { }) it('should flush without a path', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli('files flush', { ipfs, print }) expect(ipfs.files.flush.callCount).to.equal(1) @@ -51,6 +60,9 @@ describe('flush', () => { }) it('should flush with a different CID base', async () => { + ipfs.files.flush.returns(cid.toV1()) + 
ipfs.bases.getBase.withArgs('base64').returns(base64) + await cli('files flush --cid-base base64', { ipfs, print }) expect(ipfs.files.flush.callCount).to.equal(1) @@ -58,10 +70,12 @@ describe('flush', () => { '/', defaultOptions ]) - expect(output).to.include(cid.toV1().toString('base64')) + expect(output).to.include(cid.toV1().toString(base64)) }) it('should flush a path with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files flush ${path} --timeout=1s`, { ipfs, print }) expect(ipfs.files.flush.callCount).to.equal(1) diff --git a/packages/ipfs-cli/test/files/ls.js b/packages/ipfs-cli/test/files/ls.js index 2396c47096..b3c864b795 100644 --- a/packages/ipfs-cli/test/files/ls.js +++ b/packages/ipfs-cli/test/files/ls.js @@ -5,8 +5,9 @@ const { expect } = require('aegir/utils/chai') const cli = require('../utils/cli') const sinon = require('sinon') const { isNode } = require('ipfs-utils/src/env') -const CID = require('cids') -const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') +const fileCid = CID.parse('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') const defaultOptions = { timeout: undefined @@ -26,6 +27,9 @@ describe('ls', () => { ipfs = { files: { ls: sinon.stub().returns([]) + }, + bases: { + getBase: sinon.stub() } } print = (msg = '', newline = true) => { @@ -34,6 +38,8 @@ describe('ls', () => { }) it('should list a path', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const path = '/foo' await cli(`files ls ${path}`, { ipfs, print }) @@ -46,6 +52,8 @@ describe('ls', () => { }) it('should list without a path', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli('files ls', { ipfs, print }) expect(ipfs.files.ls.callCount).to.equal(1) @@ -56,6 +64,8 @@ describe('ls', () => { }) it('should 
list a path with details', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const files = [{ cid: fileCid, name: 'file-name', @@ -72,12 +82,14 @@ describe('ls', () => { await cli('files ls --long /foo', { ipfs, print }) expect(ipfs.files.ls.callCount).to.equal(1) - expect(output).to.include(files[0].cid.toString()) + expect(output).to.include(files[0].cid.toString(base58btc)) expect(output).to.include(files[0].name) expect(output).to.include(files[0].size) }) it('should list a path with details (short option)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const files = [{ cid: fileCid, name: 'file-name', @@ -94,12 +106,14 @@ describe('ls', () => { await cli('files ls -l /foo', { ipfs, print }) expect(ipfs.files.ls.callCount).to.equal(1) - expect(output).to.include(files[0].cid.toString()) + expect(output).to.include(files[0].cid.toString(base58btc)) expect(output).to.include(files[0].name) expect(output).to.include(files[0].size) }) it('should list a path with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const path = '/foo' await cli(`files ls ${path} --timeout=1s`, { ipfs, print }) @@ -114,6 +128,8 @@ describe('ls', () => { }) it('should strip control characters from path names', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const files = [{ cid: fileCid, name: 'file\n\t\b-name', @@ -130,7 +146,7 @@ describe('ls', () => { await cli('files ls --long /foo', { ipfs, print }) expect(ipfs.files.ls.callCount).to.equal(1) - expect(output).to.include(files[0].cid.toString()) + expect(output).to.include(files[0].cid.toString(base58btc)) expect(output).to.include('file-name') expect(output).to.include(files[0].size) }) diff --git a/packages/ipfs-cli/test/files/stat.js b/packages/ipfs-cli/test/files/stat.js index be0499a9b4..ce6c013449 100644 --- a/packages/ipfs-cli/test/files/stat.js +++ b/packages/ipfs-cli/test/files/stat.js @@ -5,8 
+5,9 @@ const { expect } = require('aegir/utils/chai') const cli = require('../utils/cli') const sinon = require('sinon') const { isNode } = require('ipfs-utils/src/env') -const CID = require('cids') -const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') +const fileCid = CID.parse('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') const defaultOptions = { withLocal: false, @@ -36,6 +37,9 @@ describe('stat', () => { mode: 'stats-mode', mtime: 'stats-mtime' }) + }, + bases: { + getBase: sinon.stub() } } print = (msg = '', newline = true) => { @@ -44,6 +48,8 @@ describe('stat', () => { }) it('should stat a path', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files stat ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) @@ -55,6 +61,8 @@ describe('stat', () => { }) it('should stat a path with local', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files stat --with-local ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) @@ -68,6 +76,8 @@ describe('stat', () => { }) it('should stat a path with local (short option)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files stat -l ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) @@ -81,6 +91,8 @@ describe('stat', () => { }) it('should stat a path and only show hashes', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files stat --hash ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) @@ -88,10 +100,12 @@ describe('stat', () => { path, defaultOptions ]) - expect(output).to.equal(`${fileCid}\n`) + expect(output).to.equal(`${fileCid.toString(base58btc)}\n`) }) it('should stat a path and only show hashes (short option)', async 
() => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files stat -h ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) @@ -99,10 +113,12 @@ describe('stat', () => { path, defaultOptions ]) - expect(output).to.equal(`${fileCid}\n`) + expect(output).to.equal(`${fileCid.toString(base58btc)}\n`) }) it('should stat a path and only show sizes', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files stat --size ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) @@ -114,6 +130,8 @@ describe('stat', () => { }) it('should stat a path and only show sizes (short option)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files stat -s ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) @@ -125,6 +143,8 @@ describe('stat', () => { }) it('should stat a path with format option', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files stat --format ' ' ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) @@ -136,6 +156,8 @@ describe('stat', () => { }) it('should stat a path with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + await cli(`files stat ${path} --timeout=1s`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) diff --git a/packages/ipfs-cli/test/get.js b/packages/ipfs-cli/test/get.js index 0677d4cfb4..c4b73f3bb6 100644 --- a/packages/ipfs-cli/test/get.js +++ b/packages/ipfs-cli/test/get.js @@ -5,7 +5,7 @@ const fs = require('fs') const { expect } = require('aegir/utils/chai') const path = require('path') const clean = require('./utils/clean') -const CID = require('cids') +const { CID } = require('multiformats/cid') const cli = require('./utils/cli') const sinon = require('sinon') const uint8ArrayFromString = require('uint8arrays/from-string') @@ -15,7 +15,7 @@ const defaultOptions = { } 
describe('get', () => { - const cid = new CID('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') const buf = uint8ArrayFromString('hello world') let ipfs diff --git a/packages/ipfs-cli/test/ls.js b/packages/ipfs-cli/test/ls.js index 4a0a272436..bcf638bc13 100644 --- a/packages/ipfs-cli/test/ls.js +++ b/packages/ipfs-cli/test/ls.js @@ -4,7 +4,9 @@ const { expect } = require('aegir/utils/chai') const cli = require('./utils/cli') const sinon = require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') const defaultOptions = { recursive: false, @@ -16,20 +18,23 @@ describe('ls', () => { beforeEach(() => { ipfs = { - ls: sinon.stub() + ls: sinon.stub(), + bases: { + getBase: sinon.stub() + } } ipfs.ls.withArgs('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', defaultOptions).returns([{ mode: 0o755, mtime: null, - cid: new CID('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), + cid: CID.parse('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), type: 'dir', name: 'blocks', depth: 0 }, { mode: 0o644, mtime: null, - cid: new CID('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), + cid: CID.parse('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), type: 'file', name: 'config', size: 3928, @@ -42,14 +47,14 @@ describe('ls', () => { }).returns([{ mode: 0o755, mtime: null, - cid: new CID('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), + cid: CID.parse('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), type: 'dir', name: 'blocks', depth: 0 }, { mode: 0o644, mtime: null, - cid: new CID('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), + cid: CID.parse('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), type: 'file', name: 'config', size: 3928, @@ -59,14 +64,14 @@ describe('ls', () => { ipfs.ls.withArgs('/ipfs/Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', 
defaultOptions).returns([{ mode: 0o755, mtime: null, - cid: new CID('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), + cid: CID.parse('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), type: 'dir', name: 'blocks', depth: 0 }, { mode: 0o644, mtime: null, - cid: new CID('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), + cid: CID.parse('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), type: 'file', name: 'config', size: 3928, @@ -76,7 +81,7 @@ describe('ls', () => { ipfs.ls.withArgs('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z/blocks', defaultOptions).returns([{ mode: 0o644, mtime: null, - cid: new CID('QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD'), + cid: CID.parse('QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD'), type: 'file', name: 'CIQLBK52T5EHVHZY5URTG5JS3JCUJDQM2DRB5RVF33DCUUOFJNGVDUI.data', size: 10849, @@ -84,7 +89,7 @@ describe('ls', () => { }, { mode: 0o644, mtime: null, - cid: new CID('QmaSjzSSRanYzRGPXQY6m5SWfSkkfcnzNkurJEQc4chPJx'), + cid: CID.parse('QmaSjzSSRanYzRGPXQY6m5SWfSkkfcnzNkurJEQc4chPJx'), type: 'file', name: 'CIQLBS5HG4PRCRQ7O4EBXFD5QN6MTI5YBYMCVQJDXPKCOVR6RMLHZFQ.data', size: 10807, @@ -97,14 +102,14 @@ describe('ls', () => { }).returns([{ mode: 0o755, mtime: null, - cid: new CID('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), + cid: CID.parse('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), type: 'dir', name: 'blocks', depth: 0 }, { mode: 0o644, mtime: null, - cid: new CID('QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD'), + cid: CID.parse('QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD'), type: 'file', name: 'CIQLBK52T5EHVHZY5URTG5JS3JCUJDQM2DRB5RVF33DCUUOFJNGVDUI.data', size: 10849, @@ -112,7 +117,7 @@ describe('ls', () => { }, { mode: 0o644, mtime: null, - cid: new CID('QmaSjzSSRanYzRGPXQY6m5SWfSkkfcnzNkurJEQc4chPJx'), + cid: CID.parse('QmaSjzSSRanYzRGPXQY6m5SWfSkkfcnzNkurJEQc4chPJx'), type: 'file', name: 'CIQLBS5HG4PRCRQ7O4EBXFD5QN6MTI5YBYMCVQJDXPKCOVR6RMLHZFQ.data', size: 10807, @@ -120,15 +125,26 @@ describe('ls', () => 
{ }, { mode: 0o644, mtime: null, - cid: new CID('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), + cid: CID.parse('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), type: 'file', name: 'config', size: 3928, depth: 0 }]) + + ipfs.ls.withArgs('bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq', defaultOptions).returns([{ + mode: 0o755, + mtime: null, + cid: CID.parse('bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq'), + type: 'dir', + name: 'blocks', + depth: 0 + }]) }) it('prints added files', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const out = await cli('ls Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', { ipfs }) expect(out).to.eql( 'drwxr-xr-x - QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT - blocks/\n' + @@ -137,6 +153,8 @@ describe('ls', () => { }) it('prints added files with /ipfs/ prefix', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const out = await cli('ls /ipfs/Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', { ipfs }) expect(out).to.eql( 'drwxr-xr-x - QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT - blocks/\n' + @@ -145,6 +163,8 @@ describe('ls', () => { }) it('supports a trailing slash', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const out = await cli('ls Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z/', { ipfs }) expect(out).to.eql( 'drwxr-xr-x - QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT - blocks/\n' + @@ -153,6 +173,8 @@ describe('ls', () => { }) it('supports multiple trailing slashes', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const out = await cli('ls Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z///', { ipfs }) expect(out).to.eql( 'drwxr-xr-x - QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT - blocks/\n' + @@ -161,6 +183,8 @@ describe('ls', () => { }) it('supports multiple intermediate slashes', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const 
out = await cli('ls Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z///blocks', { ipfs }) expect(out).to.eql( '-rw-r--r-- - QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD 10849 CIQLBK52T5EHVHZY5URTG5JS3JCUJDQM2DRB5RVF33DCUUOFJNGVDUI.data\n' + @@ -169,6 +193,8 @@ describe('ls', () => { }) it('adds a header, -v', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const out = await cli('ls Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z -v', { ipfs }) expect(out).to.eql( 'Mode Mtime Hash Size Name\n' + @@ -178,6 +204,8 @@ describe('ls', () => { }) it('recursively follows folders, -r', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const out = await cli('ls -r Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', { ipfs }) expect(out).to.eql( 'drwxr-xr-x - QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT - blocks/\n' + @@ -188,14 +216,17 @@ describe('ls', () => { }) it('should ls and print CIDs encoded in specified base', async () => { - const out = await cli('ls Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z --cid-base=base64', { ipfs }) + ipfs.bases.getBase.withArgs('base64').returns(base64) + + const out = await cli('ls bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq --cid-base=base64', { ipfs }) expect(out).to.eql( - 'drwxr-xr-x - mAXASILidvV1YroHLqBvmuXko1Ly1UVenZV1K+MvhsjXhdvZQ - blocks/\n' + - '-rw-r--r-- - mAXASIBT4ZYkQw0IApLoNHBxSjpezyayKZHJyxmFKpt0I3sK5 3928 config\n' + 'drwxr-xr-x - mAXESIFgkdj3wVJjyuQSabCncE9phJUPZQmr1h47PEKMiCKxM - blocks/\n' ) }) it('prints added files with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const out = await cli('ls Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z --timeout=1s', { ipfs }) expect(out).to.eql( 'drwxr-xr-x - QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT - blocks/\n' + @@ -204,17 +235,19 @@ describe('ls', () => { }) it('removes control characters from paths', async () => { + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + ipfs.ls.withArgs('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', defaultOptions).returns([{ mode: 0o755, mtime: null, - cid: new CID('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), + cid: CID.parse('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), type: 'dir', name: 'bl\nock\bs', depth: 0 }, { mode: 0o644, mtime: null, - cid: new CID('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), + cid: CID.parse('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), type: 'file', name: 'co\r\tnfig', size: 3928, diff --git a/packages/ipfs-cli/test/object.js b/packages/ipfs-cli/test/object.js index 4f31a6aa82..25f0c9483c 100644 --- a/packages/ipfs-cli/test/object.js +++ b/packages/ipfs-cli/test/object.js @@ -3,21 +3,19 @@ const { expect } = require('aegir/utils/chai') const fs = require('fs') -const multibase = require('multibase') const cli = require('./utils/cli') const sinon = require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') const uint8ArrayFromString = require('uint8arrays/from-string') -const { - DAGNode, - DAGLink -} = require('ipld-dag-pb') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') +const dagPb = require('@ipld/dag-pb') describe('object', () => { - const cid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') + const cid = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') let ipfs - before(() => { + beforeEach(() => { ipfs = { object: { new: sinon.stub(), @@ -32,6 +30,9 @@ describe('object', () => { setData: sinon.stub(), rmLink: sinon.stub() } + }, + bases: { + getBase: sinon.stub() } } }) @@ -43,6 +44,7 @@ describe('object', () => { } it('should create a new object', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.new.withArgs(defaultOptions).resolves(cid) const out = await cli('object new', { ipfs }) @@ -50,6 +52,7 @@ 
describe('object', () => { }) it('new unixfs-dir', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.new.withArgs({ ...defaultOptions, template: 'unixfs-dir' @@ -60,6 +63,7 @@ describe('object', () => { }) it('new with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.new.withArgs({ ...defaultOptions, timeout: 1000 @@ -70,11 +74,12 @@ describe('object', () => { }) it('should new and print CID encoded in specified base', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.object.new.withArgs(defaultOptions).resolves(cid.toV1()) const out = await cli('object new --cid-base=base64', { ipfs }) expect(out).to.equal( - `${cid.toV1().toString('base64')}\n` + `${cid.toV1().toString(base64)}\n` ) }) }) @@ -85,8 +90,11 @@ describe('object', () => { } it('should get an object', async () => { - const node = new DAGNode() + const node = { + Links: [] + } + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.get.withArgs(cid, defaultOptions).resolves(node) const out = await cli(`object get ${cid}`, { ipfs }) @@ -96,13 +104,15 @@ describe('object', () => { }) it('should get an object and strip control characters from link names', async () => { - const node = new DAGNode() - node.addLink({ - Name: 'derp\n\b', - Tsize: 10, - Hash: cid - }) + const node = { + Links: [{ + Name: 'derp\n\b', + Tsize: 10, + Hash: cid + }] + } + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.get.withArgs(cid, defaultOptions).resolves(node) const out = await cli(`object get ${cid}`, { ipfs }) @@ -116,8 +126,12 @@ describe('object', () => { }) it('get with data', async () => { - const node = new DAGNode(uint8ArrayFromString('aGVsbG8gd29ybGQK', 'base64')) + const node = { + Data: uint8ArrayFromString('aGVsbG8gd29ybGQK', 'base64'), + Links: [] + } + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.get.withArgs(cid, 
defaultOptions).resolves(node) const out = await cli(`object get ${cid}`, { ipfs }) @@ -127,8 +141,12 @@ describe('object', () => { }) it('get while overriding data-encoding', async () => { - const node = new DAGNode(uint8ArrayFromString('hello world')) + const node = { + Data: uint8ArrayFromString('hello world'), + Links: [] + } + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.get.withArgs(cid, defaultOptions).resolves(node) const out = await cli(`object get --data-encoding=utf8 ${cid}`, { ipfs }) @@ -138,24 +156,32 @@ describe('object', () => { }) it('should get and print CIDs encoded in specified base', async () => { - const node = new DAGNode(null, [ - new DAGLink('', 0, cid.toV1()) - ]) + const node = { + Links: [{ + Name: '', + Tsize: 0, + Hash: cid.toV1() + }] + } + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.object.get.withArgs(cid.toV1(), defaultOptions).resolves(node) const out = await cli(`object get --cid-base=base64 ${cid.toV1()}`, { ipfs }) const result = JSON.parse(out) - expect(multibase.isEncoded(result.Hash)).to.deep.equal('base64') + expect(result.Hash).to.equal(cid.toV1().toString(base64)) result.Links.forEach(l => { - expect(multibase.isEncoded(l.Hash)).to.deep.equal('base64') + expect(l.Hash).to.equal(cid.toV1().toString(base64)) }) }) it('should get an object with a timeout', async () => { - const node = new DAGNode() + const node = { + Links: [] + } + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.get.withArgs(cid, { ...defaultOptions, timeout: 1000 @@ -170,14 +196,14 @@ describe('object', () => { describe('put', () => { const defaultOptions = { - enc: 'json', timeout: undefined } it('should put an object', async () => { - ipfs.object.put.withArgs(sinon.match.instanceOf(Uint8Array), defaultOptions).resolves(cid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + ipfs.object.put.withArgs({}, defaultOptions).resolves(cid) - const out = await cli('object put 
README.md', { ipfs }) + const out = await cli('object put {}', { ipfs }) expect(out).to.equal( `added ${cid}\n` @@ -185,14 +211,13 @@ describe('object', () => { }) it('put from pipe', async () => { - const buf = Buffer.from('hello world') - - ipfs.object.put.withArgs(buf, defaultOptions).resolves(cid) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + ipfs.object.put.withArgs({}, defaultOptions).resolves(cid) const out = await cli('object put', { ipfs, getStdin: function * () { - yield buf + yield Buffer.from('{}') } }) @@ -201,26 +226,41 @@ describe('object', () => { ) }) - it('should put and print CID encoded in specified base', async () => { - const filePath = 'README.md' - const buf = fs.readFileSync(filePath) + it('put protobuf from pipe', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + ipfs.object.put.withArgs({ Links: [] }, defaultOptions).resolves(cid) + + const out = await cli('object put --input-enc protobuf', { + ipfs, + getStdin: function * () { + yield dagPb.encode({ Links: [] }) + } + }) + + expect(out).to.equal( + `added ${cid}\n` + ) + }) - ipfs.object.put.withArgs(buf, defaultOptions).resolves(cid.toV1()) + it('should put and print CID encoded in specified base', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) + ipfs.object.put.withArgs({}, defaultOptions).resolves(cid.toV1()) - const out = await cli(`object put ${filePath} --cid-base=base64`, { ipfs }) + const out = await cli('object put {} --cid-base=base64', { ipfs }) expect(out).to.equal( - `added ${cid.toV1().toString('base64')}\n` + `added ${cid.toV1().toString(base64)}\n` ) }) it('should put an object with a timeout', async () => { - ipfs.object.put.withArgs(sinon.match.instanceOf(Uint8Array), { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + ipfs.object.put.withArgs({}, { ...defaultOptions, timeout: 1000 }).resolves(cid) - const out = await cli('object put README.md --timeout=1s', { ipfs }) + const out = await 
cli('object put {} --timeout=1s', { ipfs }) expect(out).to.equal( `added ${cid}\n` @@ -307,9 +347,12 @@ describe('object', () => { } it('should return links from an object', async () => { - ipfs.object.links.withArgs(cid, defaultOptions).resolves([ - new DAGLink('some link', 8, new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')) - ]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + ipfs.object.links.withArgs(cid, defaultOptions).resolves([{ + Name: 'some link', + Tsize: 8, + Hash: CID.parse('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V') + }]) const out = await cli(`object links ${cid}`, { ipfs }) expect(out).to.equal( @@ -318,27 +361,34 @@ describe('object', () => { }) it('should get links and print CIDs encoded in specified base', async () => { - const cid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm').toV1() - - ipfs.object.links.withArgs(cid, defaultOptions).resolves([ - new DAGLink('some link', 8, new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V').toV1()) - ]) + ipfs.bases.getBase.withArgs('base64').returns(base64) + const cid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm').toV1() + const linkCid = CID.parse('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V').toV1() + + ipfs.object.links.withArgs(cid, defaultOptions).resolves([{ + Name: 'some link', + Tsize: 8, + Hash: linkCid + }]) const out = await cli(`object links ${cid} --cid-base=base64`, { ipfs }) out.trim().split('\n').forEach(line => { const cid = line.split(' ')[0] - expect(multibase.isEncoded(cid)).to.deep.equal('base64') + expect(cid).to.equal(linkCid.toString(base64)) }) }) it('should return links from an object with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.links.withArgs(cid, { ...defaultOptions, timeout: 1000 - }).resolves([ - new DAGLink('some link', 8, new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')) - ]) + }).resolves([{ + Name: 'some link', + Tsize: 8, + Hash: 
CID.parse('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V') + }]) const out = await cli(`object links ${cid} --timeout=1s`, { ipfs }) expect(out).to.equal( @@ -347,9 +397,12 @@ describe('object', () => { }) it('should get an object and strip control characters from link names', async () => { - ipfs.object.links.withArgs(cid, defaultOptions).resolves([ - new DAGLink('derp\t\n\b', 8, new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')) - ]) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + ipfs.object.links.withArgs(cid, defaultOptions).resolves([{ + Name: 'derp\t\n\b', + Tsize: 8, + Hash: CID.parse('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V') + }]) const out = await cli(`object links ${cid}`, { ipfs }) expect(out).to.equal( @@ -365,6 +418,7 @@ describe('object', () => { } it('should append data', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const filePath = 'README.md' const buf = fs.readFileSync(filePath) @@ -377,6 +431,7 @@ describe('object', () => { }) it('append data from pipe', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const buf = Buffer.from('hello world') ipfs.object.patch.appendData.withArgs(cid, buf, defaultOptions).resolves( @@ -393,6 +448,7 @@ describe('object', () => { }) it('should append data and print CID encoded in specified base', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) const filePath = 'README.md' const buf = fs.readFileSync(filePath) @@ -401,10 +457,11 @@ describe('object', () => { ) const out = await cli(`object patch append-data ${cid} ${filePath} --cid-base=base64`, { ipfs }) - expect(out).to.equal(`${cid.toV1().toString('base64')}\n`) + expect(out).to.equal(`${cid.toV1().toString(base64)}\n`) }) it('should append data with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const filePath = 'README.md' const buf = fs.readFileSync(filePath) @@ -426,6 +483,7 @@ describe('object', () => { } 
it('should set data on an object', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const filePath = 'README.md' const buf = fs.readFileSync(filePath) @@ -438,6 +496,7 @@ describe('object', () => { }) it('set-data from pipe', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const buf = Buffer.from('hello world') ipfs.object.patch.setData.withArgs(cid, buf, defaultOptions).resolves( @@ -454,6 +513,7 @@ describe('object', () => { }) it('should set-data and print CID encoded in specified base', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) const filePath = 'README.md' const buf = fs.readFileSync(filePath) @@ -462,10 +522,11 @@ describe('object', () => { ) const out = await cli(`object patch set-data ${cid.toV1()} ${filePath} --cid-base=base64`, { ipfs }) - expect(out).to.equal(`${cid.toV1().toString('base64')}\n`) + expect(out).to.equal(`${cid.toV1().toString(base64)}\n`) }) it('should set data on an object with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const filePath = 'README.md' const buf = fs.readFileSync(filePath) @@ -487,13 +548,18 @@ describe('object', () => { } it('should add a link to an object', async () => { - const linkCid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - const updatedCid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const linkCid = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') + const updatedCid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') - ipfs.object.get.withArgs(linkCid, defaultOptions).resolves( - new DAGNode() - ) - ipfs.object.patch.addLink.withArgs(cid, sinon.match.instanceOf(DAGLink), defaultOptions).resolves( + ipfs.object.get.withArgs(linkCid, defaultOptions).resolves({ + Links: [] + }) + ipfs.object.patch.addLink.withArgs(cid, { + Name: 'foo', + Tsize: 0, + Hash: linkCid + }, 
defaultOptions).resolves( updatedCid ) @@ -504,33 +570,43 @@ describe('object', () => { }) it('should add-link and print CID encoded in specified base', async () => { - const linkCid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n').toV1() - const updatedCid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm').toV1() + ipfs.bases.getBase.withArgs('base64').returns(base64) + const linkCid = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n').toV1() + const updatedCid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm').toV1() - ipfs.object.get.withArgs(linkCid, defaultOptions).resolves( - new DAGNode() - ) - ipfs.object.patch.addLink.withArgs(cid.toV1(), sinon.match.instanceOf(DAGLink), defaultOptions).resolves( + ipfs.object.get.withArgs(linkCid, defaultOptions).resolves({ + Links: [] + }) + ipfs.object.patch.addLink.withArgs(cid.toV1(), { + Name: 'foo', + Tsize: 0, + Hash: linkCid + }, defaultOptions).resolves( updatedCid ) const out = await cli(`object patch add-link ${cid.toV1()} foo ${linkCid} --cid-base=base64`, { ipfs }) expect(out).to.equal( - `${updatedCid.toString('base64')}\n` + `${updatedCid.toString(base64)}\n` ) }) it('should add a link to an object with a timeout', async () => { - const linkCid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - const updatedCid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const linkCid = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') + const updatedCid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') ipfs.object.get.withArgs(linkCid, { ...defaultOptions, timeout: 1000 - }).resolves( - new DAGNode() - ) - ipfs.object.patch.addLink.withArgs(cid, sinon.match.instanceOf(DAGLink), { + }).resolves({ + Links: [] + }) + ipfs.object.patch.addLink.withArgs(cid, { + Name: 'foo', + Tsize: 0, + Hash: linkCid + }, { ...defaultOptions, timeout: 1000 }).resolves( @@ -550,8 +626,9 @@ 
describe('object', () => { } it('should remove a link from an object', async () => { - const cid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') - const updatedCid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const cid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') + const updatedCid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') const linkName = 'foo' ipfs.object.patch.rmLink.withArgs(cid, linkName, defaultOptions).resolves( @@ -565,8 +642,9 @@ describe('object', () => { }) it('should rm-link and print CID encoded in specified base', async () => { - const cid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm').toV1() - const updatedCid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm').toV1() + ipfs.bases.getBase.withArgs('base64').returns(base64) + const cid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm').toV1() + const updatedCid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm').toV1() const linkName = 'foo' ipfs.object.patch.rmLink.withArgs(cid, linkName, defaultOptions).resolves( @@ -575,13 +653,14 @@ describe('object', () => { const out = await cli(`object patch rm-link ${cid} ${linkName} --cid-base=base64`, { ipfs }) expect(out).to.equal( - `${updatedCid.toString('base64')}\n` + `${updatedCid.toString(base64)}\n` ) }) it('should remove a link from an object with a timeout', async () => { - const cid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') - const updatedCid = new CID('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const cid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') + const updatedCid = CID.parse('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') const linkName = 'foo' ipfs.object.patch.rmLink.withArgs(cid, linkName, { diff --git a/packages/ipfs-cli/test/pin.js b/packages/ipfs-cli/test/pin.js 
index deeb3c524d..abd616e622 100644 --- a/packages/ipfs-cli/test/pin.js +++ b/packages/ipfs-cli/test/pin.js @@ -2,32 +2,32 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') const cli = require('./utils/cli') const sinon = require('sinon') -// fixture structure: -// planets/ -// solar-system.md -// mercury/ -// wiki.md const pins = { - root: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys', - solarWiki: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG', - mercuryDir: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q', - mercuryWiki: 'QmVgSHAdMxFAuMP2JiMAYkB8pCWP1tcB9djqvq8GKAFiHi' + root: CID.parse('QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys'), + solarWiki: CID.parse('QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG'), + mercuryDir: CID.parse('QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q'), + mercuryWiki: CID.parse('QmVgSHAdMxFAuMP2JiMAYkB8pCWP1tcB9djqvq8GKAFiHi') } describe('pin', () => { let ipfs - before(() => { + beforeEach(() => { ipfs = { pin: { rmAll: sinon.stub(), addAll: sinon.stub(), ls: sinon.stub(), query: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } }) @@ -42,11 +42,12 @@ describe('pin', () => { } it('recursively (default)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.rmAll.withArgs([{ ...defaultPinOptions, - path: pins.root + path: pins.root.toString() }], defaultOptions).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin rm ${pins.root}`, { ipfs }) @@ -54,12 +55,13 @@ describe('pin', () => { }) it('non recursively', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.rmAll.withArgs([{ ...defaultPinOptions, - path: pins.root, + path: pins.root.toString(), recursive: false }], defaultOptions).returns([ - new CID(pins.root) + pins.root ]) const 
out = await cli(`pin rm --recursive false ${pins.root}`, { ipfs }) @@ -67,12 +69,13 @@ describe('pin', () => { }) it('non recursively (short option)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.rmAll.withArgs([{ ...defaultPinOptions, - path: pins.root, + path: pins.root.toString(), recursive: false }], defaultOptions).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin rm -r false ${pins.root}`, { ipfs }) @@ -80,27 +83,29 @@ describe('pin', () => { }) it('should rm and print CIDs encoded in specified base', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.pin.rmAll.withArgs([{ ...defaultPinOptions, - path: pins.root + path: pins.root.toString() }], defaultOptions).returns([ - new CID(pins.root) + pins.root.toV1() ]) const out = await cli(`pin rm ${pins.root} --cid-base=base64`, { ipfs }) - const b64CidStr = new CID(pins.root).toV1().toString('base64') + const b64CidStr = pins.root.toV1().toString(base64) expect(out).to.eql(`unpinned ${b64CidStr}\n`) }) it('with timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.rmAll.withArgs([{ ...defaultPinOptions, - path: pins.root + path: pins.root.toString() }], { ...defaultOptions, timeout: 1000 }).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin rm ${pins.root} --timeout=1s`, { ipfs }) @@ -119,11 +124,12 @@ describe('pin', () => { } it('recursively (default)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ ...defaultPinOptions, - path: pins.root + path: pins.root.toString() }], defaultOptions).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin add ${pins.root}`, { ipfs }) @@ -131,12 +137,13 @@ describe('pin', () => { }) it('non recursively', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ ...defaultPinOptions, - path: pins.root, + path: 
pins.root.toString(), recursive: false }], defaultOptions).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin add --recursive false ${pins.root}`, { ipfs }) @@ -144,12 +151,13 @@ describe('pin', () => { }) it('non recursively (short option)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ ...defaultPinOptions, - path: pins.root, + path: pins.root.toString(), recursive: false }], defaultOptions).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin add -r false ${pins.root}`, { ipfs }) @@ -157,14 +165,15 @@ describe('pin', () => { }) it('with metadata', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ ...defaultPinOptions, - path: pins.root, + path: pins.root.toString(), metadata: { key: 'value' } }], defaultOptions).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin add --metadata key=value ${pins.root}`, { ipfs }) @@ -172,14 +181,15 @@ describe('pin', () => { }) it('with a metadata (short option)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ ...defaultPinOptions, - path: pins.root, + path: pins.root.toString(), metadata: { key: 'value' } }], defaultOptions).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin add -m key=value ${pins.root}`, { ipfs }) @@ -187,44 +197,45 @@ describe('pin', () => { }) it('with json metadata', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ ...defaultPinOptions, - path: pins.root, + path: pins.root.toString(), metadata: { key: 'value' } }], defaultOptions).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin add --metadata-json '{"key":"value"}' ${pins.root}`, { ipfs }) expect(out).to.equal(`pinned ${pins.root} recursively\n`) }) - it('should rm and print CIDs encoded in specified base', async () => { + 
it('should add and print CIDs encoded in specified base', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.pin.addAll.withArgs([{ - ...defaultOptions, - path: pins.root, - recursive: true, - comments: undefined + ...defaultPinOptions, + path: pins.root.toString() }], defaultOptions).returns([ - new CID(pins.root) + pins.root.toV1() ]) const out = await cli(`pin add ${pins.root} --cid-base=base64`, { ipfs }) - const b64CidStr = new CID(pins.root).toV1().toString('base64') + const b64CidStr = pins.root.toV1().toString(base64) expect(out).to.eql(`pinned ${b64CidStr} recursively\n`) }) it('recursively with timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ ...defaultPinOptions, - path: pins.root + path: pins.root.toString() }], { ...defaultOptions, timeout: 1000 }).returns([ - new CID(pins.root) + pins.root ]) const out = await cli(`pin add ${pins.root} --timeout=1s`, { ipfs }) @@ -240,8 +251,9 @@ describe('pin', () => { } it('lists all pins when no hash is passed', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs(defaultOptions).returns([{ - cid: new CID(pins.root), + cid: pins.root, type: 'recursive' }]) @@ -250,14 +262,15 @@ describe('pin', () => { }) it('handles multiple hashes', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs({ ...defaultOptions, - paths: [pins.root, pins.solarWiki] + paths: [pins.root.toString(), pins.solarWiki.toString()] }).returns([{ - cid: new CID(pins.root), + cid: pins.root, type: 'recursive' }, { - cid: new CID(pins.solarWiki), + cid: pins.solarWiki, type: 'direct' }]) @@ -266,8 +279,9 @@ describe('pin', () => { }) it('can print quietly', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs(defaultOptions).returns([{ - cid: new CID(pins.root), + cid: pins.root.toString(), type: 'recursive' }]) @@ -276,8 +290,9 @@ 
describe('pin', () => { }) it('can print quietly (short option)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs(defaultOptions).returns([{ - cid: new CID(pins.root), + cid: pins.root.toString(), type: 'recursive' }]) @@ -286,21 +301,23 @@ describe('pin', () => { }) it('should ls and print CIDs encoded in specified base', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.pin.ls.withArgs(defaultOptions).returns([{ - cid: new CID(pins.root).toV1(), + cid: pins.root.toV1(), type: 'recursive' }]) const out = await cli('pin ls --cid-base=base64', { ipfs }) - expect(out).to.equal(`${new CID(pins.root).toV1().toString('base64')} recursive\n`) + expect(out).to.equal(`${pins.root.toV1().toString(base64)} recursive\n`) }) it('lists all pins with a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs({ ...defaultOptions, timeout: 1000 }).returns([{ - cid: new CID(pins.root), + cid: pins.root, type: 'recursive' }]) @@ -309,8 +326,9 @@ describe('pin', () => { }) it('strips control characters from metadata', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs(defaultOptions).returns([{ - cid: new CID(pins.root), + cid: pins.root, type: 'recursive', metadata: { 'herp\n\t': 'de\brp' diff --git a/packages/ipfs-cli/test/refs-local.js b/packages/ipfs-cli/test/refs-local.js index bed5f30f9a..b392effed8 100644 --- a/packages/ipfs-cli/test/refs-local.js +++ b/packages/ipfs-cli/test/refs-local.js @@ -4,7 +4,7 @@ const { expect } = require('aegir/utils/chai') const cli = require('./utils/cli') const sinon = require('sinon') -const multibase = require('multibase') +const { base32 } = require('multiformats/bases/base32') const uint8ArrayFromString = require('uint8arrays/from-string') const defaultOptions = { @@ -52,7 +52,7 @@ describe('refs local', () => { const out = await cli('refs local --multihash', { ipfs }) const 
lines = out.split('\n') - expect(lines.includes(multibase.encoding('base32upper').encode(uint8ArrayFromString(ref)))).to.be.true() + expect(lines.includes(base32.encode(uint8ArrayFromString(ref)).toUpperCase())).to.be.true() expect(lines.includes(err)).to.be.true() }) diff --git a/packages/ipfs-cli/test/refs.js b/packages/ipfs-cli/test/refs.js index 9dc17fb93d..e26c985227 100644 --- a/packages/ipfs-cli/test/refs.js +++ b/packages/ipfs-cli/test/refs.js @@ -2,7 +2,7 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const CID = require('cids') +const { CID } = require('multiformats/cid') const cli = require('./utils/cli') const sinon = require('sinon') @@ -18,7 +18,7 @@ const defaultOptions = { // Note: There are more comprehensive tests in interface-js-ipfs-core describe('refs', () => { let ipfs - const cid = new CID('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z') + const cid = CID.parse('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z') const err = 'err' const ref = 'ref' diff --git a/packages/ipfs-cli/test/repo.js b/packages/ipfs-cli/test/repo.js index 66051740a7..856be5b942 100644 --- a/packages/ipfs-cli/test/repo.js +++ b/packages/ipfs-cli/test/repo.js @@ -4,7 +4,7 @@ const { expect } = require('aegir/utils/chai') const cli = require('./utils/cli') const sinon = require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') describe('repo', () => { let ipfs @@ -124,7 +124,7 @@ describe('repo', () => { }) describe('gc', () => { - const cid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const cid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') const defaultOptions = { timeout: undefined } diff --git a/packages/ipfs-cli/test/resolve.js b/packages/ipfs-cli/test/resolve.js index c823da4384..7ab661a997 100644 --- a/packages/ipfs-cli/test/resolve.js +++ b/packages/ipfs-cli/test/resolve.js @@ -2,19 +2,19 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const CID = require('cids') 
+const { CID } = require('multiformats/cid') const cli = require('./utils/cli') const sinon = require('sinon') const defaultOptions = { recursive: false, - cidBase: undefined, + cidBase: 'base58btc', timeout: undefined } describe('resolve', () => { let ipfs - const cid = new CID('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z') + const cid = CID.parse('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z') beforeEach(() => { ipfs = { diff --git a/packages/ipfs-cli/tsconfig.json b/packages/ipfs-cli/tsconfig.json index 2a5580b45c..b1f30b011c 100644 --- a/packages/ipfs-cli/tsconfig.json +++ b/packages/ipfs-cli/tsconfig.json @@ -22,12 +22,6 @@ }, { "path": "../ipfs-http-client" - }, - { - "path": "../ipfs-http-gateway" - }, - { - "path": "../ipfs-http-server" } ] } diff --git a/packages/ipfs-client/.aegir.js b/packages/ipfs-client/.aegir.js index 8942c3deb8..2f84195e71 100644 --- a/packages/ipfs-client/.aegir.js +++ b/packages/ipfs-client/.aegir.js @@ -3,6 +3,6 @@ /** @type {import('aegir').PartialOptions} */ module.exports = { build: { - bundlesizeMax: '120kB' + bundlesizeMax: '98kB' } } diff --git a/packages/ipfs-client/package.json b/packages/ipfs-client/package.json index 6647a23a71..684094e275 100644 --- a/packages/ipfs-client/package.json +++ b/packages/ipfs-client/package.json @@ -37,7 +37,7 @@ "merge-options": "^3.0.4" }, "devDependencies": { - "aegir": "^33.0.0", + "aegir": "^34.0.2", "rimraf": "^3.0.2" } } diff --git a/packages/ipfs-client/src/index.js b/packages/ipfs-client/src/index.js index 4fe4fd57ba..20343cccb7 100644 --- a/packages/ipfs-client/src/index.js +++ b/packages/ipfs-client/src/index.js @@ -2,7 +2,7 @@ const { create: httpClient } = require('ipfs-http-client') const { create: grpcClient } = require('ipfs-grpc-client') -const mergeOptions = require('merge-options') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) /** * @typedef {import('ipfs-http-client').Options} HTTPOptions @@ -30,7 +30,7 @@ function create (opts = {}) 
{ } // override http methods with grpc if address is supplied - return mergeOptions.apply({ ignoreUndefined: true }, clients) + return mergeOptions(...clients) } module.exports = { diff --git a/packages/ipfs-core-types/package.json b/packages/ipfs-core-types/package.json index 1b3b69e00f..ef090b8152 100644 --- a/packages/ipfs-core-types/package.json +++ b/packages/ipfs-core-types/package.json @@ -28,14 +28,12 @@ ], "license": "(Apache-2.0 OR MIT)", "dependencies": { - "cids": "^1.1.6", - "interface-datastore": "^4.0.0", - "ipld-block": "^0.11.1", - "multiaddr": "^9.0.1", - "multibase": "^4.0.2" + "interface-datastore": "^5.0.0", + "multiaddr": "^10.0.0", + "multiformats": "^9.4.1" }, "devDependencies": { - "aegir": "^33.0.0" + "aegir": "^34.0.2" }, "contributors": [ "Irakli Gozalishvili " diff --git a/packages/ipfs-core-types/src/bitswap/index.d.ts b/packages/ipfs-core-types/src/bitswap/index.d.ts index de1b2dabfa..4de64c7a3b 100644 --- a/packages/ipfs-core-types/src/bitswap/index.d.ts +++ b/packages/ipfs-core-types/src/bitswap/index.d.ts @@ -1,4 +1,4 @@ -import type CID from 'cids' +import type { CID } from 'multiformts/cid' import type { AbortOptions } from '../utils' export interface API { @@ -61,10 +61,10 @@ export interface Stats { provideBufLen: number wantlist: CID[] peers: string[] - blocksReceived: BigInt - dataReceived: BigInt - blocksSent: BigInt - dataSent: BigInt - dupBlksReceived: BigInt - dupDataReceived: BigInt + blocksReceived: bigint + dataReceived: bigint + blocksSent: bigint + dataSent: bigint + dupBlksReceived: bigint + dupDataReceived: bigint } diff --git a/packages/ipfs-core-types/src/block/index.d.ts b/packages/ipfs-core-types/src/block/index.d.ts index 65bc5e3401..989626302f 100644 --- a/packages/ipfs-core-types/src/block/index.d.ts +++ b/packages/ipfs-core-types/src/block/index.d.ts @@ -1,8 +1,5 @@ import { AbortOptions, PreloadOptions, IPFSPath } from '../utils' -import CID, { CIDVersion } from 'cids' -import Block from 'ipld-block' 
-import { CodecName } from 'multicodec' -import { HashName } from 'multihashes' +import { CID, CIDVersion } from 'multiformats/cid' export interface API { /** @@ -11,26 +8,23 @@ export interface API { * @example * ```js * const block = await ipfs.block.get(cid) - * console.log(block.data) + * console.log(block) * ``` */ - get: (cid: CID | string | Uint8Array, options?: AbortOptions & PreloadOptions & OptionExtension) => Promise + get: (cid: CID, options?: AbortOptions & PreloadOptions & OptionExtension) => Promise /** - * Stores input as an IPFS block. - * - * **Note:** If you pass a `Block` instance as the block parameter, you - * don't need to pass options, as the block instance will carry the CID - * value as a property. + * Stores a Uint8Array as a block in the underlying blockstore * * @example * ```js + * const dagPb = require('@ipld/dag-pb') * // Defaults * const encoder = new TextEncoder() * const decoder = new TextDecoder() * * const bytes = encoder.encode('a serialized object') - * const block = await ipfs.block.put(bytes) + * const cid = await ipfs.block.put(bytes) * * console.log(decoder.decode(block.data)) * // Logs: @@ -40,9 +34,9 @@ export interface API { * // the CID of the object * * // With custom format and hashtype through CID - * const CID = require('cids') + * const { CID } = require('multiformats/cid') * const another = encoder.encode('another serialized object') - * const cid = new CID(1, 'dag-pb', multihash) + * const cid = CID.createV1(dagPb.code, multihash) * const block = await ipfs.block.put(another, cid) * console.log(decoder.decode(block.data)) * @@ -53,7 +47,7 @@ export interface API { * // the CID of the object * ``` */ - put: (block: Block | Uint8Array, options?: PutOptions & OptionExtension) => Promise + put: (block: Uint8Array, options?: PutOptions & OptionExtension) => Promise /** * Remove one or more IPFS block(s) from the underlying block store @@ -76,7 +70,7 @@ export interface API { * * @example * ```js - * const cid = 
CID.from('QmQULBtTjNcMwMr4VMNknnVv3RpytrLSdgpvMcTnfNhrBJ') + * const cid = CID.parse('QmQULBtTjNcMwMr4VMNknnVv3RpytrLSdgpvMcTnfNhrBJ') * const stats = await ipfs.block.stat(cid) * console.log(stats.cid.toString()) * // Logs: QmQULBtTjNcMwMr4VMNknnVv3RpytrLSdgpvMcTnfNhrBJ @@ -88,25 +82,15 @@ export interface API { } export interface PutOptions extends AbortOptions, PreloadOptions { - /** - * CID to store the block under - ignored if a Block is passed - */ - cid?: CID - /** * The codec to use to create the CID */ - format?: CodecName + format?: string /** * Multihash hashing algorithm to use. (Defaults to 'sha2-256') */ - mhtype?: HashName - - /** - * @deprecated - */ - mhlen?: any + mhtype?: string /** * The version to use to create the CID @@ -123,7 +107,7 @@ export interface RmOptions extends AbortOptions { /** * Ignores non-existent blocks */ - force?: boolean, + force?: boolean /** * Do not return output if true diff --git a/packages/ipfs-core-types/src/config/index.d.ts b/packages/ipfs-core-types/src/config/index.d.ts index 61881834b2..776f115a2b 100644 --- a/packages/ipfs-core-types/src/config/index.d.ts +++ b/packages/ipfs-core-types/src/config/index.d.ts @@ -1,5 +1,5 @@ import type { AbortOptions } from '../utils' -import { API as ProfilesAPI} from './profiles' +import { API as ProfilesAPI } from './profiles' export interface API { /** @@ -31,7 +31,7 @@ export interface API { export interface Config { Addresses?: AddressConfig - API?: APIConfig, + API?: APIConfig Profiles?: string Bootstrap?: string[] Discovery?: DiscoveryConfig @@ -51,8 +51,8 @@ export interface AddressConfig { RPC?: string Delegates?: string[] Gateway?: string - Swarm?: string[], - Announce?: string[], + Swarm?: string[] + Announce?: string[] NoAnnounce?: string[] } @@ -79,22 +79,22 @@ export interface DatastoreConfig { } export interface DatastoreType { - type: string, - path: string, - sync?: boolean, - shardFunc?: string, + type: string + path: string + sync?: boolean + shardFunc?: 
string compression?: string } export interface DatastoreMountPoint { - mountpoint: string, - type: string, - prefix: string, + mountpoint: string + type: string + prefix: string child: DatastoreType } export interface DatastoreSpec { - type?: string, + type?: string mounts?: DatastoreMountPoint[] } diff --git a/packages/ipfs-core-types/src/dag/index.d.ts b/packages/ipfs-core-types/src/dag/index.d.ts index fa8bbdd961..3db88b9eeb 100644 --- a/packages/ipfs-core-types/src/dag/index.d.ts +++ b/packages/ipfs-core-types/src/dag/index.d.ts @@ -1,7 +1,5 @@ import { AbortOptions, PreloadOptions, IPFSPath } from '../utils' -import CID, { CIDVersion } from 'cids' -import { CodecName } from 'multicodec' -import { HashName } from 'multihashes' +import { CID, CIDVersion } from 'multiformats/cid' export interface API { /** @@ -56,7 +54,7 @@ export interface API { * @example * ```js * const obj = { simple: 'object' } - * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha3-512' }) + * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-512' }) * * console.log(cid.toString()) * // zBwWX9ecx5F4X54WAjmFLErnBT6ByfNxStr5ovowTL7AhaUR98RWvXPS1V3HqV1qs3r5Ec5ocv7eCdbqYQREXNUfYNuKG @@ -64,43 +62,6 @@ export interface API { */ put: (node: any, options?: PutOptions & OptionExtension) => Promise - /** - * Enumerate all the entries in a graph - * - * @example - * ```js - * // example obj - * const obj = { - * a: 1, - * b: [1, 2, 3], - * c: { - * ca: [5, 6, 7], - * cb: 'foo' - * } - * } - * - * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) - * console.log(cid.toString()) - * // zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5 - * - * const result = await ipfs.dag.tree('zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5') - * console.log(result) - * // Logs: - * // a - * // b - * // b/0 - * // b/1 - * // b/2 - * // c - * // c/ca - * // c/ca/0 - * // c/ca/1 - * // c/ca/2 - * // c/cb - * ``` - */ - tree: (cid: CID, 
options?: TreeOptions & OptionExtension) => Promise - /** * Returns the CID and remaining path of the node at the end of the passed IPFS path * @@ -158,22 +119,17 @@ export interface GetResult { export interface PutOptions extends AbortOptions, PreloadOptions { /** - * CID to store the value with - */ - cid?: CID - - /** - * The codec to use to create the CID (ignored if `cid` is passed) + * The codec to use to create the CID (defaults to 'dag-cbor') */ - format?: CodecName + format?: string /** - * Multihash hashing algorithm to use (ignored if `cid` is passed) + * Multihash hashing algorithm to use (defaults to 'sha2-256') */ - hashAlg?: HashName + hashAlg?: string /** - * The version to use to create the CID (ignored if `cid` is passed) + * The version to use to create the CID (default to 1) */ version?: CIDVersion @@ -224,4 +180,4 @@ export interface ResolveResult { * The remainder of the Path that the node was unable to resolve */ remainderPath?: string -} \ No newline at end of file +} diff --git a/packages/ipfs-core-types/src/dht/index.d.ts b/packages/ipfs-core-types/src/dht/index.d.ts index 91aa08c7ed..30b263802b 100644 --- a/packages/ipfs-core-types/src/dht/index.d.ts +++ b/packages/ipfs-core-types/src/dht/index.d.ts @@ -1,39 +1,39 @@ import type { AbortOptions } from '../utils' import type { Multiaddr } from 'multiaddr' -import type CID from 'cids' +import type { CID } from 'multiformts/cid' export interface API { /** - * Query the DHT for all multiaddresses associated with a `PeerId`. 
- * - * @example - * ```js - * const info = await ipfs.dht.findPeer('QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt') - * - * console.log(info.id) - * // QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt - * - * info.addrs.forEach(addr => console.log(addr.toString())) - * // '/ip4/147.75.94.115/udp/4001/quic' - * // '/ip6/2604:1380:3000:1f00::1/udp/4001/quic' - * // '/dnsaddr/bootstrap.libp2p.io' - * // '/ip6/2604:1380:3000:1f00::1/tcp/4001' - * // '/ip4/147.75.94.115/tcp/4001' - * ``` - */ + * Query the DHT for all multiaddresses associated with a `PeerId`. + * + * @example + * ```js + * const info = await ipfs.dht.findPeer('QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt') + * + * console.log(info.id) + * // QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt + * + * info.addrs.forEach(addr => console.log(addr.toString())) + * // '/ip4/147.75.94.115/udp/4001/quic' + * // '/ip6/2604:1380:3000:1f00::1/udp/4001/quic' + * // '/dnsaddr/bootstrap.libp2p.io' + * // '/ip6/2604:1380:3000:1f00::1/tcp/4001' + * // '/ip4/147.75.94.115/tcp/4001' + * ``` + */ findPeer: (peerId: string, options?: AbortOptions & OptionExtension) => Promise /** - * Find peers in the DHT that can provide a specific value, given a CID. - * - * @example - * ```js - * const providers = ipfs.dht.findProvs('QmdPAhQRxrDKqkGPvQzBvjYe3kU8kiEEAd2J6ETEamKAD9') - * for await (const provider of providers) { - * console.log(provider.id.toString()) - * } - * ``` - */ + * Find peers in the DHT that can provide a specific value, given a CID. 
+ * + * @example + * ```js + * const providers = ipfs.dht.findProvs('QmdPAhQRxrDKqkGPvQzBvjYe3kU8kiEEAd2J6ETEamKAD9') + * for await (const provider of providers) { + * console.log(provider.id.toString()) + * } + * ``` + */ findProvs: (cid: CID, options?: DHTFindProvsOptions & OptionExtension) => AsyncIterable /** @@ -42,18 +42,18 @@ export interface API { get: (key: Uint8Array, options?: AbortOptions & OptionExtension) => Promise /** - * Announce to the network that we are providing given values. - */ + * Announce to the network that we are providing given values. + */ provide: (cid: CID | CID[], options?: DHTProvideOptions & OptionExtension) => AsyncIterable /** - * Write a key/value pair to the DHT. - * - * Given a key of the form /foo/bar and a value of any - * form, this will write that value to the DHT with - * that key. - * - */ + * Write a key/value pair to the DHT. + * + * Given a key of the form /foo/bar and a value of any + * form, this will write that value to the DHT with + * that key. 
+ * + */ put: (key: Uint8Array, value: Uint8Array, options?: AbortOptions & OptionExtension) => AsyncIterable /** diff --git a/packages/ipfs-core-types/src/files/index.d.ts b/packages/ipfs-core-types/src/files/index.d.ts index 08ee4fa1d3..0086405584 100644 --- a/packages/ipfs-core-types/src/files/index.d.ts +++ b/packages/ipfs-core-types/src/files/index.d.ts @@ -1,7 +1,5 @@ import { AbortOptions, IPFSPath } from '../utils' -import CID, { CIDVersion } from 'cids' -import { CodecName } from 'multicodec' -import { HashName } from 'multihashes' +import { CID, CIDVersion } from 'multiformats/cid' import { Mtime, MtimeLike } from 'ipfs-unixfs' import type { AddProgressFn } from '../root' @@ -35,13 +33,13 @@ export interface API { * @example * ```js * // To copy a file -* await ipfs.files.cp('/src-file', '/dst-file') -* -* // To copy a directory -* await ipfs.files.cp('/src-dir', '/dst-dir') -* -* // To copy multiple files to a directory -* await ipfs.files.cp('/src-file1', '/src-file2', '/dst-dir') + * await ipfs.files.cp('/src-file', '/dst-file') + * + * // To copy a directory + * await ipfs.files.cp('/src-dir', '/dst-dir') + * + * // To copy multiple files to a directory + * await ipfs.files.cp('/src-file1', '/src-file2', '/dst-dir') * ``` */ cp: (from: IPFSPath | IPFSPath[], to: string, options?: CpOptions & OptionExtension) => Promise @@ -210,7 +208,7 @@ export interface ChmodOptions extends MFSOptions, AbortOptions { /** * The hash algorithm to use for any updated entries */ - hashAlg?: HashName + hashAlg?: string /** * The CID version to use for any updated entries @@ -232,7 +230,7 @@ export interface CpOptions extends MFSOptions, AbortOptions { /** * The hash algorithm to use for any updated entries */ - hashAlg?: HashName + hashAlg?: string /** * The CID version to use for any updated entries @@ -264,7 +262,7 @@ export interface MkdirOptions extends MFSOptions, AbortOptions { /** * The hash algorithm to use for any updated entries */ - hashAlg?: HashName + 
hashAlg?: string /** * The CID version to use for any updated entries @@ -355,7 +353,7 @@ export interface TouchOptions extends MFSOptions, AbortOptions { /** * The hash algorithm to use for any updated entries */ - hashAlg?: HashName + hashAlg?: string /** * The CID version to use for any updated entries @@ -377,7 +375,7 @@ export interface RmOptions extends MFSOptions, AbortOptions { /** * The hash algorithm to use for any updated entries */ - hashAlg?: HashName + hashAlg?: string /** * The CID version to use for any updated entries @@ -446,7 +444,7 @@ export interface WriteOptions extends MFSOptions, AbortOptions { /** * The hash algorithm to use for any updated entries */ - hashAlg?: HashName + hashAlg?: string /** * The CID version to use for any updated entries @@ -483,7 +481,7 @@ export interface MvOptions extends MFSOptions, AbortOptions { /** * The hash algorithm to use for any updated entries */ - hashAlg?: HashName + hashAlg?: string /** * The CID version to use for any updated entries diff --git a/packages/ipfs-core-types/src/index.d.ts b/packages/ipfs-core-types/src/index.d.ts index ea1e19a98a..fcfc03336a 100644 --- a/packages/ipfs-core-types/src/index.d.ts +++ b/packages/ipfs-core-types/src/index.d.ts @@ -18,6 +18,9 @@ import { API as RepoAPI } from './repo' import { API as StatsAPI } from './stats' import { API as SwarmAPI } from './swarm' import { AbortOptions, Await, AwaitIterable } from './utils' +import type { BlockCodec } from 'multiformats/codecs/interface' +import type { MultibaseCodec } from 'multiformats/bases/interface' +import type { MultihashHasher } from 'multiformats/hashes/interface' interface RefsAPI extends Refs { local: Local @@ -42,6 +45,24 @@ export interface IPFS extends RootAPI { repo: RepoAPI stats: StatsAPI swarm: SwarmAPI + bases: Bases + codecs: Codecs + hashers: Hashers +} + +interface Bases { + getBase: (code: string) => Promise> + listBases: () => Array> +} + +interface Codecs { + getCodec: (code: number | string) => 
Promise> + listCodecs: () => Array> +} + +interface Hashers { + getHasher: (code: number | string) => Promise> + listHashers: () => Array> } export type { diff --git a/packages/ipfs-core-types/src/name/index.d.ts b/packages/ipfs-core-types/src/name/index.d.ts index ab71f8d3d9..d4c70087bf 100644 --- a/packages/ipfs-core-types/src/name/index.d.ts +++ b/packages/ipfs-core-types/src/name/index.d.ts @@ -1,9 +1,9 @@ -import CID from 'cids'; +import { CID } from 'multiformts/cid' import type { AbortOptions } from '../utils' import type { API as PubsubAPI } from './pubsub' export interface API { - /** + /** * IPNS is a PKI namespace, where names are the hashes of public keys, and * the private key enables publishing new (signed) values. In both publish * and resolve, the default name used is the node's own PeerID, @@ -60,11 +60,11 @@ export interface PublishOptions extends AbortOptions { key?: string /** * When offline, save the IPNS record - * to the the local datastore without broadcasting to the network instead of - * simply failing. - * - * This option is not yet implemented in js-ipfs. See tracking issue [ipfs/js-ipfs#1997] - * (https://github.com/ipfs/js-ipfs/issues/1997). + * to the the local datastore without broadcasting to the network instead of + * simply failing. + * + * This option is not yet implemented in js-ipfs. See tracking issue [ipfs/js-ipfs#1997] + * (https://github.com/ipfs/js-ipfs/issues/1997). 
*/ allowOffline?: boolean } @@ -90,5 +90,5 @@ export interface ResolveOptions extends AbortOptions { /** * do not use cached entries */ - nocache?: boolean + nocache?: boolean } diff --git a/packages/ipfs-core-types/src/name/pubsub/index.d.ts b/packages/ipfs-core-types/src/name/pubsub/index.d.ts index 2d262891f7..bcc34bca57 100644 --- a/packages/ipfs-core-types/src/name/pubsub/index.d.ts +++ b/packages/ipfs-core-types/src/name/pubsub/index.d.ts @@ -1,4 +1,3 @@ -import CID from 'cids'; import type { AbortOptions } from '../../utils' export interface API { diff --git a/packages/ipfs-core-types/src/object/index.d.ts b/packages/ipfs-core-types/src/object/index.d.ts index f1864d9b2d..fa830dad8b 100644 --- a/packages/ipfs-core-types/src/object/index.d.ts +++ b/packages/ipfs-core-types/src/object/index.d.ts @@ -1,14 +1,14 @@ -import CID from 'cids'; +import type { CID } from 'multiformats/cid' import type { AbortOptions, PreloadOptions } from '../utils' -import type { DAGNode, DAGNodeLike, DAGLink } from 'ipld-dag-pb' import type { API as PatchAPI } from './patch' +import type { PBNode, PBLink } from '@ipld/dag-pb' export interface API { new: (options?: NewObjectOptions & OptionExtension) => Promise - put: (obj: DAGNode | DAGNodeLike | Uint8Array, options?: PutOptions & OptionExtension) => Promise - get: (cid: CID, options?: AbortOptions & PreloadOptions & OptionExtension) => Promise + put: (obj: PBNode, options?: PutOptions & OptionExtension) => Promise + get: (cid: CID, options?: AbortOptions & PreloadOptions & OptionExtension) => Promise data: (cid: CID, options?: AbortOptions & PreloadOptions & OptionExtension) => Promise - links: (cid: CID, options?: AbortOptions & PreloadOptions & OptionExtension) => Promise + links: (cid: CID, options?: AbortOptions & PreloadOptions & OptionExtension) => Promise stat: (cid: CID, options?: AbortOptions & PreloadOptions & OptionExtension) => Promise patch: PatchAPI @@ -18,12 +18,8 @@ export interface NewObjectOptions extends
AbortOptions, PreloadOptions { template?: 'unixfs-dir' } -export interface PutOptions extends AbortOptions, PreloadOptions { - enc?: PutEncoding -} - export interface StatResult { - Hash: string + Hash: CID NumLinks: number BlockSize: number LinksSize: number @@ -31,4 +27,6 @@ CumulativeSize: number } -export type PutEncoding = 'json' | 'protobuf' \ No newline at end of file +export interface PutOptions extends AbortOptions, PreloadOptions { + pin?: boolean +} diff --git a/packages/ipfs-core-types/src/object/patch/index.d.ts b/packages/ipfs-core-types/src/object/patch/index.d.ts index 4d38c0a05a..b21ddd601a 100644 --- a/packages/ipfs-core-types/src/object/patch/index.d.ts +++ b/packages/ipfs-core-types/src/object/patch/index.d.ts @@ -1,6 +1,6 @@ -import type CID from 'cids'; +import type { CID } from 'multiformats/cid' import type { AbortOptions } from '../../utils' -import type { DAGLink } from 'ipld-dag-pb' +import type { PBLink as DAGLink } from '@ipld/dag-pb' export interface API { addLink: (cid: CID, link: DAGLink, options?: AbortOptions & OptionExtension) => Promise diff --git a/packages/ipfs-core-types/src/pin/index.d.ts b/packages/ipfs-core-types/src/pin/index.d.ts index 9a22c8f6a7..285882f5c9 100644 --- a/packages/ipfs-core-types/src/pin/index.d.ts +++ b/packages/ipfs-core-types/src/pin/index.d.ts @@ -1,5 +1,5 @@ import type { AbortOptions, AwaitIterable } from '../utils' -import type CID from 'cids' +import type { CID } from 'multiformats/cid' import type { API as Remote } from './remote' export interface API { @@ -9,7 +9,7 @@ * * @example * ```js - * const cid = CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + * const cid = CID.parse('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') * const pinned of ipfs.pin.add(cid)) * console.log(pinned) * // Logs: @@ -24,7 +24,7 @@ * * @example * ```js - * const cid = CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + *
const cid = CID.parse('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') * for await (const cid of ipfs.pin.addAll([cid])) { * console.log(cid) * } @@ -47,9 +47,9 @@ * // { cid: CID(QmSo73bmN47gBxMNqbdV6rZ4KJiqaArqJ1nu5TvFhqqj1R), type: 'indirect' } * * const paths = [ - * CID.from('Qmc5..'), - * CID.from('QmZb..'), - * CID.from('QmSo..') + * CID.parse('Qmc5..'), + * CID.parse('QmZb..'), + * CID.parse('QmSo..') * ] * for await (const { cid, type } of ipfs.pin.ls({ paths })) { * console.log({ cid, type }) @@ -66,7 +66,7 @@ * * @example * ```js - * const cid = CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + * const cid = CID.parse('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') * const result = await ipfs.pin.rm(cid) * console.log(result) * // prints the CID that was unpinned @@ -81,7 +81,7 @@ * @example * ```js * const source = [ - * CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + * CID.parse('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') * ] * for await (const cid of ipfs.pin.rmAll(source)) { * console.log(cid) @@ -150,7 +150,7 @@ export type PinType = 'recursive' | 'direct' | 'indirect' | 'all' export type PinQueryType = 'recursive' | 'direct' | 'indirect' | 'all' -export interface LsOptions extends AbortOptions { +export interface LsOptions extends AbortOptions { paths?: CID | CID[] | string | string[] type?: PinQueryType } @@ -170,4 +170,3 @@ export interface RmAllInput { path?: string recursive?: boolean } - diff --git a/packages/ipfs-core-types/src/pin/remote/index.d.ts b/packages/ipfs-core-types/src/pin/remote/index.d.ts index 63395c350a..d52af9d936 100644 --- a/packages/ipfs-core-types/src/pin/remote/index.d.ts +++ b/packages/ipfs-core-types/src/pin/remote/index.d.ts @@ -1,4 +1,4 @@ -import CID from 'cids' +import { CID } from 'multiformats/cid' import { Multiaddr } from 'multiaddr' import { API as Service } from './service' import { AbortOptions } from
'../../utils' diff --git a/packages/ipfs-core-types/src/pin/remote/service/index.d.ts b/packages/ipfs-core-types/src/pin/remote/service/index.d.ts index 14f6c2a996..67de01322f 100644 --- a/packages/ipfs-core-types/src/pin/remote/service/index.d.ts +++ b/packages/ipfs-core-types/src/pin/remote/service/index.d.ts @@ -61,6 +61,7 @@ interface InvalidStat { status: 'invalid' pinCount?: undefined } + export interface PinCount { queued: number pinning: number diff --git a/packages/ipfs-core-types/src/pubsub/index.d.ts b/packages/ipfs-core-types/src/pubsub/index.d.ts index 8062dc9a4a..1a63a4ba4a 100644 --- a/packages/ipfs-core-types/src/pubsub/index.d.ts +++ b/packages/ipfs-core-types/src/pubsub/index.d.ts @@ -40,7 +40,7 @@ export interface API { */ unsubscribe: (topic: string, handler: MessageHandlerFn, options?: AbortOptions & OptionExtension) => Promise - /** + /** * Publish a data message to a pubsub topic * * @example @@ -89,4 +89,4 @@ export interface SubscribeOptions extends AbortOptions { onError?: (err: Error) => void } -export type MessageHandlerFn = (message: Message) => void +export interface MessageHandlerFn { (message: Message): void } diff --git a/packages/ipfs-core-types/src/refs/index.d.ts b/packages/ipfs-core-types/src/refs/index.d.ts index 3d1ff80143..77ad3181ec 100644 --- a/packages/ipfs-core-types/src/refs/index.d.ts +++ b/packages/ipfs-core-types/src/refs/index.d.ts @@ -1,7 +1,6 @@ import type { AbortOptions, PreloadOptions, IPFSPath } from '../utils' -import type CID from 'cids' -export type API = { +export interface API { /** * Get links (references) from an object */ @@ -13,7 +12,7 @@ export type API = { local: Local } -export type Refs = (ipfsPath: IPFSPath | IPFSPath[], options?: RefsOptions & OptionExtension) => AsyncIterable +export interface Refs { (ipfsPath: IPFSPath | IPFSPath[], options?: RefsOptions & OptionExtension): AsyncIterable } export interface RefsOptions extends AbortOptions, PreloadOptions { recursive?: boolean @@ -23,7 +22,7 @@ 
export interface RefsOptions extends AbortOptions, PreloadOptions { maxDepth?: number } -export type Local = (options?: AbortOptions & OptionExtension) => AsyncIterable +export interface Local { (options?: AbortOptions & OptionExtension): AsyncIterable } export interface RefsResult { ref: string diff --git a/packages/ipfs-core-types/src/repo/index.d.ts b/packages/ipfs-core-types/src/repo/index.d.ts index 5a930737ad..84d509be77 100644 --- a/packages/ipfs-core-types/src/repo/index.d.ts +++ b/packages/ipfs-core-types/src/repo/index.d.ts @@ -1,5 +1,5 @@ import type { AbortOptions } from '../utils' -import CID from 'cids' +import { CID } from 'multiformats/cid' export interface API { /** @@ -26,21 +26,21 @@ export interface GCOptions extends AbortOptions { } export interface GCError { - err: Error, + err: Error cid?: never } export interface GCSuccess { - err?: never, + err?: never cid: CID } export type GCResult = GCSuccess | GCError export interface StatResult { - numObjects: BigInt + numObjects: bigint repoPath: string - repoSize: BigInt + repoSize: bigint version: string - storageMax: BigInt + storageMax: bigint } diff --git a/packages/ipfs-core-types/src/root.d.ts b/packages/ipfs-core-types/src/root.d.ts index f6607d431e..06be36d42f 100644 --- a/packages/ipfs-core-types/src/root.d.ts +++ b/packages/ipfs-core-types/src/root.d.ts @@ -1,8 +1,7 @@ import { AbortOptions, PreloadOptions, IPFSPath, ImportCandidateStream, ImportCandidate } from './utils' -import CID, { CIDVersion } from 'cids' +import { CID, CIDVersion } from 'multiformats/cid' import { Mtime } from 'ipfs-unixfs' import { Multiaddr } from 'multiaddr' -import { BaseName } from 'multibase' export interface API { /** @@ -174,7 +173,7 @@ export interface Directory { export type IPFSEntry = File | Directory -export type AddProgressFn = (bytes: number, path?: string) => void +export interface AddProgressFn { (bytes: number, path?: string): void } export interface AddOptions extends AbortOptions { /** @@ -334,7
+333,7 @@ export interface PingResult { export interface ResolveOptions extends AbortOptions { recursive?: boolean - cidBase?: BaseName + cidBase?: string } export interface MountOptions extends AbortOptions { diff --git a/packages/ipfs-core-types/src/stats/index.d.ts b/packages/ipfs-core-types/src/stats/index.d.ts index 8a2e997f21..a3628ab131 100644 --- a/packages/ipfs-core-types/src/stats/index.d.ts +++ b/packages/ipfs-core-types/src/stats/index.d.ts @@ -1,11 +1,10 @@ import type { AbortOptions } from '../utils' import { API as BitswapAPI } from '../bitswap' import { API as RepoAPI } from '../repo' -import type CID from 'cids' export interface API { - bitswap: BitswapAPI["stat"] - repo: RepoAPI["stat"] + bitswap: BitswapAPI['stat'] + repo: RepoAPI['stat'] /** * Return bandwith usage stats @@ -21,8 +20,8 @@ export interface BWOptions extends AbortOptions { } export interface BWResult { - totalIn: BigInt - totalOut: BigInt - rateIn: BigInt - rateOut: BigInt + totalIn: bigint + totalOut: bigint + rateIn: bigint + rateOut: bigint } diff --git a/packages/ipfs-core-types/src/swarm/index.d.ts b/packages/ipfs-core-types/src/swarm/index.d.ts index 910e63271e..f174272922 100644 --- a/packages/ipfs-core-types/src/swarm/index.d.ts +++ b/packages/ipfs-core-types/src/swarm/index.d.ts @@ -1,7 +1,4 @@ import type { AbortOptions } from '../utils' -import { API as BitswapAPI } from '../bitswap' -import { API as RepoAPI } from '../repo' -import type CID from 'cids' import type { Multiaddr } from 'multiaddr' export interface API { diff --git a/packages/ipfs-core-types/src/utils.d.ts b/packages/ipfs-core-types/src/utils.d.ts index f8d59ac403..1ceea70b79 100644 --- a/packages/ipfs-core-types/src/utils.d.ts +++ b/packages/ipfs-core-types/src/utils.d.ts @@ -1,7 +1,5 @@ -import CID from 'cids' +import { CID } from 'multiformats/cid' import { Mtime, MtimeLike } from 'ipfs-unixfs' -import { Options as DatastoreOptions, Query, KeyQuery } from 'interface-datastore' -import Block from
'ipld-block' export type Entry|Blob> = | FileEntry @@ -73,9 +71,9 @@ export interface InputFile extends BaseFile { } export interface BrowserImportCandidate { - path?: string, - content?: Blob, - mtime?: Mtime, + path?: string + content?: Blob + mtime?: Mtime mode?: number } diff --git a/packages/ipfs-core-utils/package.json b/packages/ipfs-core-utils/package.json index e37ea5d542..343452667a 100644 --- a/packages/ipfs-core-utils/package.json +++ b/packages/ipfs-core-utils/package.json @@ -44,22 +44,22 @@ "any-signal": "^2.1.2", "blob-to-it": "^1.0.1", "browser-readablestream-to-it": "^1.0.1", - "cids": "^1.1.6", "err-code": "^3.0.1", "ipfs-core-types": "^0.5.2", - "ipfs-unixfs": "^4.0.3", - "ipfs-utils": "^8.1.2", + "ipfs-unixfs": "^5.0.0", + "ipfs-utils": "^8.1.4", "it-all": "^1.0.4", "it-map": "^1.0.4", "it-peekable": "^1.0.1", - "multiaddr": "^9.0.1", - "multiaddr-to-uri": "^7.0.0", + "multiaddr": "^10.0.0", + "multiaddr-to-uri": "^8.0.0", + "multiformats": "^9.4.1", "parse-duration": "^1.0.0", "timeout-abort-controller": "^1.1.1", - "uint8arrays": "^2.1.3" + "uint8arrays": "^2.1.6" }, "devDependencies": { - "aegir": "^33.0.0", + "aegir": "^34.0.2", "rimraf": "^3.0.2" } } diff --git a/packages/ipfs-core-utils/src/cid.js b/packages/ipfs-core-utils/src/cid.js deleted file mode 100644 index e9a3b6baef..0000000000 --- a/packages/ipfs-core-utils/src/cid.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -const CID = require('cids') - -/** - * Stringify a CID encoded in the requested base, upgrading to v1 if necessary. - * - * Setting upgrade to false will disable automatic CID upgrading from v0 to v1 - * which is necessary if the multibase is something other than base58btc. Note - * that it will also not apply the encoding (since v0 CIDs can only be encoded - * as base58btc). 
- * - * @param {CID|Uint8Array|string} input - The CID to encode - * @param {Object} [options] - Optional options - * @param {import('cids').BaseNameOrCode} [options.base] - Name of multibase codec to encode the CID with - * @param {boolean} [options.upgrade] - Automatically upgrade v0 CIDs to v1 when - * necessary. Default: true. - * @returns {string} - CID in string representation - */ -exports.cidToString = (input, options = {}) => { - const upgrade = options.upgrade !== false - let cid = CID.isCID(input) - ? input - // @ts-ignore - TS seems to get confused by the type defs in CID repo. - : new CID(input) - - if (cid.version === 0 && options.base && options.base !== 'base58btc') { - if (!upgrade) return cid.toString() - cid = cid.toV1() - } - - return cid.toBaseEncodedString(options.base) -} diff --git a/packages/ipfs-core-utils/src/files/normalise-input/index.js b/packages/ipfs-core-utils/src/files/normalise-input/index.js index df15835217..13b51b6099 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/index.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/index.js @@ -8,7 +8,7 @@ const normaliseInput = require('./normalise-input') * @typedef {import('ipfs-unixfs-importer').ImportCandidate} ImportCandidate */ -/** +/* * Transforms any of the `ipfs.add` input types into * * ``` @@ -20,4 +20,5 @@ const normaliseInput = require('./normalise-input') * @param {ImportCandidateStream} input * @returns {AsyncGenerator} */ +// @ts-ignore TODO vmx 2021-03-30 enable again module.exports = (input) => normaliseInput(input, normaliseContent) diff --git a/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js b/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js index 7ea199ba06..b9fd3ba864 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js @@ -13,6 +13,7 @@ const { const { parseMtime, parseMode +// @ts-ignore - 
TODO vmx 2021-03-30 enable again } = require('ipfs-unixfs') /** @@ -117,6 +118,7 @@ async function toFileObject (input, normaliseContent) { } if (content) { + // @ts-ignore TODO vmx 2021-03-30 enable again file.content = await normaliseContent(content) } else if (!path) { // Not already a file object with path or content prop // @ts-ignore - input still can be different ToContent diff --git a/packages/ipfs-core-utils/src/multibases.js b/packages/ipfs-core-utils/src/multibases.js new file mode 100644 index 0000000000..247c88c311 --- /dev/null +++ b/packages/ipfs-core-utils/src/multibases.js @@ -0,0 +1,88 @@ +'use strict' + +/** + * @typedef {import('multiformats/bases/interface').MultibaseCodec} MultibaseCodec + * @typedef {import('./types').LoadBaseFn} LoadBaseFn + * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions + */ + +/** + * @type {LoadBaseFn} + */ +const LOAD_BASE = (name) => Promise.reject(new Error(`No base found for "${name}"`)) + +class Multibases { + /** + * @param {object} options + * @param {LoadBaseFn} [options.loadBase] + * @param {MultibaseCodec[]} options.bases + */ + constructor (options) { + // Object with current list of active resolvers + /** @type {Record}} */ + this._basesByName = {} + + // Object with current list of active resolvers + /** @type {Record}} */ + this._basesByPrefix = {} + + this._loadBase = options.loadBase || LOAD_BASE + + // Enable all supplied codecs + for (const base of options.bases) { + this.addBase(base) + } + } + + /** + * Add support for a multibase codec + * + * @param {MultibaseCodec} base + */ + addBase (base) { + if (this._basesByName[base.name] || this._basesByPrefix[base.prefix]) { + throw new Error(`Codec already exists for codec "${base.name}"`) + } + + this._basesByName[base.name] = base + this._basesByPrefix[base.prefix] = base + } + + /** + * Remove support for a multibase codec + * + * @param {MultibaseCodec} base + */ + removeBase (base) { + delete this._basesByName[base.name] + 
delete this._basesByPrefix[base.prefix] + } + + /** + * @param {string} nameOrPrefix + */ + async getBase (nameOrPrefix) { + if (this._basesByName[nameOrPrefix]) { + return this._basesByName[nameOrPrefix] + } + + if (this._basesByPrefix[nameOrPrefix]) { + return this._basesByPrefix[nameOrPrefix] + } + + // If not supported, attempt to dynamically load this codec + const base = await this._loadBase(nameOrPrefix) + + if (this._basesByName[base.name] == null && this._basesByPrefix[base.prefix] == null) { + this.addBase(base) + } + + return base + } + + listBases () { + return Object.values(this._basesByName) + } +} + +module.exports = Multibases diff --git a/packages/ipfs-core-utils/src/multicodecs.js b/packages/ipfs-core-utils/src/multicodecs.js new file mode 100644 index 0000000000..b1aa3d10e3 --- /dev/null +++ b/packages/ipfs-core-utils/src/multicodecs.js @@ -0,0 +1,86 @@ +'use strict' + +/** + * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec + * @typedef {import('./types').LoadCodecFn} LoadCodecFn + * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions + */ + +/** + * @type {LoadCodecFn} + */ +const LOAD_CODEC = (codeOrName) => Promise.reject(new Error(`No codec found for "${codeOrName}"`)) + +class Multicodecs { + /** + * @param {object} options + * @param {LoadCodecFn} [options.loadCodec] + * @param {BlockCodec[]} options.codecs + */ + constructor (options) { + // Object with current list of active resolvers + /** @type {Record}} */ + this._codecsByName = {} + + // Object with current list of active resolvers + /** @type {Record}} */ + this._codecsByCode = {} + + this._loadCodec = options.loadCodec || LOAD_CODEC + + // Enable all supplied codecs + for (const codec of options.codecs) { + this.addCodec(codec) + } + } + + /** + * Add support for a block codec + * + * @param {BlockCodec} codec + */ + addCodec (codec) { + if (this._codecsByName[codec.name] || this._codecsByCode[codec.code]) { + throw new Error(`Resolver 
already exists for codec "${codec.name}"`) + } + + this._codecsByName[codec.name] = codec + this._codecsByCode[codec.code] = codec + } + + /** + * Remove support for a block codec + * + * @param {BlockCodec} codec + */ + removeCodec (codec) { + delete this._codecsByName[codec.name] + delete this._codecsByCode[codec.code] + } + + /** + * @param {number | string} code + */ + async getCodec (code) { + const table = typeof code === 'string' ? this._codecsByName : this._codecsByCode + + if (table[code]) { + return table[code] + } + + // If not supported, attempt to dynamically load this codec + const codec = await this._loadCodec(code) + + if (table[code] == null) { + this.addCodec(codec) + } + + return codec + } + + listCodecs () { + return Object.values(this._codecsByName) + } +} + +module.exports = Multicodecs diff --git a/packages/ipfs-core-utils/src/multihashes.js b/packages/ipfs-core-utils/src/multihashes.js new file mode 100644 index 0000000000..92acbd6167 --- /dev/null +++ b/packages/ipfs-core-utils/src/multihashes.js @@ -0,0 +1,86 @@ +'use strict' + +/** + * @typedef {import('multiformats/hashes/interface').MultihashHasher} MultihashHasher + * @typedef {import('./types').LoadHasherFn} LoadHasherFn + * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions + */ + +/** + * @type {LoadHasherFn} + */ +const LOAD_HASHER = (codeOrName) => Promise.reject(new Error(`No hasher found for "${codeOrName}"`)) + +class Multihashes { + /** + * @param {object} options + * @param {LoadHasherFn} [options.loadHasher] + * @param {MultihashHasher[]} options.hashers + */ + constructor (options) { + // Object with current list of active hashers + /** @type {Record}} */ + this._hashersByName = {} + + // Object with current list of active hashers + /** @type {Record}} */ + this._hashersByCode = {} + + this._loadHasher = options.loadHasher || LOAD_HASHER + + // Enable all supplied hashers + for (const hasher of options.hashers) { + this.addHasher(hasher) + } + } + + 
/** + * Add support for a multibase hasher + * + * @param {MultihashHasher} hasher + */ + addHasher (hasher) { + if (this._hashersByName[hasher.name] || this._hashersByCode[hasher.code]) { + throw new Error(`Resolver already exists for codec "${hasher.name}"`) + } + + this._hashersByName[hasher.name] = hasher + this._hashersByCode[hasher.code] = hasher + } + + /** + * Remove support for a multibase hasher + * + * @param {MultihashHasher} hasher + */ + removeHasher (hasher) { + delete this._hashersByName[hasher.name] + delete this._hashersByCode[hasher.code] + } + + /** + * @param {number | string} code + */ + async getHasher (code) { + const table = typeof code === 'string' ? this._hashersByName : this._hashersByCode + + if (table[code]) { + return table[code] + } + + // If not supported, attempt to dynamically load this hasher + const hasher = await this._loadHasher(code) + + if (table[code] == null) { + this.addHasher(hasher) + } + + return hasher + } + + listHashers () { + return Object.values(this._hashersByName) + } +} + +module.exports = Multihashes diff --git a/packages/ipfs-core-utils/src/pins/normalise-input.js b/packages/ipfs-core-utils/src/pins/normalise-input.js index e7dc21dd24..cc7b9765d6 100644 --- a/packages/ipfs-core-utils/src/pins/normalise-input.js +++ b/packages/ipfs-core-utils/src/pins/normalise-input.js @@ -1,7 +1,7 @@ 'use strict' const errCode = require('err-code') -const CID = require('cids') +const { CID } = require('multiformats/cid') /** * @typedef {Object} Pinnable @@ -52,8 +52,8 @@ module.exports = async function * normaliseInput (input) { throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT') } - // CID|String - if (CID.isCID(input)) { + // CID + if (input instanceof CID) { yield toPin({ cid: input }) return } @@ -78,7 +78,7 @@ module.exports = async function * normaliseInput (input) { if (first.done) return iterator // Iterable - if (CID.isCID(first.value) || first.value instanceof String || typeof first.value 
=== 'string') { + if (first.value instanceof CID || first.value instanceof String || typeof first.value === 'string') { yield toPin({ cid: first.value }) for (const cid of iterator) { yield toPin({ cid }) @@ -106,7 +106,7 @@ module.exports = async function * normaliseInput (input) { if (first.done) return iterator // AsyncIterable - if (CID.isCID(first.value) || first.value instanceof String || typeof first.value === 'string') { + if (first.value instanceof CID || first.value instanceof String || typeof first.value === 'string') { yield toPin({ cid: first.value }) for await (const cid of iterator) { yield toPin({ cid }) diff --git a/packages/ipfs-core-utils/src/to-cid-and-path.js b/packages/ipfs-core-utils/src/to-cid-and-path.js index 9b4bdb23d3..33736c5f9d 100644 --- a/packages/ipfs-core-utils/src/to-cid-and-path.js +++ b/packages/ipfs-core-utils/src/to-cid-and-path.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const errCode = require('err-code') const IPFS_PREFIX = '/ipfs/' @@ -12,13 +12,13 @@ const IPFS_PREFIX = '/ipfs/' const toCidAndPath = (string) => { if (string instanceof Uint8Array) { try { - string = new CID(string) + string = CID.decode(string) } catch (err) { throw errCode(err, 'ERR_INVALID_CID') } } - if (CID.isCID(string)) { + if (string instanceof CID) { return { cid: string, path: undefined @@ -34,7 +34,7 @@ const toCidAndPath = (string) => { let path try { - cid = new CID(/** @type {string} */(parts.shift())) + cid = CID.parse(parts.shift() || '') } catch (err) { throw errCode(err, 'ERR_INVALID_CID') } diff --git a/packages/ipfs-core-utils/src/types.d.ts b/packages/ipfs-core-utils/src/types.d.ts new file mode 100644 index 0000000000..212d970062 --- /dev/null +++ b/packages/ipfs-core-utils/src/types.d.ts @@ -0,0 +1,7 @@ +import { MultibaseCodec } from 'multiformats/bases/interface' +import { BlockCodec } from 'multiformats/codecs/interface' +import { MultihashHasher } from 
'multiformats/hashes/interface' + +export interface LoadBaseFn { (codeOrName: string): Promise> } +export interface LoadCodecFn { (codeOrName: number | string): Promise> } +export interface LoadHasherFn { (codeOrName: number | string): Promise } diff --git a/packages/ipfs-core-utils/test/pins/normalise-input.spec.js b/packages/ipfs-core-utils/test/pins/normalise-input.spec.js index 04e05d605d..541d69aac9 100644 --- a/packages/ipfs-core-utils/test/pins/normalise-input.spec.js +++ b/packages/ipfs-core-utils/test/pins/normalise-input.spec.js @@ -5,11 +5,11 @@ const { expect } = require('aegir/utils/chai') const normalise = require('../../src/pins/normalise-input') const all = require('it-all') -const CID = require('cids') +const { CID } = require('multiformats/cid') const STRING = () => '/ipfs/QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn/path/to/file.txt' -const PLAIN_CID = () => new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') -const OBJECT_CID = () => ({ cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), recursive: true, metadata: { key: 'hello world' } }) +const PLAIN_CID = () => CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') +const OBJECT_CID = () => ({ cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), recursive: true, metadata: { key: 'hello world' } }) const OBJECT_PATH = () => ({ path: '/ipfs/QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn/path/to/file.txt', recursive: true, metadata: { key: 'hello world' } }) async function verifyNormalisation (input, withOptions) { diff --git a/packages/ipfs-core-utils/tsconfig.json b/packages/ipfs-core-utils/tsconfig.json index 5fe8ea40d7..03031d1601 100644 --- a/packages/ipfs-core-utils/tsconfig.json +++ b/packages/ipfs-core-utils/tsconfig.json @@ -5,5 +5,10 @@ }, "include": [ "src" + ], + "references": [ + { + "path": "../ipfs-core-types" + } ] } diff --git a/packages/ipfs-core/.aegir.js b/packages/ipfs-core/.aegir.js index 29d5f7c6de..0f6f1b5561 100644 --- 
a/packages/ipfs-core/.aegir.js +++ b/packages/ipfs-core/.aegir.js @@ -70,7 +70,7 @@ module.exports = { } }, build: { - bundlesizeMax: '500KB', + bundlesizeMax: '477KB', config: esbuild } } diff --git a/packages/ipfs-core/package.json b/packages/ipfs-core/package.json index 32bd86105d..33e129eb48 100644 --- a/packages/ipfs-core/package.json +++ b/packages/ipfs-core/package.json @@ -24,7 +24,6 @@ "./src/runtime/libp2p-pubsub-routers-nodejs.js": "./src/runtime/libp2p-pubsub-routers-browser.js", "./src/runtime/preload-nodejs.js": "./src/runtime/preload-browser.js", "./src/runtime/repo-nodejs.js": "./src/runtime/repo-browser.js", - "./test/utils/create-repo-nodejs.js": "./test/utils/create-repo-browser.js", "ipfs-utils/src/files/glob-source": false }, "typesVersions": { @@ -57,36 +56,34 @@ "dep-check": "aegir dep-check -i interface-ipfs-core -i ipfs-core-types -i abort-controller" }, "dependencies": { + "@ipld/dag-cbor": "^6.0.5", + "@ipld/dag-pb": "^2.1.3", "abort-controller": "^3.0.0", "array-shuffle": "^2.0.0", - "cborg": "^1.2.1", - "cids": "^1.1.6", - "dag-cbor-links": "^2.0.0", - "datastore-core": "^4.0.0", - "datastore-pubsub": "^0.6.1", + "blockstore-datastore-adapter": "1.0.0", + "datastore-core": "^5.0.0", + "datastore-fs": "^5.0.1", + "datastore-level": "^6.0.1", + "datastore-pubsub": "^0.7.0", "debug": "^4.1.1", "dlv": "^1.1.3", "err-code": "^3.0.1", "hamt-sharding": "^2.0.0", "hashlru": "^2.3.0", - "interface-datastore": "^4.0.0", - "ipfs-bitswap": "^5.0.3", - "ipfs-block-service": "^0.19.0", + "interface-blockstore": "^1.0.0", + "interface-datastore": "^5.0.0", + "ipfs-bitswap": "^6.0.0", "ipfs-core-types": "^0.5.2", "ipfs-core-utils": "^0.8.3", - "ipfs-repo": "^9.1.6", - "ipfs-unixfs": "^4.0.3", - "ipfs-unixfs-exporter": "^5.0.3", - "ipfs-unixfs-importer": "^7.0.3", - "ipfs-utils": "^8.1.2", - "ipld": "^0.30.0", - "ipld-block": "^0.11.0", - "ipld-dag-cbor": "^1.0.0", - "ipld-dag-pb": "^0.22.1", - "ipld-raw": "^7.0.0", - "ipns": "^0.12.0", + 
"ipfs-http-client": "^50.1.2", + "ipfs-repo": "^11.0.0", + "ipfs-unixfs": "^5.0.0", + "ipfs-unixfs-exporter": "^6.0.0", + "ipfs-unixfs-importer": "^8.0.0", + "ipfs-utils": "^8.1.4", + "ipns": "^0.13.2", "is-domain-name": "^1.0.1", - "is-ipfs": "^5.0.0", + "is-ipfs": "^6.0.1", "it-all": "^1.0.4", "it-drain": "^1.0.3", "it-first": "^1.0.4", @@ -94,46 +91,46 @@ "it-map": "^1.0.4", "it-pipe": "^1.1.0", "just-safe-set": "^2.2.1", - "libp2p": "^0.31.6", - "libp2p-bootstrap": "^0.12.3", - "libp2p-crypto": "^0.19.3", - "libp2p-floodsub": "^0.25.1", - "libp2p-gossipsub": "^0.9.2", - "libp2p-kad-dht": "^0.22.0", - "libp2p-mdns": "^0.16.0", + "libp2p": "^0.32.0", + "libp2p-bootstrap": "^0.13.0", + "libp2p-crypto": "^0.19.6", + "libp2p-delegated-content-routing": "^0.11.0", + "libp2p-delegated-peer-routing": "^0.10.0", + "libp2p-floodsub": "^0.27.0", + "libp2p-gossipsub": "^0.11.0", + "libp2p-kad-dht": "^0.23.1", + "libp2p-mdns": "^0.17.0", "libp2p-mplex": "^0.10.2", - "libp2p-noise": "^3.1.0", + "libp2p-noise": "^4.0.0", "libp2p-record": "^0.10.3", - "libp2p-tcp": "^0.15.4", - "libp2p-webrtc-star": "^0.22.2", - "libp2p-websockets": "^0.15.6", - "mafmt": "^9.0.0", + "libp2p-tcp": "^0.17.1", + "libp2p-webrtc-star": "^0.23.0", + "libp2p-websockets": "^0.16.1", + "mafmt": "^10.0.0", "merge-options": "^3.0.4", "mortice": "^2.0.0", - "multiaddr": "^9.0.1", - "multiaddr-to-uri": "^7.0.0", - "multibase": "^4.0.2", - "multicodec": "^3.0.1", - "multihashing-async": "^2.1.2", + "multiaddr": "^10.0.0", + "multiaddr-to-uri": "^8.0.0", + "multiformats": "^9.4.1", "native-abort-controller": "^1.0.3", "p-queue": "^6.6.1", "parse-duration": "^1.0.0", - "peer-id": "^0.14.1", - "streaming-iterables": "^5.0.2", - "uint8arrays": "^2.1.3" + "peer-id": "^0.15.1", + "streaming-iterables": "^6.0.0", + "uint8arrays": "^2.1.6" }, "devDependencies": { "@types/dlv": "^1.1.2", - "aegir": "^33.0.0", + "aegir": "^34.0.2", "delay": "^5.0.0", "go-ipfs": "0.8.0", + "interface-blockstore-tests": "^1.0.0", 
"interface-ipfs-core": "^0.147.0", - "ipfsd-ctl": "^8.0.1", - "ipld-git": "^0.6.1", + "ipfsd-ctl": "^9.0.0", "iso-url": "^1.0.0", "nanoid": "^3.1.12", "p-defer": "^3.0.0", "rimraf": "^3.0.2", - "sinon": "^10.0.1" + "sinon": "^11.1.1" } } diff --git a/packages/ipfs-core/src/block-storage.js b/packages/ipfs-core/src/block-storage.js new file mode 100644 index 0000000000..02b95073f7 --- /dev/null +++ b/packages/ipfs-core/src/block-storage.js @@ -0,0 +1,150 @@ +'use strict' + +const { BlockstoreAdapter } = require('interface-blockstore') + +/** + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('interface-blockstore').Query} Query + * @typedef {import('interface-blockstore').KeyQuery} KeyQuery + * @typedef {import('multiformats/cid').CID} CID + * @typedef {import('ipfs-bitswap').IPFSBitswap} Bitswap + * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions + * @typedef {import('ipfs-core-types/src/block').RmOptions} RmOptions + */ + +/** + * BlockStorage is a hybrid block datastore. It stores data in a local + * datastore and may retrieve data from a remote Exchange. + * It uses an internal `datastore.Datastore` instance to store values. 
+ * + * @implements {Blockstore} + */ +class BlockStorage extends BlockstoreAdapter { + /** + * Create a new BlockStorage + * + * @param {Blockstore} blockstore + * @param {Bitswap} bitswap + */ + constructor (blockstore, bitswap) { + super() + + this.child = blockstore + this.bitswap = bitswap + } + + open () { + return this.child.open() + } + + close () { + return this.child.close() + } + + unwrap () { + return this.child + } + + /** + * Put a block to the underlying datastore + * + * @param {CID} cid + * @param {Uint8Array} block + * @param {AbortOptions} [options] + */ + async put (cid, block, options = {}) { + if (this.bitswap.isStarted()) { + await this.bitswap.put(cid, block, options) + } else { + await this.child.put(cid, block, options) + } + } + + /** + * Put a multiple blocks to the underlying datastore + * + * @param {AsyncIterable<{ key: CID, value: Uint8Array }> | Iterable<{ key: CID, value: Uint8Array }>} blocks + * @param {AbortOptions} [options] + */ + async * putMany (blocks, options = {}) { + if (this.bitswap.isStarted()) { + yield * this.bitswap.putMany(blocks, options) + } else { + yield * this.child.putMany(blocks, options) + } + } + + /** + * Get a block by cid + * + * @param {CID} cid + * @param {AbortOptions} [options] + */ + async get (cid, options = {}) { + if (this.bitswap.isStarted()) { + return this.bitswap.get(cid, options) + } else { + return this.child.get(cid, options) + } + } + + /** + * Get multiple blocks back from an array of cids + * + * @param {AsyncIterable | Iterable} cids + * @param {AbortOptions} [options] + */ + async * getMany (cids, options = {}) { + if (this.bitswap.isStarted()) { + yield * this.bitswap.getMany(cids, options) + } else { + yield * this.child.getMany(cids, options) + } + } + + /** + * Delete a block from the blockstore + * + * @param {CID} cid + * @param {RmOptions} [options] + */ + async delete (cid, options) { + await this.child.delete(cid, options) + } + + /** + * Delete multiple blocks from the 
blockstore + * + * @param {AsyncIterable | Iterable} cids + * @param {RmOptions} [options] + */ + async * deleteMany (cids, options) { + yield * this.child.deleteMany(cids, options) + } + + /** + * @param {CID} cid + * @param {AbortOptions} options + */ + async has (cid, options = {}) { + return this.child.has(cid, options) + } + + /** + * @param {Query} q + * @param {AbortOptions} options + */ + async * query (q, options = {}) { + yield * this.child.query(q, options) + } + + /** + * @param {KeyQuery} q + * @param {AbortOptions} options + */ + async * queryKeys (q, options = {}) { + yield * this.child.queryKeys(q, options) + } +} + +module.exports = BlockStorage diff --git a/packages/ipfs-core/src/components/add-all/index.js b/packages/ipfs-core/src/components/add-all/index.js index 2eb11547cc..0ca65c6868 100644 --- a/packages/ipfs-core/src/components/add-all/index.js +++ b/packages/ipfs-core/src/components/add-all/index.js @@ -8,21 +8,19 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) /** - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID * @typedef {import('ipfs-unixfs-importer').ImportResult} ImportResult */ /** * @typedef {Object} Context - * @property {import('ipfs-core-types/src/block').API} block - * @property {import('../gc-lock').GCLock} gcLock + * @property {import('ipfs-repo').IPFSRepo} repo * @property {import('../../types').Preload} preload - * @property {import('ipfs-core-types/src/pin').API} pin * @property {import('ipfs-core-types/src/root').ShardingOptions} [options] * * @param {Context} context */ -module.exports = ({ block, gcLock, preload, pin, options }) => { +module.exports = ({ repo, preload, options }) => { const isShardingEnabled = options && options.sharding /** @@ -90,16 +88,16 @@ module.exports = ({ block, gcLock, preload, pin, options }) => { /** * @param {AsyncIterable} source */ - source => 
importer(source, block, { + source => importer(source, repo.blocks, { ...opts, pin: false }), transformFile(opts), preloadFile(preload, opts), - pinFile(pin, opts) + pinFile(repo, opts) ) - const releaseLock = await gcLock.readLock() + const releaseLock = await repo.gcLock.readLock() try { for await (const added of iterator) { @@ -139,7 +137,7 @@ function transformFile (opts) { yield { path, - cid, + cid: cid, size: file.size, mode: file.unixfs && file.unixfs.mode, mtime: file.unixfs && file.unixfs.mtime @@ -178,10 +176,10 @@ function preloadFile (preload, opts) { } /** - * @param {import('ipfs-core-types/src/pin').API} pin + * @param {import('ipfs-repo').IPFSRepo} repo * @param {import('ipfs-core-types/src/root').AddAllOptions} opts */ -function pinFile (pin, opts) { +function pinFile (repo, opts) { /** * @param {AsyncGenerator} source */ @@ -193,12 +191,7 @@ function pinFile (pin, opts) { const shouldPin = (opts.pin == null ? true : opts.pin) && isRootDir && !opts.onlyHash if (shouldPin) { - // Note: addAsyncIterator() has already taken a GC lock, so tell - // pin.add() not to take a (second) GC lock - await pin.add(file.cid, { - preload: false, - lock: false - }) + await repo.pins.pinRecursively(file.cid) } yield file diff --git a/packages/ipfs-core/src/components/bitswap/index.js b/packages/ipfs-core/src/components/bitswap/index.js index 178b75171e..7a99a3b425 100644 --- a/packages/ipfs-core/src/components/bitswap/index.js +++ b/packages/ipfs-core/src/components/bitswap/index.js @@ -8,7 +8,7 @@ const createStat = require('./stat') /** * @typedef {import('../../types').NetworkService} NetworkService * @typedef {import('peer-id')} PeerId - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions */ diff --git a/packages/ipfs-core/src/components/bitswap/stat.js b/packages/ipfs-core/src/components/bitswap/stat.js index 5301c41005..7cc76c37f3 100644 --- 
a/packages/ipfs-core/src/components/bitswap/stat.js +++ b/packages/ipfs-core/src/components/bitswap/stat.js @@ -11,7 +11,7 @@ module.exports = ({ network }) => { * @type {import('ipfs-core-types/src/bitswap').API["stat"]} */ async function stat (options = {}) { - /** @type {import('ipfs-bitswap')} */ + /** @type {import('ipfs-bitswap').IPFSBitswap} */ const bitswap = (await network.use(options)).bitswap const snapshot = bitswap.stat().snapshot diff --git a/packages/ipfs-core/src/components/bitswap/unwant.js b/packages/ipfs-core/src/components/bitswap/unwant.js index e86306c940..bcee1a08c5 100644 --- a/packages/ipfs-core/src/components/bitswap/unwant.js +++ b/packages/ipfs-core/src/components/bitswap/unwant.js @@ -1,7 +1,5 @@ 'use strict' -const CID = require('cids') -const errCode = require('err-code') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** @@ -19,12 +17,6 @@ module.exports = ({ network }) => { cids = [cids] } - try { - cids = cids.map((cid) => new CID(cid)) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - return bitswap.unwant(cids) } diff --git a/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js b/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js index d22db8031f..dcefc4fee1 100644 --- a/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js +++ b/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js @@ -13,7 +13,7 @@ module.exports = ({ network }) => { */ async function wantlistForPeer (peerId, options = {}) { const { bitswap } = await network.use(options) - const list = bitswap.wantlistForPeer(PeerId.createFromCID(peerId), options) + const list = bitswap.wantlistForPeer(PeerId.createFromB58String(peerId)) return Array.from(list).map(e => e[1].cid) } diff --git a/packages/ipfs-core/src/components/block/get.js b/packages/ipfs-core/src/components/block/get.js index 772e15f76f..02071e33d7 100644 --- a/packages/ipfs-core/src/components/block/get.js +++ 
b/packages/ipfs-core/src/components/block/get.js @@ -1,25 +1,22 @@ 'use strict' -const { cleanCid } = require('./utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipfs-block-service')} config.blockService * @param {import('../../types').Preload} config.preload + * @param {import('ipfs-repo').IPFSRepo} config.repo */ -module.exports = ({ blockService, preload }) => { +module.exports = ({ preload, repo }) => { /** * @type {import('ipfs-core-types/src/block').API["get"]} */ async function get (cid, options = {}) { // eslint-disable-line require-await - cid = cleanCid(cid) - if (options.preload !== false) { preload(cid) } - return blockService.get(cid, options) + return repo.blocks.get(cid, options) } return withTimeoutOption(get) diff --git a/packages/ipfs-core/src/components/block/index.js b/packages/ipfs-core/src/components/block/index.js index fd11eaf27c..8fa9243e42 100644 --- a/packages/ipfs-core/src/components/block/index.js +++ b/packages/ipfs-core/src/components/block/index.js @@ -7,26 +7,21 @@ const createStat = require('./stat') /** * @typedef {import('../../types').Preload} Preload - * @typedef {import('ipfs-block-service')} BlockService - * @typedef {import('../gc-lock').GCLock} GCLock - * @typedef {import('ipfs-core-types/src/pin').API} Pin - * @typedef {import('../pin/pin-manager')} PinManager */ class BlockAPI { /** * @param {Object} config + * @param {import('ipfs-core-utils/src/multihashes')} config.hashers + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs * @param {Preload} config.preload - * @param {BlockService} config.blockService - * @param {GCLock} config.gcLock - * @param {Pin} config.pin - * @param {PinManager} config.pinManager + * @param {import('ipfs-repo').IPFSRepo} config.repo */ - constructor ({ blockService, preload, gcLock, pinManager, pin }) { - this.get = createGet({ blockService, preload }) - this.put = createPut({ blockService, 
preload, gcLock, pin }) - this.rm = createRm({ blockService, gcLock, pinManager }) - this.stat = createStat({ blockService, preload }) + constructor ({ codecs, hashers, preload, repo }) { + this.get = createGet({ preload, repo }) + this.put = createPut({ codecs, hashers, preload, repo }) + this.rm = createRm({ repo }) + this.stat = createStat({ preload, repo }) } } diff --git a/packages/ipfs-core/src/components/block/put.js b/packages/ipfs-core/src/components/block/put.js index 77002b2cb0..d18abc84d0 100644 --- a/packages/ipfs-core/src/components/block/put.js +++ b/packages/ipfs-core/src/components/block/put.js @@ -1,81 +1,55 @@ 'use strict' -const Block = require('ipld-block') -const multihashing = require('multihashing-async') -const CID = require('cids') -const isIPFS = require('is-ipfs') +const { CID } = require('multiformats/cid') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion */ /** * @param {Object} config - * @param {import('ipfs-block-service')} config.blockService - * @param {import('ipfs-core-types/src/pin').API} config.pin - * @param {import('.').GCLock} config.gcLock + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs + * @param {import('ipfs-core-utils/src/multihashes')} config.hashers + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').Preload} config.preload + * */ -module.exports = ({ blockService, pin, gcLock, preload }) => { +module.exports = ({ codecs, hashers, repo, preload }) => { /** * @type {import('ipfs-core-types/src/block').API["put"]} */ async function put (block, options = {}) { - if (Array.isArray(block)) { - throw new Error('Array is not supported') - } - - if (!Block.isBlock(block)) { - /** @type {Uint8Array} */ - const bytes = (block) - if (options.cid && isIPFS.cid(options.cid)) { - const cid = CID.isCID(options.cid) ? 
options.cid : new CID(options.cid) - block = new Block(bytes, cid) - } else { - const mhtype = options.mhtype || 'sha2-256' - const format = options.format || 'dag-pb' - - /** @type {CIDVersion} */ - let cidVersion = 1 - - if (options.version == null) { - // Pick appropriate CID version - cidVersion = mhtype === 'sha2-256' && format === 'dag-pb' ? 0 : 1 - } else { - // @ts-ignore - options.version is a {number} but the CID constructor arg version is a {0|1} - // TODO: https://github.com/multiformats/js-cid/pull/129 - cidVersion = options.version - } - - const multihash = await multihashing(bytes, mhtype) - const cid = new CID(cidVersion, format, multihash) + const release = options.pin ? await repo.gcLock.readLock() : null - block = new Block(bytes, cid) - } - } + try { + const cidVersion = options.version != null ? options.version : 0 + const codecName = options.format || (cidVersion === 0 ? 'dag-pb' : 'raw') - const release = await gcLock.readLock() + const hasher = await hashers.getHasher(options.mhtype || 'sha2-256') + const hash = await hasher.digest(block) + const codec = await codecs.getCodec(codecName) + const cid = CID.create(cidVersion, codec.code, hash) - try { - await blockService.put(block, { + await repo.blocks.put(cid, block, { signal: options.signal }) if (options.preload !== false) { - preload(block.cid) + preload(cid) } if (options.pin === true) { - await pin.add(block.cid, { - recursive: true, + await repo.pins.pinRecursively(cid, { signal: options.signal }) } - return block + return cid } finally { - release() + if (release) { + release() + } } } diff --git a/packages/ipfs-core/src/components/block/rm.js b/packages/ipfs-core/src/components/block/rm.js index 3ac995c375..89041a0f68 100644 --- a/packages/ipfs-core/src/components/block/rm.js +++ b/packages/ipfs-core/src/components/block/rm.js @@ -1,10 +1,8 @@ 'use strict' -const CID = require('cids') const errCode = require('err-code') const { parallelMap, filter } = require('streaming-iterables') 
const { pipe } = require('it-pipe') -const { PinTypes } = require('../pin/pin-manager') const { cleanCid } = require('./utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') @@ -12,11 +10,9 @@ const BLOCK_RM_CONCURRENCY = 8 /** * @param {Object} config - * @param {import('ipfs-block-service')} config.blockService - * @param {import('../pin/pin-manager')} config.pinManager - * @param {import('.').GCLock} config.gcLock + * @param {import('ipfs-repo').IPFSRepo} config.repo */ -module.exports = ({ blockService, gcLock, pinManager }) => { +module.exports = ({ repo }) => { /** * @type {import('ipfs-core-types/src/block').API["rm"]} */ @@ -27,7 +23,7 @@ module.exports = ({ blockService, gcLock, pinManager }) => { // We need to take a write lock here to ensure that adding and removing // blocks are exclusive operations - const release = await gcLock.writeLock() + const release = await repo.gcLock.writeLock() try { yield * pipe( @@ -39,25 +35,13 @@ module.exports = ({ blockService, gcLock, pinManager }) => { const result = { cid } try { - const pinResult = await pinManager.isPinnedWithType(cid, PinTypes.all) - - if (pinResult.pinned) { - if (CID.isCID(pinResult.reason)) { // eslint-disable-line max-depth - throw errCode(new Error(`pinned via ${pinResult.reason}`), 'ERR_BLOCK_PINNED') - } - - throw errCode(new Error(`pinned: ${pinResult.reason}`), 'ERR_BLOCK_PINNED') - } - - // remove has check when https://github.com/ipfs/js-ipfs-block-service/pull/88 is merged - // @ts-ignore - this accesses some internals - const has = await blockService._repo.blocks.has(cid) + const has = await repo.blocks.has(cid) if (!has) { throw errCode(new Error('block not found'), 'ERR_BLOCK_NOT_FOUND') } - await blockService.delete(cid) + await repo.blocks.delete(cid) } catch (err) { if (!options.force) { err.message = `cannot remove ${cid}: ${err.message}` diff --git a/packages/ipfs-core/src/components/block/stat.js b/packages/ipfs-core/src/components/block/stat.js 
index 191479ccb7..938099b160 100644 --- a/packages/ipfs-core/src/components/block/stat.js +++ b/packages/ipfs-core/src/components/block/stat.js @@ -5,11 +5,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipfs-block-service')} config.blockService + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').Preload} config.preload */ -module.exports = ({ blockService, preload }) => { +module.exports = ({ repo, preload }) => { /** * @type {import('ipfs-core-types/src/block').API["stat"]} */ @@ -20,9 +20,9 @@ module.exports = ({ blockService, preload }) => { preload(cid) } - const block = await blockService.get(cid) + const block = await repo.blocks.get(cid) - return { cid, size: block.data.length } + return { cid, size: block.length } } return withTimeoutOption(stat) diff --git a/packages/ipfs-core/src/components/block/utils.js b/packages/ipfs-core/src/components/block/utils.js index d384db759f..3495ba3226 100644 --- a/packages/ipfs-core/src/components/block/utils.js +++ b/packages/ipfs-core/src/components/block/utils.js @@ -1,22 +1,23 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const errCode = require('err-code') /** * @param {string|Uint8Array|CID} cid - * @returns {CID} */ exports.cleanCid = cid => { - if (CID.isCID(cid)) { + if (cid instanceof CID) { return cid } - // CID constructor knows how to do the cleaning :) - try { - // @ts-ignore - string|Uint8Array union seems to confuse CID typedefs. 
- return new CID(cid) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') + if (typeof cid === 'string') { + return CID.parse(cid) } + + if (cid instanceof Uint8Array) { + return CID.decode(cid) + } + + throw errCode(new Error('Invalid CID'), 'ERR_INVALID_CID') } diff --git a/packages/ipfs-core/src/components/bootstrap/add.js b/packages/ipfs-core/src/components/bootstrap/add.js index 47275d179c..15caf6c180 100644 --- a/packages/ipfs-core/src/components/bootstrap/add.js +++ b/packages/ipfs-core/src/components/bootstrap/add.js @@ -5,7 +5,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ repo }) => { /** diff --git a/packages/ipfs-core/src/components/bootstrap/clear.js b/packages/ipfs-core/src/components/bootstrap/clear.js index eea3666444..651728a9bb 100644 --- a/packages/ipfs-core/src/components/bootstrap/clear.js +++ b/packages/ipfs-core/src/components/bootstrap/clear.js @@ -5,7 +5,7 @@ const { Multiaddr } = require('multiaddr') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ repo }) => { /** diff --git a/packages/ipfs-core/src/components/bootstrap/index.js b/packages/ipfs-core/src/components/bootstrap/index.js index 72a568d9c6..b1411f1a73 100644 --- a/packages/ipfs-core/src/components/bootstrap/index.js +++ b/packages/ipfs-core/src/components/bootstrap/index.js @@ -8,7 +8,7 @@ const createRm = require('./rm') class BootstrapAPI { /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ constructor ({ repo }) { this.add = createAdd({ repo }) diff --git a/packages/ipfs-core/src/components/bootstrap/list.js b/packages/ipfs-core/src/components/bootstrap/list.js index 4501a38aec..7cade65533 100644 --- 
a/packages/ipfs-core/src/components/bootstrap/list.js +++ b/packages/ipfs-core/src/components/bootstrap/list.js @@ -5,7 +5,7 @@ const { Multiaddr } = require('multiaddr') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ repo }) => { /** diff --git a/packages/ipfs-core/src/components/bootstrap/reset.js b/packages/ipfs-core/src/components/bootstrap/reset.js index f8d1a134bd..cefb1695a0 100644 --- a/packages/ipfs-core/src/components/bootstrap/reset.js +++ b/packages/ipfs-core/src/components/bootstrap/reset.js @@ -6,7 +6,7 @@ const { Multiaddr } = require('multiaddr') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ repo }) => { /** diff --git a/packages/ipfs-core/src/components/bootstrap/rm.js b/packages/ipfs-core/src/components/bootstrap/rm.js index a326748a04..9117719348 100644 --- a/packages/ipfs-core/src/components/bootstrap/rm.js +++ b/packages/ipfs-core/src/components/bootstrap/rm.js @@ -5,7 +5,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ repo }) => { /** diff --git a/packages/ipfs-core/src/components/cat.js b/packages/ipfs-core/src/components/cat.js index 1b491f3fd1..3c01d9cd1e 100644 --- a/packages/ipfs-core/src/components/cat.js +++ b/packages/ipfs-core/src/components/cat.js @@ -3,16 +3,16 @@ const { exporter } = require('ipfs-unixfs-exporter') const { normalizeCidPath } = require('../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const CID = require('cids') +const { CID } = require('multiformats/cid') /** * @typedef {Object} Context - * @property {import('ipld')} ipld + * @property {import('ipfs-repo').IPFSRepo} repo * @property 
{import('../types').Preload} preload * * @param {Context} context */ -module.exports = function ({ ipld, preload }) { +module.exports = function ({ repo, preload }) { /** * @type {import('ipfs-core-types/src/root').API["cat"]} */ @@ -21,10 +21,10 @@ module.exports = function ({ ipld, preload }) { if (options.preload !== false) { const pathComponents = ipfsPath.split('/') - preload(new CID(pathComponents[0])) + preload(CID.parse(pathComponents[0])) } - const file = await exporter(ipfsPath, ipld, options) + const file = await exporter(ipfsPath, repo.blocks, options) // File may not have unixfs prop if small & imported with rawLeaves true if (file.type === 'directory') { diff --git a/packages/ipfs-core/src/components/config.js b/packages/ipfs-core/src/components/config.js index 6fc3e3455b..38fad9ef1f 100644 --- a/packages/ipfs-core/src/components/config.js +++ b/packages/ipfs-core/src/components/config.js @@ -15,7 +15,7 @@ const log = require('debug')('ipfs:core:config') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ repo }) => { return { diff --git a/packages/ipfs-core/src/components/dag/get.js b/packages/ipfs-core/src/components/dag/get.js index 0ee69cf552..f15ba45383 100644 --- a/packages/ipfs-core/src/components/dag/get.js +++ b/packages/ipfs-core/src/components/dag/get.js @@ -3,13 +3,16 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const first = require('it-first') const last = require('it-last') +const { resolve } = require('../../utils') +const errCode = require('err-code') /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').Preload} config.preload */ -module.exports = ({ ipld, preload }) => { +module.exports = ({ codecs, repo, preload }) => { /** * @type 
{import('ipfs-core-types/src/dag').API["get"]} */ @@ -20,15 +23,24 @@ module.exports = ({ ipld, preload }) => { if (options.path) { const entry = options.localResolve - ? await first(ipld.resolve(cid, options.path)) - : await last(ipld.resolve(cid, options.path)) + ? await first(resolve(cid, options.path, codecs, repo, options)) + : await last(resolve(cid, options.path, codecs, repo, options)) /** @type {import('ipfs-core-types/src/dag').GetResult} - first and last will return undefined when empty */ const result = (entry) + + if (!result) { + throw errCode(new Error('Not found'), 'ERR_NOT_FOUND') + } + return result } + const codec = await codecs.getCodec(cid.code) + const block = await repo.blocks.get(cid, options) + const node = codec.decode(block) + return { - value: await ipld.get(cid, options), + value: node, remainderPath: '' } } diff --git a/packages/ipfs-core/src/components/dag/index.js b/packages/ipfs-core/src/components/dag/index.js index 932811bd73..a749082554 100644 --- a/packages/ipfs-core/src/components/dag/index.js +++ b/packages/ipfs-core/src/components/dag/index.js @@ -2,34 +2,20 @@ const createGet = require('./get') const createResolve = require('./resolve') -const createTree = require('./tree') const createPut = require('./put') -/** - * @typedef {Object} ReaderConfig - * @property {IPLD} ipld - * @property {Preload} preload - * - * @typedef {import('ipld')} IPLD - * @typedef {import('../../types').Preload} Preload - * @typedef {import('ipfs-core-types/src/pin').API} Pin - * @typedef {import('../gc-lock').GCLock} GCLock - * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions - */ - class DagAPI { /** * @param {Object} config - * @param {IPLD} config.ipld - * @param {Preload} config.preload - * @param {Pin} config.pin - * @param {GCLock} config.gcLock + * @param {import('ipfs-core-utils/src/multihashes')} config.hashers + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs + * @param 
{import('../../types').Preload} config.preload + * @param {import('ipfs-repo').IPFSRepo} config.repo */ - constructor ({ ipld, pin, preload, gcLock }) { - this.get = createGet({ ipld, preload }) - this.resolve = createResolve({ ipld, preload }) - this.tree = createTree({ ipld, preload }) - this.put = createPut({ ipld, preload, pin, gcLock }) + constructor ({ repo, codecs, hashers, preload }) { + this.get = createGet({ codecs, repo, preload }) + this.resolve = createResolve({ repo, codecs, preload }) + this.put = createPut({ repo, codecs, hashers, preload }) } } diff --git a/packages/ipfs-core/src/components/dag/put.js b/packages/ipfs-core/src/components/dag/put.js index edb33cb1ac..6ee884abaf 100644 --- a/packages/ipfs-core/src/components/dag/put.js +++ b/packages/ipfs-core/src/components/dag/put.js @@ -1,54 +1,47 @@ 'use strict' -const multicodec = require('multicodec') -const multihashes = require('multihashing-async').multihash - -/** - * @typedef {import('cids')} CID - * @typedef {import('cids').CIDVersion} CIDVersion - * @typedef {import('multicodec').CodecCode} CodecCode - * @typedef {import('multicodec').CodecName} CodecName - * @typedef {import('multihashes').HashCode} HashCode - * @typedef {import('multihashes').HashName} HashName - */ -/** - * - * @param {CodecName} name - */ -const nameToCodec = name => multicodec.getCodeFromName(name) -/** - * @param {HashName} name - */ -const nameToHashCode = name => multihashes.names[name] +const { CID } = require('multiformats/cid') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipld')} config.ipld - * @param {import('ipfs-core-types/src/pin').API} config.pin + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs + * @param {import('ipfs-core-utils/src/multihashes')} config.hashers * @param {import('../../types').Preload} config.preload - * @param {import('.').GCLock} 
config.gcLock */ -module.exports = ({ ipld, pin, gcLock, preload }) => { +module.exports = ({ repo, codecs, hashers, preload }) => { /** * @type {import('ipfs-core-types/src/dag').API["put"]} */ async function put (dagNode, options = {}) { - const { cidVersion, format, hashAlg } = readEncodingOptions(options) - - const release = options.pin ? await gcLock.readLock() : null + const release = options.pin ? await repo.gcLock.readLock() : null try { - const cid = await ipld.put(dagNode, format, { - hashAlg, - cidVersion, + const codecName = options.format || 'dag-cbor' + const cidVersion = options.version != null ? options.version : (codecName === 'dag-pb' ? 0 : 1) + const codec = await codecs.getCodec(codecName) + + if (!codec) { + throw new Error(`Unknown codec ${options.format}, please configure additional BlockCodecs for this IPFS instance`) + } + + const hasher = await hashers.getHasher(options.hashAlg || 'sha2-256') + + if (!hasher) { + throw new Error(`Unknown hash algorithm ${options.hashAlg}, please configure additional MultihashHashers for this IPFS instance`) + } + + const buf = codec.encode(dagNode) + const hash = await hasher.digest(buf) + const cid = CID.create(cidVersion, codec.code, hash) + + await repo.blocks.put(cid, buf, { signal: options.signal }) if (options.pin) { - await pin.add(cid, { - lock: false - }) + await repo.pins.pinRecursively(cid) } if (options.preload !== false) { @@ -65,66 +58,3 @@ module.exports = ({ ipld, pin, gcLock, preload }) => { return withTimeoutOption(put) } - -/** - * @param {import('ipfs-core-types/src/dag').PutOptions} options - */ -const readEncodingOptions = (options) => { - if (options.cid && (options.format || options.hashAlg)) { - throw new Error('Can\'t put dag node. Please provide either `cid` OR `format` and `hashAlg` options.') - } else if (((options.format && !options.hashAlg) || (!options.format && options.hashAlg))) { - throw new Error('Can\'t put dag node. 
Please provide `format` AND `hashAlg` options.') - } - - const { hashAlg, format } = options.cid != null - ? { format: options.cid.code, hashAlg: undefined } - : encodingCodes({ ...defaultCIDOptions, ...options }) - const cidVersion = readVersion({ ...options, format, hashAlg }) - - return { - cidVersion, - format, - hashAlg - } -} - -/** - * - * @param {Object} options - * @param {CodecCode|CodecName} options.format - * @param {HashCode|HashName} [options.hashAlg] - */ -const encodingCodes = ({ format, hashAlg }) => ({ - format: typeof format === 'string' ? nameToCodec(format) : format, - hashAlg: typeof hashAlg === 'string' ? nameToHashCode(hashAlg) : hashAlg -}) - -/** - * Figures out what version of CID should be used given the options. - * - * @param {Object} options - * @param {CIDVersion} [options.version] - * @param {CID} [options.cid] - * @param {CodecCode} [options.format] - * @param {HashCode} [options.hashAlg] - */ -const readVersion = ({ version, cid, format, hashAlg }) => { - // If version is passed just use that. - if (typeof version === 'number') { - return version - // If cid is provided use version field from it. 
- } else if (cid) { - return cid.version - // If it's dag-pb nodes use version 0 - } else if (format === multicodec.DAG_PB && hashAlg === multicodec.SHA2_256) { - return 0 - } else { - // Otherwise use version 1 - return 1 - } -} - -const defaultCIDOptions = { - format: multicodec.getCodeFromName('dag-cbor'), - hashAlg: multihashes.names['sha2-256'] -} diff --git a/packages/ipfs-core/src/components/dag/resolve.js b/packages/ipfs-core/src/components/dag/resolve.js index f8e0c7855b..b9302e355e 100644 --- a/packages/ipfs-core/src/components/dag/resolve.js +++ b/packages/ipfs-core/src/components/dag/resolve.js @@ -1,66 +1,30 @@ 'use strict' -const CID = require('cids') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') +const { resolvePath } = require('../../utils') /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').Preload} config.preload */ -module.exports = ({ ipld, preload }) => { +module.exports = ({ repo, codecs, preload }) => { /** * @type {import('ipfs-core-types/src/dag').API["resolve"]} */ - async function resolve (ipfsPath, options = {}) { + async function dagResolve (ipfsPath, options = {}) { const { - cid, - path + cid } = toCidAndPath(ipfsPath) if (options.preload !== false) { preload(cid) } - if (path) { - options.path = path - } - - let lastCid = cid - let lastRemainderPath = options.path || '' - - if (lastRemainderPath.startsWith('/')) { - lastRemainderPath = lastRemainderPath.substring(1) - } - - if (options.path) { - try { - for await (const { value, remainderPath } of ipld.resolve(cid, options.path, { - signal: options.signal - })) { - if (!CID.isCID(value)) { - break - } - - lastRemainderPath = remainderPath - lastCid = value - } - } catch (err) { - // TODO: add error codes to IPLD - if 
(err.message.startsWith('Object has no property')) { - err.message = `no link named "${lastRemainderPath.split('/')[0]}" under ${lastCid}` - err.code = 'ERR_NO_LINK' - } - throw err - } - } - - return { - cid: lastCid, - remainderPath: lastRemainderPath || '' - } + return resolvePath(repo, codecs, ipfsPath, options) } - return withTimeoutOption(resolve) + return withTimeoutOption(dagResolve) } diff --git a/packages/ipfs-core/src/components/dag/tree.js b/packages/ipfs-core/src/components/dag/tree.js deleted file mode 100644 index d65b627532..0000000000 --- a/packages/ipfs-core/src/components/dag/tree.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') - -/** - * @param {Object} config - * @param {import('ipld')} config.ipld - * @param {import('../../types').Preload} config.preload - */ -module.exports = ({ ipld, preload }) => { - /** - * @type {import('ipfs-core-types/src/dag').API["tree"]} - */ - async function * tree (ipfsPath, options = {}) { // eslint-disable-line require-await - const { - cid, - path - } = toCidAndPath(ipfsPath) - - if (path) { - options.path = path - } - - if (options.preload !== false) { - preload(cid) - } - - yield * ipld.tree(cid, options.path, options) - } - - return withTimeoutOption(tree) -} diff --git a/packages/ipfs-core/src/components/dht.js b/packages/ipfs-core/src/components/dht.js index 426fed203d..1b4a62bc02 100644 --- a/packages/ipfs-core/src/components/dht.js +++ b/packages/ipfs-core/src/components/dht.js @@ -1,7 +1,7 @@ 'use strict' const PeerId = require('peer-id') -const CID = require('cids') +const { CID } = require('multiformats/cid') const errCode = require('err-code') const { NotEnabledError } = require('../errors') const get = require('dlv') @@ -10,7 +10,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config * @param 
{import('../types').NetworkService} config.network - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ network, repo }) => { const { get, put, findProvs, findPeer, provide, query } = { @@ -52,7 +52,7 @@ module.exports = ({ network, repo }) => { */ async findPeer (peerId, options) { const { libp2p } = await use(network, options) - const peer = await libp2p._dht.findPeer(PeerId.createFromCID(peerId)) + const peer = await libp2p._dht.findPeer(PeerId.createFromB58String(peerId)) return { id: peer.id.toB58String(), @@ -65,12 +65,13 @@ module.exports = ({ network, repo }) => { */ async * provide (cids, options = { recursive: false }) { const { libp2p } = await use(network, options) - cids = Array.isArray(cids) ? cids : [cids] + /** @type {CID[]} */ + const cidArr = Array.isArray(cids) ? cids : [cids] for (const i in cids) { if (typeof cids[i] === 'string') { try { - cids[i] = new CID(cids[i]) + cids[i] = CID.parse(cids[i]) } catch (err) { throw errCode(err, 'ERR_INVALID_CID') } @@ -78,7 +79,7 @@ module.exports = ({ network, repo }) => { } // ensure blocks are actually local - const hasCids = await Promise.all(cids.map(cid => repo.blocks.has(cid))) + const hasCids = await Promise.all(cidArr.map(cid => repo.blocks.has(cid))) const hasAll = hasCids.every(has => has) if (!hasAll) { @@ -101,7 +102,7 @@ module.exports = ({ network, repo }) => { async * query (peerId, options) { const { libp2p } = await use(network, options) - for await (const closerPeerId of libp2p._dht.getClosestPeers(PeerId.createFromCID(peerId).toBytes())) { + for await (const closerPeerId of libp2p._dht.getClosestPeers(PeerId.createFromB58String(peerId).toBytes())) { yield { id: closerPeerId.toB58String(), addrs: [] // TODO: get addrs? 
@@ -132,7 +133,7 @@ const parseCID = cid => { const cidStr = cid.toString().split('/') .filter((/** @type {string} */ part) => part && part !== 'ipfs' && part !== 'ipns')[0] - return (new CID(cidStr)).bytes + return CID.parse(cidStr).bytes } catch (error) { throw errCode(error, 'ERR_INVALID_CID') } diff --git a/packages/ipfs-core/src/components/files/chmod.js b/packages/ipfs-core/src/components/files/chmod.js index 3bc09e4820..92be83bedf 100644 --- a/packages/ipfs-core/src/components/files/chmod.js +++ b/packages/ipfs-core/src/components/files/chmod.js @@ -9,9 +9,8 @@ const toTrail = require('./utils/to-trail') const addLink = require('./utils/add-link') const updateTree = require('./utils/update-tree') const updateMfsRoot = require('./utils/update-mfs-root') -const { DAGNode } = require('ipld-dag-pb') -const mc = require('multicodec') -const mh = require('multihashing-async').multihash +const dagPb = require('@ipld/dag-pb') +const { CID } = require('multiformats/cid') const { pipe } = require('it-pipe') const { importer } = require('ipfs-unixfs-importer') const { recursive } = require('ipfs-unixfs-exporter') @@ -23,12 +22,13 @@ const persist = require('ipfs-unixfs-importer/src/utils/persist') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion + * @typedef {import('@ipld/dag-pb').PBNode} PBNode * @typedef {import('./').MfsContext} MfsContext + * * @typedef {object} DefaultOptions * @property {boolean} flush - * @property {HashName} hashAlg + * @property {string} hashAlg * @property {CIDVersion} cidVersion * @property {number} shardSplitThreshold * @property {boolean} recursive @@ -228,7 +228,7 @@ module.exports = (context) => { name } = await toMfsPath(context, path, opts) - if (cid.codec !== 'dag-pb') { + if (cid.code !== dagPb.code) { throw errCode(new 
Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') } @@ -237,14 +237,17 @@ module.exports = (context) => { // but do not reimport files, only manipulate dag-pb nodes const root = await pipe( async function * () { - for await (const entry of recursive(cid, context.ipld)) { + for await (const entry of recursive(cid, context.repo.blocks)) { if (entry.type !== 'file' && entry.type !== 'directory') { throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') } entry.unixfs.mode = calculateMode(mode, entry.unixfs) - const node = new DAGNode(entry.unixfs.marshal(), entry.node.Links) + const node = dagPb.prepare({ + Data: entry.unixfs.marshal(), + Links: entry.node.Links + }) yield { path: entry.path, @@ -252,19 +255,24 @@ module.exports = (context) => { } } }, - // @ts-ignore source is not compatible because we are not importing files - (source) => importer(source, context.block, { + // @ts-ignore we account for the incompatible source type with our custom dag builder below + (source) => importer(source, context.repo.blocks, { ...opts, pin: false, dagBuilder: async function * (source, block, opts) { for await (const entry of source) { yield async function () { - /** @type {DAGNode} */ + /** @type {PBNode} */ // @ts-ignore - cannot derive type const node = entry.content - const buf = node.serialize() + const buf = dagPb.encode(node) const cid = await persist(buf, block, opts) + + if (!node.Data) { + throw errCode(new Error(`${cid} had no data`), 'ERR_INVALID_NODE') + } + const unixfs = UnixFS.unmarshal(node.Data) return { @@ -293,31 +301,43 @@ module.exports = (context) => { return } - let node = await context.ipld.get(cid) + const block = await context.repo.blocks.get(cid) + const node = dagPb.decode(block) + + if (!node.Data) { + throw errCode(new Error(`${cid} had no data`), 'ERR_INVALID_NODE') + } + const metadata = UnixFS.unmarshal(node.Data) metadata.mode = calculateMode(mode, metadata) - node = new DAGNode(metadata.marshal(), node.Links) + 
const updatedBlock = dagPb.encode({ + Data: metadata.marshal(), + Links: node.Links + }) - /** @type {HashName} */ const hashAlg = opts.hashAlg || defaultOptions.hashAlg + const hasher = await context.hashers.getHasher(hashAlg) + const hash = await hasher.digest(updatedBlock) + const updatedCid = CID.create(opts.cidVersion, dagPb.code, hash) - const updatedCid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: cid.version, - hashAlg: mh.names[hashAlg], - onlyHash: !opts.flush - }) + if (opts.flush) { + await context.repo.blocks.put(updatedCid, updatedBlock) + } const trail = await toTrail(context, mfsDirectory) const parent = trail[trail.length - 1] - const parentNode = await context.ipld.get(parent.cid) + const parentCid = CID.decode(parent.cid.bytes) + const parentBlock = await context.repo.blocks.get(parentCid) + const parentNode = dagPb.decode(parentBlock) const result = await addLink(context, { parent: parentNode, name: name, cid: updatedCid, - size: node.serialize().length, + size: updatedBlock.length, flush: opts.flush, - hashAlg: hashAlg, + // TODO vmx 2021-03-29: decide on the API, whether it should be a `hashAlg` or `hasher` + hashAlg, cidVersion: cid.version, shardSplitThreshold: Infinity }) diff --git a/packages/ipfs-core/src/components/files/cp.js b/packages/ipfs-core/src/components/files/cp.js index 64a4e6561a..4549fc9736 100644 --- a/packages/ipfs-core/src/components/files/cp.js +++ b/packages/ipfs-core/src/components/files/cp.js @@ -13,10 +13,9 @@ const toTrail = require('./utils/to-trail') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids')} CID - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('@ipld/dag-pb').PBNode} DAGNode + * @typedef {import('multiformats/cid').CID} CID + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef 
{import('ipfs-unixfs').Mtime} Mtime * @typedef {import('./utils/to-mfs-path').MfsPath} MfsPath * @typedef {import('./utils/to-trail').MfsTrail} MfsTrail @@ -24,7 +23,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') * @typedef {object} DefaultOptions * @property {boolean} parents * @property {boolean} flush - * @property {HashName} hashAlg + * @property {string} hashAlg * @property {CIDVersion} cidVersion * @property {number} shardSplitThreshold * @property {AbortSignal} [signal] @@ -58,7 +57,7 @@ module.exports = (context) => { } const sources = await Promise.all( - from.map(path => toMfsPath(context, path, options)) + from.map((/** @type {CID | string} */ path) => toMfsPath(context, path, options)) ) let destination = await toMfsPath(context, to, options) @@ -78,6 +77,7 @@ module.exports = (context) => { if (destination.exists) { log('Destination exists') + // @ts-ignore ts seems to think `sources` will always have a length of 10 if (sources.length === 1 && !destinationIsDirectory) { throw errCode(new Error('directory already has entry by that name'), 'ERR_ALREADY_EXISTS') } @@ -116,6 +116,7 @@ module.exports = (context) => { const destinationPath = isDirectory(destination) ? 
destination.mfsPath : destination.mfsDirectory const trail = await toTrail(context, destinationPath) + // @ts-ignore ts seems to think `sources` will always have a length of 10 if (sources.length === 1) { const source = sources.pop() @@ -205,13 +206,12 @@ const copyToDirectory = async (context, sources, destination, destinationTrail, */ const addSourceToParent = async (context, source, childName, parent, options) => { const sourceBlock = await context.repo.blocks.get(source.cid) - const { node, cid } = await addLink(context, { parentCid: parent.cid, - size: sourceBlock.data.length, + size: sourceBlock.length, cid: source.cid, name: childName, hashAlg: options.hashAlg, diff --git a/packages/ipfs-core/src/components/files/index.js b/packages/ipfs-core/src/components/files/index.js index 46898d4b83..38a24ababa 100644 --- a/packages/ipfs-core/src/components/files/index.js +++ b/packages/ipfs-core/src/components/files/index.js @@ -4,10 +4,13 @@ const createLock = require('./utils/create-lock') const isIpfs = require('is-ipfs') /** + * @typedef {import('multiformats/hashes/interface').MultihashHasher} MultihashHasher + * @typedef {import('ipfs-core-utils/src/multihashes')} Multihashes + * @typedef {import('ipfs-repo').IPFSRepo} IPFSRepo + * * @typedef {object} MfsContext - * @property {import('ipld')} ipld - * @property {import('ipfs-repo')} repo - * @property {import('ipfs-core-types/src/block').API} block + * @property {IPFSRepo} repo + * @property {Multihashes} hashers */ /** @@ -47,7 +50,7 @@ const unwrappedOperations = { /** * @param {object} arg - * @param {*} arg.options + * @param {MfsContext} arg.options * @param {*} arg.mfs * @param {*} arg.operations * @param {*} arg.lock @@ -62,23 +65,20 @@ const wrap = ({ const defaultOptions = { repoOwner: true, - ipld: null, repo: null } /** - * @param {*} options + * @param {object} options + * @param {IPFSRepo} options.repo + * @param {boolean} options.repoOwner + * @param {Multihashes} options.hashers */ function 
createMfs (options) { const { repoOwner } = Object.assign({}, defaultOptions || {}, options) - options.repo = { - blocks: options.blocks, - datastore: options.datastore - } - const lock = createLock(repoOwner) /** @@ -114,21 +114,17 @@ function createMfs (options) { /** * @param {object} context - * @param {import('ipld')} context.ipld - * @param {import('ipfs-core-types/src/block').API} context.block - * @param {import('ipfs-block-service')} context.blockService - * @param {import('ipfs-repo')} context.repo + * @param {IPFSRepo} context.repo * @param {import('../../types').Preload} context.preload * @param {import('..').Options} context.options + * @param {Multihashes} context.hashers * @returns {import('ipfs-core-types/src/files').API} */ -module.exports = ({ ipld, block, blockService, repo, preload, options: constructorOptions }) => { +module.exports = ({ repo, preload, hashers, options: constructorOptions }) => { const methods = createMfs({ - ipld, - block, - blocks: blockService, - datastore: repo.root, - repoOwner: constructorOptions.repoOwner + repo, + repoOwner: Boolean(constructorOptions.repoOwner), + hashers }) /** diff --git a/packages/ipfs-core/src/components/files/ls.js b/packages/ipfs-core/src/components/files/ls.js index c0f6db2515..245cf6bf69 100644 --- a/packages/ipfs-core/src/components/files/ls.js +++ b/packages/ipfs-core/src/components/files/ls.js @@ -39,7 +39,7 @@ module.exports = (context) => { */ async function * mfsLs (path, options = {}) { const mfsPath = await toMfsPath(context, path, options) - const fsEntry = await exporter(mfsPath.mfsPath, context.ipld) + const fsEntry = await exporter(mfsPath.mfsPath, context.repo.blocks) // directory, perhaps sharded if (fsEntry.type === 'directory') { diff --git a/packages/ipfs-core/src/components/files/mkdir.js b/packages/ipfs-core/src/components/files/mkdir.js index dbc8c6b48a..999e1ae60b 100644 --- a/packages/ipfs-core/src/components/files/mkdir.js +++ 
b/packages/ipfs-core/src/components/files/mkdir.js @@ -13,16 +13,14 @@ const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode - * @typedef {import('ipld-dag-pb').DAGLink} DAGLink - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids')} CID - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * @typedef {import('multiformats/cid').CID} CID + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('ipfs-unixfs').MtimeLike} MtimeLike * @typedef {import('./').MfsContext} MfsContext * @typedef {object} DefaultOptions * @property {boolean} parents - * @property {HashName} hashAlg + * @property {string} hashAlg * @property {CIDVersion} cidVersion * @property {number} shardSplitThreshold * @property {boolean} flush @@ -91,7 +89,7 @@ module.exports = (context) => { const subPath = `/ipfs/${root}/${subPathComponents.join('/')}` try { - parent = await exporter(subPath, context.ipld) + parent = await exporter(subPath, context.repo.blocks) if (parent.type !== 'file' && parent.type !== 'directory') { throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') @@ -139,8 +137,8 @@ module.exports = (context) => { /** * @param {MfsContext} context * @param {string} childName - * @param {{ cid: CID, node: { size: number }}} emptyDir - * @param {{ cid?: CID, node?: DAGNode }} parent + * @param {{ cid: CID, node?: PBNode }} emptyDir + * @param {{ cid?: CID, node?: PBNode }} parent * @param {{ name: string, cid: CID }[]} trail * @param {DefaultOptions} options */ @@ -150,7 +148,8 @@ const addEmptyDir = async (context, childName, emptyDir, parent, trail, options) const result = await addLink(context, { parent: parent.node, parentCid: parent.cid, - size: emptyDir.node.size, + // TODO vmx 2021-03-09: Remove the 
usage of size completely + size: 0, cid: emptyDir.cid, name: childName, hashAlg: options.hashAlg, diff --git a/packages/ipfs-core/src/components/files/mv.js b/packages/ipfs-core/src/components/files/mv.js index 92844286ab..6d85473dc4 100644 --- a/packages/ipfs-core/src/components/files/mv.js +++ b/packages/ipfs-core/src/components/files/mv.js @@ -6,14 +6,13 @@ const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('./').MfsContext} MfsContext * @typedef {object} DefaultOptions * @property {boolean} parents * @property {boolean} flush * @property {CIDVersion} cidVersion - * @property {HashName} hashAlg + * @property {string} hashAlg * @property {number} shardSplitThreshold * @property {AbortSignal} [signal] * @property {number} [timeout] diff --git a/packages/ipfs-core/src/components/files/read.js b/packages/ipfs-core/src/components/files/read.js index eb71aecb09..f73d70e453 100644 --- a/packages/ipfs-core/src/components/files/read.js +++ b/packages/ipfs-core/src/components/files/read.js @@ -37,7 +37,7 @@ module.exports = (context) => { return { [Symbol.asyncIterator]: async function * read () { const mfsPath = await toMfsPath(context, path, options) - const result = await exporter(mfsPath.mfsPath, context.ipld) + const result = await exporter(mfsPath.mfsPath, context.repo.blocks) if (result.type !== 'file') { throw errCode(new Error(`${path} was not a file`), 'ERR_NOT_FILE') diff --git a/packages/ipfs-core/src/components/files/rm.js b/packages/ipfs-core/src/components/files/rm.js index f235feafee..7da8e2e2d8 100644 --- a/packages/ipfs-core/src/components/files/rm.js +++ b/packages/ipfs-core/src/components/files/rm.js @@ -10,13 +10,12 @@ const withTimeoutOption = 
require('ipfs-core-utils/src/with-timeout-option') const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) /** - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('./').MfsContext} MfsContext * @typedef {object} DefaultOptions * @property {boolean} recursive * @property {CIDVersion} cidVersion - * @property {HashName} hashAlg + * @property {string} hashAlg * @property {boolean} flush * @property {number} shardSplitThreshold * @property {AbortSignal} [signal] diff --git a/packages/ipfs-core/src/components/files/stat.js b/packages/ipfs-core/src/components/files/stat.js index cd38b9d907..51933bb6a5 100644 --- a/packages/ipfs-core/src/components/files/stat.js +++ b/packages/ipfs-core/src/components/files/stat.js @@ -6,6 +6,7 @@ const { exporter } = require('ipfs-unixfs-exporter') const log = require('debug')('ipfs:mfs:stat') const errCode = require('err-code') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +const dagPb = require('@ipld/dag-pb') /** * @typedef {import('./').MfsContext} MfsContext @@ -49,7 +50,7 @@ module.exports = (context) => { let file try { - file = await exporter(exportPath, context.ipld) + file = await exporter(exportPath, context.repo.blocks) } catch (err) { if (err.code === 'ERR_NOT_FOUND') { throw errCode(new Error(`${path} does not exist`), 'ERR_NOT_FOUND') @@ -59,7 +60,7 @@ module.exports = (context) => { } if (!statters[file.type]) { - throw new Error(`Cannot stat codec ${file.cid.codec}`) + throw new Error(`Cannot stat codec ${file.cid.code}`) } return statters[file.type](file) @@ -94,7 +95,7 @@ const statters = { cid: file.cid, type: 'file', size: file.unixfs.fileSize(), - cumulativeSize: file.node.size, + cumulativeSize: dagPb.encode(file.node).length + (file.node.Links || []).reduce((acc, curr) => acc + (curr.Tsize || 0), 0), blocks: 
file.unixfs.blockSizes.length, local: undefined, sizeLocal: undefined, @@ -117,7 +118,7 @@ const statters = { cid: file.cid, type: 'directory', size: 0, - cumulativeSize: file.node.size, + cumulativeSize: dagPb.encode(file.node).length + (file.node.Links || []).reduce((acc, curr) => acc + (curr.Tsize || 0), 0), blocks: file.node.Links.length, local: undefined, sizeLocal: undefined, diff --git a/packages/ipfs-core/src/components/files/touch.js b/packages/ipfs-core/src/components/files/touch.js index c194821ba0..c50bbc6306 100644 --- a/packages/ipfs-core/src/components/files/touch.js +++ b/packages/ipfs-core/src/components/files/touch.js @@ -9,21 +9,19 @@ const toTrail = require('./utils/to-trail') const addLink = require('./utils/add-link') const updateTree = require('./utils/update-tree') const updateMfsRoot = require('./utils/update-mfs-root') -const { DAGNode } = require('ipld-dag-pb') -const mc = require('multicodec') -const mh = require('multihashing-async').multihash +const dagPb = require('@ipld/dag-pb') +const { CID } = require('multiformats/cid') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('ipfs-unixfs').MtimeLike} MtimeLike * @typedef {import('./').MfsContext} MfsContext * @typedef {object} DefaultOptions * @property {boolean} flush * @property {number} shardSplitThreshold * @property {CIDVersion} cidVersion - * @property {HashName} hashAlg + * @property {string} hashAlg * @property {MtimeLike} [mtime] * @property {AbortSignal} [signal] * @property {number} [timeout] @@ -60,7 +58,10 @@ module.exports = (context) => { exists } = await toMfsPath(context, path, settings) - let node + const hashAlg = options.hashAlg || defaultOptions.hashAlg + const hasher = await context.hashers.getHasher(hashAlg) + + let updatedBlock let updatedCid let 
cidVersion = settings.cidVersion @@ -71,47 +72,61 @@ module.exports = (context) => { // @ts-ignore TODO: restore hrtime support to ipfs-unixfs constructor - it's in the code, just not the signature mtime: settings.mtime }) - node = new DAGNode(metadata.marshal()) - updatedCid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: settings.cidVersion, - hashAlg: mh.names['sha2-256'], - onlyHash: !settings.flush - }) + updatedBlock = dagPb.encode({ Data: metadata.marshal(), Links: [] }) + + const hash = await hasher.digest(updatedBlock) + + updatedCid = CID.create(settings.cidVersion, dagPb.code, hash) + + if (settings.flush) { + await context.repo.blocks.put(updatedCid, updatedBlock) + } } else { - if (cid.codec !== 'dag-pb') { + if (cid.code !== dagPb.code) { throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') } cidVersion = cid.version - node = await context.ipld.get(cid) + const block = await context.repo.blocks.get(cid) + const node = dagPb.decode(block) + + if (!node.Data) { + throw errCode(new Error(`${path} had no data`), 'ERR_INVALID_NODE') + } const metadata = UnixFS.unmarshal(node.Data) // @ts-ignore TODO: restore setting all date types as mtime - it's in the code, just not the signature metadata.mtime = settings.mtime - node = new DAGNode(metadata.marshal(), node.Links) - - updatedCid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: cid.version, - hashAlg: mh.names['sha2-256'], - onlyHash: !settings.flush + updatedBlock = dagPb.encode({ + Data: metadata.marshal(), + Links: node.Links }) + + const hash = await hasher.digest(updatedBlock) + updatedCid = CID.create(settings.cidVersion, dagPb.code, hash) + + if (settings.flush) { + await context.repo.blocks.put(updatedCid, updatedBlock) + } } const trail = await toTrail(context, mfsDirectory) const parent = trail[trail.length - 1] - const parentNode = await context.ipld.get(parent.cid) + const parentCid = parent.cid + const parentBlock = await 
context.repo.blocks.get(parentCid) + const parentNode = dagPb.decode(parentBlock) const result = await addLink(context, { parent: parentNode, name: name, cid: updatedCid, - size: node.serialize().length, + size: updatedBlock.length, flush: settings.flush, shardSplitThreshold: settings.shardSplitThreshold, - hashAlg: 'sha2-256', + hashAlg: settings.hashAlg, cidVersion }) diff --git a/packages/ipfs-core/src/components/files/utils/add-link.js b/packages/ipfs-core/src/components/files/utils/add-link.js index a316c354e3..a5568466c2 100644 --- a/packages/ipfs-core/src/components/files/utils/add-link.js +++ b/packages/ipfs-core/src/components/files/utils/add-link.js @@ -1,10 +1,9 @@ 'use strict' -const { - DAGLink, - DAGNode -} = require('ipld-dag-pb') -const CID = require('cids') +// @ts-ignore +const dagPb = require('@ipld/dag-pb') +const { sha256, sha512 } = require('multiformats/hashes/sha2') +const { CID } = require('multiformats/cid') const log = require('debug')('ipfs:mfs:core:utils:add-link') const { UnixFS } = require('ipfs-unixfs') // @ts-ignore - refactor this to not need deep require @@ -20,16 +19,15 @@ const { addLinksToHamtBucket } = require('./hamt-utils') const errCode = require('err-code') -const mc = require('multicodec') -const mh = require('multihashing-async').multihash const last = require('it-last') /** * @typedef {import('ipfs-unixfs').Mtime} Mtime - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('hamt-sharding').Bucket} Bucket * @typedef {import('../').MfsContext} MfsContext + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * @typedef {import('@ipld/dag-pb').PBLink} PBLink */ /** @@ -39,22 +37,28 @@ const last = require('it-last') * @param {string} options.name * @param {number} options.size * @param {number} options.shardSplitThreshold - * @param {HashName} options.hashAlg + * @param {string} 
options.hashAlg * @param {CIDVersion} options.cidVersion * @param {boolean} options.flush * @param {CID} [options.parentCid] - * @param {DAGNode} [options.parent] + * @param {PBNode} [options.parent] */ const addLink = async (context, options) => { let parent = options.parent if (options.parentCid) { - if (!CID.isCID(options.parentCid)) { + const parentCid = CID.asCID(options.parentCid) + if (parentCid === null) { throw errCode(new Error('Invalid CID passed to addLink'), 'EINVALIDPARENTCID') } - log(`Loading parent node ${options.parentCid}`) - parent = await context.ipld.get(options.parentCid) + if (parentCid.code !== dagPb.code) { + throw errCode(new Error('Unsupported codec. Only DAG-PB is supported'), 'EINVALIDPARENTCID') + } + + log(`Loading parent node ${parentCid}`) + const block = await context.repo.blocks.get(parentCid) + parent = dagPb.decode(block) } if (!parent) { @@ -69,14 +73,14 @@ const addLink = async (context, options) => { throw errCode(new Error('No child name passed to addLink'), 'EINVALIDCHILDNAME') } - if (!CID.isCID(options.cid)) { - options.cid = new CID(options.cid) - } - if (!options.size && options.size !== 0) { throw errCode(new Error('No child size passed to addLink'), 'EINVALIDCHILDSIZE') } + if (!parent.Data) { + throw errCode(new Error('Parent node with no data passed to addLink'), 'ERR_INVALID_PARENT') + } + const meta = UnixFS.unmarshal(parent.Data) if (meta.type === 'hamt-sharded-directory') { @@ -113,8 +117,8 @@ const addLink = async (context, options) => { * @param {CID} options.cid * @param {string} options.name * @param {number} options.size - * @param {DAGNode} options.parent - * @param {HashName} options.hashAlg + * @param {PBNode} options.parent + * @param {string} options.hashAlg * @param {CIDVersion} options.cidVersion * @param {boolean} options.flush * @param {Mtime} [options.mtime] @@ -122,8 +126,8 @@ const addLink = async (context, options) => { */ const convertToShardedDirectory = async (context, options) => { const 
result = await createShard(context, options.parent.Links.map(link => ({ - name: link.Name, - size: link.Tsize, + name: (link.Name || ''), + size: link.Tsize || 0, cid: link.Hash })).concat({ name: options.name, @@ -142,19 +146,31 @@ const convertToShardedDirectory = async (context, options) => { * @param {CID} options.cid * @param {string} options.name * @param {number} options.size - * @param {DAGNode} options.parent - * @param {HashName} options.hashAlg + * @param {PBNode} options.parent + * @param {string} options.hashAlg * @param {CIDVersion} options.cidVersion * @param {boolean} options.flush * @param {Mtime} [options.mtime] * @param {number} [options.mode] */ const addToDirectory = async (context, options) => { - options.parent.rmLink(options.name) - options.parent.addLink(new DAGLink(options.name, options.size, options.cid)) + // Remove existing link if it exists + const parentLinks = options.parent.Links.filter((link) => { + return link.Name !== options.name + }) + parentLinks.push({ + Name: options.name, + Tsize: options.size, + Hash: options.cid + }) + + if (!options.parent.Data) { + throw errCode(new Error('Parent node with no data passed to addToDirectory'), 'ERR_INVALID_PARENT') + } const node = UnixFS.unmarshal(options.parent.Data) + let data if (node.mtime) { // Update mtime if previously set const ms = Date.now() @@ -165,22 +181,29 @@ const addToDirectory = async (context, options) => { nsecs: (ms - (secs * 1000)) * 1000 } - options.parent = new DAGNode(node.marshal(), options.parent.Links) + data = node.marshal() + } else { + data = options.parent.Data } + options.parent = dagPb.prepare({ + Data: data, + Links: parentLinks + }) - const hashAlg = mh.names[options.hashAlg] + // Persist the new parent PbNode + const hasher = await context.hashers.getHasher(options.hashAlg) + const buf = dagPb.encode(options.parent) + const hash = await hasher.digest(buf) + const cid = CID.create(options.cidVersion, dagPb.code, hash) - // Persist the new parent DAGNode 
- const cid = await context.ipld.put(options.parent, mc.DAG_PB, { - cidVersion: options.cidVersion, - hashAlg, - onlyHash: !options.flush - }) + if (options.flush) { + await context.repo.blocks.put(cid, buf) + } return { node: options.parent, cid, - size: options.parent.size + size: buf.length } } @@ -190,8 +213,8 @@ const addToDirectory = async (context, options) => { * @param {CID} options.cid * @param {string} options.name * @param {number} options.size - * @param {DAGNode} options.parent - * @param {HashName} options.hashAlg + * @param {PBNode} options.parent + * @param {string} options.hashAlg * @param {CIDVersion} options.cidVersion * @param {boolean} options.flush */ @@ -199,30 +222,27 @@ const addToShardedDirectory = async (context, options) => { const { shard, path } = await addFileToShardedDirectory(context, options) - - const result = await last(shard.flush(context.block)) - /** @type {DAGNode} */ - const node = await context.ipld.get(result.cid) + const result = await last(shard.flush(context.repo.blocks)) + const block = await context.repo.blocks.get(result.cid) + const node = dagPb.decode(block) // we have written out the shard, but only one sub-shard will have been written so replace it in the original shard - const oldLink = options.parent.Links - .find(link => link.Name.substring(0, 2) === path[0].prefix) + const parentLinks = options.parent.Links.filter((link) => { + // TODO vmx 2021-03-31: Check that there cannot be multiple ones matching + // Remove the old link + return (link.Name || '').substring(0, 2) !== path[0].prefix + }) - /** @type {DAGLink | undefined} */ const newLink = node.Links - .find(link => link.Name.substring(0, 2) === path[0].prefix) + .find(link => (link.Name || '').substring(0, 2) === path[0].prefix) if (!newLink) { throw new Error(`No link found with prefix ${path[0].prefix}`) } - if (oldLink) { - options.parent.rmLink(oldLink.Name) - } - - options.parent.addLink(newLink) + parentLinks.push(newLink) - return 
updateHamtDirectory(context, options.parent.Links, path[0].bucket, options) + return updateHamtDirectory(context, parentLinks, path[0].bucket, options) } /** @@ -231,8 +251,8 @@ const addToShardedDirectory = async (context, options) => { * @param {CID} options.cid * @param {string} options.name * @param {number} options.size - * @param {DAGNode} options.parent - * @param {HashName} options.hashAlg + * @param {PBNode} options.parent + * @param {string} options.hashAlg * @param {CIDVersion} options.cidVersion */ const addFileToShardedDirectory = async (context, options) => { @@ -242,11 +262,30 @@ const addFileToShardedDirectory = async (context, options) => { size: options.size } + if (!options.parent.Data) { + throw errCode(new Error('Parent node with no data passed to addFileToShardedDirectory'), 'ERR_INVALID_PARENT') + } + // start at the root bucket and descend, loading nodes as we go const rootBucket = await recreateInitialHamtLevel(options.parent.Links) const node = UnixFS.unmarshal(options.parent.Data) const importerOptions = defaultImporterOptions() + // NOTE vmx 2021-04-01: in ipfs the hash algorithm is a constant in unixfs + // it's an implementation. Do the option conversion at the boundary between + // ipfs and unixfs. 
+ let hasher + switch (options.hashAlg) { + case 'sha2-256': + hasher = sha256 + break + case 'sha2-512': + hasher = sha512 + break + default: + throw new Error(`TODO vmx 2021-03-31: Proper error message for unsupported hash algorithms like ${options.hashAlg}`) + } + const shard = new DirSharded({ root: true, dir: true, @@ -260,6 +299,7 @@ const addFileToShardedDirectory = async (context, options) => { hamtHashFn: importerOptions.hamtHashFn, hamtHashCode: importerOptions.hamtHashCode, hamtBucketBits: importerOptions.hamtBucketBits, + hasher, ...options }) shard._bucket = rootBucket @@ -285,7 +325,7 @@ const addFileToShardedDirectory = async (context, options) => { } const link = node.Links - .find(link => link.Name.substring(0, 2) === segment.prefix) + .find(link => (link.Name || '').substring(0, 2) === segment.prefix) if (!link) { // prefix is new, file will be added to the current bucket @@ -303,7 +343,7 @@ const addFileToShardedDirectory = async (context, options) => { break } - if (link.Name.length > 2) { + if ((link.Name || '').length > 2) { // another file had the same prefix, will be replaced with a subshard log(`Link ${link.Name} ${link.Hash} will be replaced with a subshard`) index = path.length @@ -313,7 +353,8 @@ const addFileToShardedDirectory = async (context, options) => { // load sub-shard log(`Found subshard ${segment.prefix}`) - const subShard = await context.ipld.get(link.Hash) + const block = await context.repo.blocks.get(link.Hash) + const subShard = dagPb.decode(block) // subshard hasn't been loaded, descend to the next level of the HAMT if (!path[index]) { @@ -352,7 +393,7 @@ const addFileToShardedDirectory = async (context, options) => { /** * @param {{ pos: number, bucket: Bucket }} position - * @returns {{ bucket: Bucket, prefix: string, node?: DAGNode }[]} + * @returns {{ bucket: Bucket, prefix: string, node?: PBNode }[]} */ const toBucketPath = (position) => { const path = [{ diff --git 
a/packages/ipfs-core/src/components/files/utils/create-node.js b/packages/ipfs-core/src/components/files/utils/create-node.js index e2e05fc13f..9fb40cd05a 100644 --- a/packages/ipfs-core/src/components/files/utils/create-node.js +++ b/packages/ipfs-core/src/components/files/utils/create-node.js @@ -1,15 +1,12 @@ 'use strict' const { UnixFS } = require('ipfs-unixfs') -const { - DAGNode -} = require('ipld-dag-pb') -const mc = require('multicodec') -const mh = require('multihashing-async').multihash +const dagPb = require('@ipld/dag-pb') +const { CID } = require('multiformats/cid') /** * @typedef {import('ipfs-unixfs').MtimeLike} MtimeLike - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('../').MfsContext} MfsContext */ @@ -17,14 +14,13 @@ const mh = require('multihashing-async').multihash * @param {MfsContext} context * @param {'file' | 'directory'} type * @param {object} options - * @param {import('multihashes').HashName} options.hashAlg + * @param {string} options.hashAlg * @param {CIDVersion} options.cidVersion * @param {boolean} options.flush * @param {MtimeLike} [options.mtime] * @param {number} [options.mode] */ const createNode = async (context, type, options) => { - const hashAlg = mh.names[options.hashAlg] const metadata = new UnixFS({ type, mode: options.mode, @@ -32,12 +28,19 @@ const createNode = async (context, type, options) => { mtime: options.mtime }) - const node = new DAGNode(metadata.marshal()) - const cid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: options.cidVersion, - hashAlg, - onlyHash: !options.flush - }) + // Persist the new parent PBNode + const hasher = await context.hashers.getHasher(options.hashAlg) + const node = { + Data: metadata.marshal(), + Links: [] + } + const buf = dagPb.encode(node) + const hash = await hasher.digest(buf) + const cid = CID.create(options.cidVersion, dagPb.code, hash) + + if (options.flush) { + await 
context.repo.blocks.put(cid, buf) + } return { cid, diff --git a/packages/ipfs-core/src/components/files/utils/hamt-utils.js b/packages/ipfs-core/src/components/files/utils/hamt-utils.js index f155b557f0..5fa18989c1 100644 --- a/packages/ipfs-core/src/components/files/utils/hamt-utils.js +++ b/packages/ipfs-core/src/components/files/utils/hamt-utils.js @@ -1,8 +1,6 @@ 'use strict' -const { - DAGNode -} = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') const { Bucket, createHAMT @@ -13,32 +11,34 @@ const DirSharded = require('ipfs-unixfs-importer/src/dir-sharded') const defaultImporterOptions = require('ipfs-unixfs-importer/src/options') const log = require('debug')('ipfs:mfs:core:utils:hamt-utils') const { UnixFS } = require('ipfs-unixfs') -const mc = require('multicodec') -const mh = require('multihashing-async').multihash const last = require('it-last') +const { CID } = require('multiformats/cid') /** - * @typedef {import('ipld-dag-pb').DAGLink} DAGLink - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('ipfs-unixfs').Mtime} Mtime - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids')} CID * @typedef {import('../').MfsContext} MfsContext + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * @typedef {import('@ipld/dag-pb').PBLink} PBLink */ /** * @param {MfsContext} context - * @param {DAGLink[]} links + * @param {PBLink[]} links * @param {Bucket} bucket * @param {object} options - * @param {DAGNode} options.parent + * @param {PBNode} options.parent * @param {CIDVersion} options.cidVersion * @param {boolean} options.flush - * @param {HashName} options.hashAlg + * @param {string} options.hashAlg */ const updateHamtDirectory = async (context, links, bucket, options) => { const importerOptions = defaultImporterOptions() + if (!options.parent.Data) { + throw new Error('Could not update HAMT directory because parent had no data') + } + // 
update parent with new bit field const data = Uint8Array.from(bucket._children.bitField().reverse()) const node = UnixFS.unmarshal(options.parent.Data) @@ -51,23 +51,28 @@ const updateHamtDirectory = async (context, links, bucket, options) => { mtime: node.mtime }) - const hashAlg = mh.names[options.hashAlg] - const parent = new DAGNode(dir.marshal(), links) - const cid = await context.ipld.put(parent, mc.DAG_PB, { - cidVersion: options.cidVersion, - hashAlg, - onlyHash: !options.flush - }) + const hasher = await context.hashers.getHasher(options.hashAlg) + const parent = { + Data: dir.marshal(), + Links: links.sort((a, b) => (a.Name || '').localeCompare(b.Name || '')) + } + const buf = dagPb.encode(parent) + const hash = await hasher.digest(buf) + const cid = CID.create(options.cidVersion, dagPb.code, hash) + + if (options.flush) { + await context.repo.blocks.put(cid, buf) + } return { node: parent, cid, - size: parent.size + size: links.reduce((sum, link) => sum + (link.Tsize || 0), buf.length) } } /** - * @param {DAGLink[]} links + * @param {PBLink[]} links * @param {Bucket} rootBucket * @param {Bucket} parentBucket * @param {number} positionAtParent @@ -86,7 +91,7 @@ const recreateHamtLevel = async (links, rootBucket, parentBucket, positionAtPare } /** - * @param {DAGLink[]} links + * @param {PBLink[]} links */ const recreateInitialHamtLevel = async (links) => { const importerOptions = defaultImporterOptions() @@ -101,15 +106,17 @@ const recreateInitialHamtLevel = async (links) => { } /** - * @param {DAGLink[]} links + * @param {PBLink[]} links * @param {Bucket} bucket * @param {Bucket} rootBucket */ const addLinksToHamtBucket = async (links, bucket, rootBucket) => { await Promise.all( links.map(link => { - if (link.Name.length === 2) { - const pos = parseInt(link.Name, 16) + const linkName = (link.Name || '') + + if (linkName.length === 2) { + const pos = parseInt(linkName, 16) bucket._putObjectAt(pos, new Bucket({ hash: rootBucket._options.hash, @@ -119,7 
+126,7 @@ const addLinksToHamtBucket = async (links, bucket, rootBucket) => { return Promise.resolve() } - return rootBucket.put(link.Name.substring(2), { + return rootBucket.put(linkName.substring(2), { size: link.Tsize, cid: link.Hash }) @@ -141,7 +148,7 @@ const toPrefix = (position) => { /** * @param {MfsContext} context * @param {string} fileName - * @param {DAGNode} rootNode + * @param {PBNode} rootNode */ const generatePath = async (context, fileName, rootNode) => { // start at the root bucket and descend, loading nodes as we go @@ -149,7 +156,7 @@ const generatePath = async (context, fileName, rootNode) => { const position = await rootBucket._findNewBucketAndPos(fileName) // the path to the root bucket - /** @type {{ bucket: Bucket, prefix: string, node?: DAGNode }[]} */ + /** @type {{ bucket: Bucket, prefix: string, node?: PBNode }[]} */ const path = [{ bucket: position.bucket, prefix: toPrefix(position.pos) @@ -169,7 +176,7 @@ const generatePath = async (context, fileName, rootNode) => { path.reverse() path[0].node = rootNode - // load DAGNode for each path segment + // load PbNode for each path segment for (let i = 0; i < path.length; i++) { const segment = path[i] @@ -179,7 +186,7 @@ const generatePath = async (context, fileName, rootNode) => { // find prefix in links const link = segment.node.Links - .filter(link => link.Name.substring(0, 2) === segment.prefix) + .filter(link => (link.Name || '').substring(0, 2) === segment.prefix) .pop() // entry was not in shard @@ -200,7 +207,8 @@ const generatePath = async (context, fileName, rootNode) => { // found subshard log(`Found subshard ${segment.prefix}`) - const node = await context.ipld.get(link.Hash) + const block = await context.repo.blocks.get(link.Hash) + const node = dagPb.decode(block) // subshard hasn't been loaded, descend to the next level of the HAMT if (!path[i + 1]) { @@ -261,8 +269,9 @@ const createShard = async (context, contents, options = {}) => { hamtHashFn: importerOptions.hamtHashFn, 
hamtHashCode: importerOptions.hamtHashCode, hamtBucketBits: importerOptions.hamtBucketBits, + hasher: importerOptions.hasher, ...options, - codec: 'dag-pb' + codec: dagPb }) for (let i = 0; i < contents.length; i++) { @@ -272,7 +281,7 @@ const createShard = async (context, contents, options = {}) => { }) } - return last(shard.flush(context.block)) + return last(shard.flush(context.repo.blocks)) } module.exports = { diff --git a/packages/ipfs-core/src/components/files/utils/remove-link.js b/packages/ipfs-core/src/components/files/utils/remove-link.js index cd76807707..96ada77158 100644 --- a/packages/ipfs-core/src/components/files/utils/remove-link.js +++ b/packages/ipfs-core/src/components/files/utils/remove-link.js @@ -1,9 +1,8 @@ 'use strict' -const { - DAGLink -} = require('ipld-dag-pb') -const CID = require('cids') +// @ts-ignore - TODO vmx 2021-03-31 +const dagPb = require('@ipld/dag-pb') +const { CID } = require('multiformats/cid') const log = require('debug')('ipfs:mfs:core:utils:remove-link') const { UnixFS } = require('ipfs-unixfs') const { @@ -11,32 +10,29 @@ const { updateHamtDirectory } = require('./hamt-utils') const errCode = require('err-code') -const mc = require('multicodec') -const mh = require('multihashing-async').multihash /** * @typedef {import('../').MfsContext} MfsContext - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('hamt-sharding').Bucket} Bucket - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode + * @typedef {import('@ipld/dag-pb').PBNode} PBNode * * @typedef {object} RemoveLinkOptions * @property {string} name * @property {number} shardSplitThreshold - * @property {HashName} hashAlg + * @property {string} hashAlg * @property {CIDVersion} cidVersion * @property {boolean} flush * @property {CID} [parentCid] - * @property {DAGNode} [parent] + * @property {PBNode} [parent] * * @typedef {object} 
RemoveLinkOptionsInternal * @property {string} name * @property {number} shardSplitThreshold - * @property {HashName} hashAlg + * @property {string} hashAlg * @property {CIDVersion} cidVersion * @property {boolean} flush - * @property {DAGNode} parent + * @property {PBNode} parent */ /** @@ -47,12 +43,14 @@ const removeLink = async (context, options) => { let parent = options.parent if (options.parentCid) { - if (!CID.isCID(options.parentCid)) { + const parentCid = CID.asCID(options.parentCid) + if (parentCid === null) { throw errCode(new Error('Invalid CID passed to removeLink'), 'EINVALIDPARENTCID') } - log(`Loading parent node ${options.parentCid}`) - parent = await context.ipld.get(options.parentCid) + log(`Loading parent node ${parentCid}`) + const block = await context.repo.blocks.get(parentCid) + parent = dagPb.decode(block) } if (!parent) { @@ -63,6 +61,10 @@ const removeLink = async (context, options) => { throw errCode(new Error('No child name passed to removeLink'), 'EINVALIDCHILDNAME') } + if (!parent.Data) { + throw errCode(new Error('Parent node had no data'), 'ERR_INVALID_NODE') + } + const meta = UnixFS.unmarshal(parent.Data) if (meta.type === 'hamt-sharded-directory') { @@ -87,19 +89,23 @@ const removeLink = async (context, options) => { * @param {RemoveLinkOptionsInternal} options */ const removeFromDirectory = async (context, options) => { - const hashAlg = mh.names[options.hashAlg] - - options.parent.rmLink(options.name) - const cid = await context.ipld.put(options.parent, mc.DAG_PB, { - cidVersion: options.cidVersion, - hashAlg + // Remove existing link if it exists + options.parent.Links = options.parent.Links.filter((link) => { + return link.Name !== options.name }) - log(`Updated regular directory ${cid}`) + const parentBlock = await dagPb.encode(options.parent) + const hasher = await context.hashers.getHasher(options.hashAlg) + const hash = await hasher.digest(parentBlock) + const parentCid = CID.create(options.cidVersion, dagPb.code, hash) 
+ + await context.repo.blocks.put(parentCid, parentBlock) + + log(`Updated regular directory ${parentCid}`) return { node: options.parent, - cid + cid: parentCid } } @@ -123,10 +129,10 @@ const removeFromShardedDirectory = async (context, options) => { /** * @param {MfsContext} context - * @param {{ bucket: Bucket, prefix: string, node?: DAGNode }[]} positions + * @param {{ bucket: Bucket, prefix: string, node?: PBNode }[]} positions * @param {string} name * @param {RemoveLinkOptionsInternal} options - * @returns {Promise<{ node: DAGNode, cid: CID, size: number }>} + * @returns {Promise<{ node: PBNode, cid: CID, size: number }>} */ const updateShard = async (context, positions, name, options) => { const last = positions.pop() @@ -146,7 +152,7 @@ const updateShard = async (context, positions, name, options) => { } const link = node.Links - .find(link => link.Name.substring(0, 2) === prefix) + .find(link => (link.Name || '').substring(0, 2) === prefix) if (!link) { throw errCode(new Error(`No link found with prefix ${prefix} for file ${name}`), 'ERR_NOT_FOUND') @@ -155,11 +161,13 @@ const updateShard = async (context, positions, name, options) => { if (link.Name === `${prefix}${name}`) { log(`Removing existing link ${link.Name}`) - node.rmLink(link.Name) + const links = node.Links.filter((nodeLink) => { + return nodeLink.Name !== link.Name + }) await bucket.del(name) - return updateHamtDirectory(context, node.Links, bucket, options) + return updateHamtDirectory(context, links, bucket, options) } log(`Descending into sub-shard ${link.Name} for ${prefix}${name}`) @@ -176,9 +184,9 @@ const updateShard = async (context, positions, name, options) => { // convert shard back to normal dir const link = result.node.Links[0] - newName = `${prefix}${link.Name.substring(2)}` + newName = `${prefix}${(link.Name || '').substring(2)}` cid = link.Hash - size = link.Tsize + size = link.Tsize || 0 } log(`Updating shard ${prefix} with name ${newName}`) @@ -189,7 +197,7 @@ const 
updateShard = async (context, positions, name, options) => { /** * @param {MfsContext} context * @param {Bucket} bucket - * @param {DAGNode} parent + * @param {PBNode} parent * @param {string} oldName * @param {string} newName * @param {number} size @@ -197,10 +205,17 @@ const updateShard = async (context, positions, name, options) => { * @param {RemoveLinkOptionsInternal} options */ const updateShardParent = (context, bucket, parent, oldName, newName, size, cid, options) => { - parent.rmLink(oldName) - parent.addLink(new DAGLink(newName, size, cid)) + // Remove existing link if it exists + const parentLinks = parent.Links.filter((link) => { + return link.Name !== oldName + }) + parentLinks.push({ + Name: newName, + Tsize: size, + Hash: cid + }) - return updateHamtDirectory(context, parent.Links, bucket, options) + return updateHamtDirectory(context, parentLinks, bucket, options) } module.exports = removeLink diff --git a/packages/ipfs-core/src/components/files/utils/to-mfs-path.js b/packages/ipfs-core/src/components/files/utils/to-mfs-path.js index 7c82641894..a3db10b2bd 100644 --- a/packages/ipfs-core/src/components/files/utils/to-mfs-path.js +++ b/packages/ipfs-core/src/components/files/utils/to-mfs-path.js @@ -4,7 +4,7 @@ const loadMfsRoot = require('./with-mfs-root') const toPathComponents = require('./to-path-components') const { exporter } = require('ipfs-unixfs-exporter') const errCode = require('err-code') -const CID = require('cids') +const { CID } = require('multiformats/cid') const IPFS_PREFIX = 'ipfs' @@ -97,30 +97,34 @@ const toMfsPath = async (context, path, options) => { entryType: 'file' } - if (CID.isCID(path)) { - path = `/ipfs/${path}` + let ipfsPath = '' + + if (path instanceof CID) { + ipfsPath = `/ipfs/${path}` + } else { + ipfsPath = path } - path = (path || '').trim() - path = path.replace(/(\/\/+)/g, '/') + ipfsPath = ipfsPath.trim() + ipfsPath = ipfsPath.replace(/(\/\/+)/g, '/') - if (path.endsWith('/') && path.length > 1) { - path = 
path.substring(0, path.length - 1) + if (ipfsPath.endsWith('/') && ipfsPath.length > 1) { + ipfsPath = ipfsPath.substring(0, ipfsPath.length - 1) } - if (!path) { + if (!ipfsPath) { throw errCode(new Error('paths must not be empty'), 'ERR_NO_PATH') } - if (path.substring(0, 1) !== '/') { + if (ipfsPath.substring(0, 1) !== '/') { throw errCode(new Error('paths must start with a leading slash'), 'ERR_INVALID_PATH') } - if (path.substring(path.length - 1) === '/') { - path = path.substring(0, path.length - 1) + if (ipfsPath.substring(ipfsPath.length - 1) === '/') { + ipfsPath = ipfsPath.substring(0, ipfsPath.length - 1) } - const pathComponents = toPathComponents(path) + const pathComponents = toPathComponents(ipfsPath) if (pathComponents[0] === IPFS_PREFIX) { // e.g. /ipfs/QMfoo or /ipfs/Qmfoo/sub/path @@ -165,7 +169,7 @@ const toMfsPath = async (context, path, options) => { const cidPath = output.type === 'mfs' ? output.mfsPath : output.path try { - const res = await exporter(cidPath, context.ipld) + const res = await exporter(cidPath, context.repo.blocks, options) output.cid = res.cid output.mfsPath = `/ipfs/${res.path}` diff --git a/packages/ipfs-core/src/components/files/utils/to-trail.js b/packages/ipfs-core/src/components/files/utils/to-trail.js index 387c50a920..4f3b5d9282 100644 --- a/packages/ipfs-core/src/components/files/utils/to-trail.js +++ b/packages/ipfs-core/src/components/files/utils/to-trail.js @@ -7,7 +7,7 @@ const log = require('debug')('ipfs:mfs:utils:to-trail') * @typedef {import('../').MfsContext} MfsContext * @typedef {object} MfsTrail * @property {string} name - * @property {import('cids')} cid + * @property {import('multiformats/cid').CID} cid * @property {number} [size] * @property {string} [type] * @@ -24,20 +24,11 @@ const toTrail = async (context, path) => { const output = [] - for await (const fsEntry of walkPath(path, context.ipld)) { - let size - - // TODO: include `.size` property in unixfs-exporter output - if (fsEntry.node 
instanceof Uint8Array) { - size = fsEntry.node.length - } else { - size = fsEntry.node.size - } - + for await (const fsEntry of walkPath(path, context.repo.blocks)) { output.push({ name: fsEntry.name, cid: fsEntry.cid, - size, + size: fsEntry.size, type: fsEntry.type }) } diff --git a/packages/ipfs-core/src/components/files/utils/update-mfs-root.js b/packages/ipfs-core/src/components/files/utils/update-mfs-root.js index b96ba32983..47ba7e8abc 100644 --- a/packages/ipfs-core/src/components/files/utils/update-mfs-root.js +++ b/packages/ipfs-core/src/components/files/utils/update-mfs-root.js @@ -12,7 +12,7 @@ const errCode = require('err-code') /** * @param {MfsContext} context - * @param {import('cids')} cid + * @param {import('multiformats/cid').CID} cid * @param {import('ipfs-core-types/src/utils').AbortOptions} options */ const updateMfsRoot = async (context, cid, options) => { diff --git a/packages/ipfs-core/src/components/files/utils/update-tree.js b/packages/ipfs-core/src/components/files/utils/update-tree.js index 226f9c79a8..443a7724a4 100644 --- a/packages/ipfs-core/src/components/files/utils/update-tree.js +++ b/packages/ipfs-core/src/components/files/utils/update-tree.js @@ -2,15 +2,17 @@ const log = require('debug')('ipfs:mfs:utils:update-tree') const addLink = require('./add-link') +const { + decode +} = require('@ipld/dag-pb') const defaultOptions = { shardSplitThreshold: 1000 } /** - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids')} CID - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CID} CID + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('../').MfsContext} MfsContext * @typedef {import('./to-trail').MfsTrail} MfsTrail */ @@ -22,7 +24,7 @@ const defaultOptions = { * @param {MfsTrail[]} trail * @param {object} options * @param {number} options.shardSplitThreshold - * @param {HashName} options.hashAlg + * @param {string} options.hashAlg * 
@param {CIDVersion} options.cidVersion * @param {boolean} options.flush */ @@ -35,7 +37,8 @@ const updateTree = async (context, trail, options) => { let index = 0 let child - for await (const node of context.ipld.getMany(trail.map(node => node.cid))) { + for await (const block of context.repo.blocks.getMany(trail.map(node => node.cid))) { + const node = decode(block) const cid = trail[index].cid const name = trail[index].name index++ @@ -44,7 +47,7 @@ const updateTree = async (context, trail, options) => { child = { cid, name, - size: node.size + size: block.length } continue @@ -55,6 +58,7 @@ const updateTree = async (context, trail, options) => { parent: node, name: child.name, cid: child.cid, + // TODO vmx 2021-04-05: check what to do with the size size: child.size, flush: options.flush, shardSplitThreshold: options.shardSplitThreshold, @@ -66,6 +70,7 @@ const updateTree = async (context, trail, options) => { child = { cid: result.cid, name, + // TODO vmx 2021-04-05: check what to do with the size size: result.size } } diff --git a/packages/ipfs-core/src/components/files/utils/with-mfs-root.js b/packages/ipfs-core/src/components/files/utils/with-mfs-root.js index 234016f18c..f9b66146de 100644 --- a/packages/ipfs-core/src/components/files/utils/with-mfs-root.js +++ b/packages/ipfs-core/src/components/files/utils/with-mfs-root.js @@ -1,13 +1,10 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const { UnixFS } = require('ipfs-unixfs') -const { - DAGNode -} = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') +const { sha256 } = require('multiformats/hashes/sha2') const log = require('debug')('ipfs:mfs:utils:with-mfs-root') -const mc = require('multicodec') -const mh = require('multihashing-async').multihash const errCode = require('err-code') const { @@ -36,18 +33,20 @@ const loadMfsRoot = async (context, options) => { try { const buf = await context.repo.datastore.get(MFS_ROOT_KEY) - cid = new CID(buf) + cid = 
CID.decode(buf) } catch (err) { if (err.code !== 'ERR_NOT_FOUND') { throw err } log('Creating new MFS root') - const node = new DAGNode(new UnixFS({ type: 'directory' }).marshal()) - cid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] // why can't ipld look this up? + const buf = dagPb.encode({ + Data: new UnixFS({ type: 'directory' }).marshal(), + Links: [] }) + const hash = await sha256.digest(buf) + cid = CID.createV0(hash) + await context.repo.blocks.put(cid, buf) if (options && options.signal && options.signal.aborted) { throw errCode(new Error('Request aborted'), 'ERR_ABORTED', { name: 'Aborted' }) diff --git a/packages/ipfs-core/src/components/files/write.js b/packages/ipfs-core/src/components/files/write.js index c24ffd86a2..e3d1330164 100644 --- a/packages/ipfs-core/src/components/files/write.js +++ b/packages/ipfs-core/src/components/files/write.js @@ -2,6 +2,11 @@ const log = require('debug')('ipfs:mfs:write') const { importer } = require('ipfs-unixfs-importer') +const { + decode +// @ts-ignore - TODO vmx 2021-03-31 +} = require('@ipld/dag-pb') +const { sha256, sha512 } = require('multiformats/hashes/sha2') const stat = require('./stat') const mkdir = require('./mkdir') const addLink = require('./utils/add-link') @@ -25,12 +30,13 @@ const { } = require('ipfs-unixfs') /** - * @typedef {import('multihashes').HashName} HashName - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('ipfs-unixfs').MtimeLike} MtimeLike * @typedef {import('./').MfsContext} MfsContext * @typedef {import('./utils/to-mfs-path').FilePath} FilePath * @typedef {import('./utils/to-mfs-path').MfsPath} MfsPath + * @typedef {import('multiformats/hashes/interface').MultihashHasher} MultihashHasher + * * @typedef {object} DefaultOptions * @property {number} offset * @property {number} length @@ -39,7 +45,7 @@ const { * @property {boolean} rawLeaves * @property 
{boolean} reduceSingleLeafToSelf * @property {CIDVersion} cidVersion - * @property {HashName} hashAlg + * @property {string} hashAlg * @property {boolean} parents * @property {import('ipfs-core-types/src/root').AddProgressFn} progress * @property {'trickle' | 'balanced'} strategy @@ -173,7 +179,8 @@ const updateOrImport = async (context, path, source, destination, options) => { throw errCode(new Error(`cannot write to ${parent.name}: Not a directory`), 'ERR_NOT_A_DIRECTORY') } - const parentNode = await context.ipld.get(parent.cid) + const parentBlock = await context.repo.blocks.get(parent.cid) + const parentNode = decode(parentBlock) const result = await addLink(context, { parent: parentNode, @@ -286,21 +293,32 @@ const write = async (context, source, destination, options) => { mtime = destination.unixfs.mtime } + let hasher + switch (options.hashAlg) { + case 'sha2-256': + hasher = sha256 + break + case 'sha2-512': + hasher = sha512 + break + default: + throw new Error(`TODO vmx 2021-03-31: Proper error message for unsupported hash algorithms like ${options.hashAlg}`) + } + const result = await last(importer([{ content: content, // persist mode & mtime if set previously mode, mtime - }], context.block, { + }], context.repo.blocks, { progress: options.progress, - hashAlg: options.hashAlg, + hasher, cidVersion: options.cidVersion, strategy: options.strategy, rawLeaves: options.rawLeaves, reduceSingleLeafToSelf: options.reduceSingleLeafToSelf, - leafType: options.leafType, - pin: false + leafType: options.leafType })) if (!result) { diff --git a/packages/ipfs-core/src/components/gc-lock.js b/packages/ipfs-core/src/components/gc-lock.js deleted file mode 100644 index 16a7053c2c..0000000000 --- a/packages/ipfs-core/src/components/gc-lock.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict' - -// @ts-ignore - no types -const mortice = require('mortice') - -/** - * @param {Object} config - * @param {string} config.path - * @param {boolean} [config.repoOwner] - * @returns 
{GCLock} - */ -module.exports = ({ path, repoOwner }) => - mortice(path, { - singleProcess: repoOwner !== false - }) - -/** - * @typedef {RWLock} GCLock - * - * @typedef {Object} RWLock - * @property {() => Promise} readLock - * @property {() => Promise} writeLock - * - * @typedef {() => void} Lock - */ diff --git a/packages/ipfs-core/src/components/get.js b/packages/ipfs-core/src/components/get.js index 0c972902b1..df1421dc1f 100644 --- a/packages/ipfs-core/src/components/get.js +++ b/packages/ipfs-core/src/components/get.js @@ -4,16 +4,16 @@ const exporter = require('ipfs-unixfs-exporter') const errCode = require('err-code') const { normalizeCidPath, mapFile } = require('../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const CID = require('cids') +const { CID } = require('multiformats/cid') /** * @typedef {Object} Context - * @property {import('ipld')} ipld + * @property {import('ipfs-repo').IPFSRepo} repo * @property {import('../types').Preload} preload * * @param {Context} context */ -module.exports = function ({ ipld, preload }) { +module.exports = function ({ repo, preload }) { /** * @type {import('ipfs-core-types/src/root').API["get"]} */ @@ -27,10 +27,12 @@ module.exports = function ({ ipld, preload }) { throw errCode(err, 'ERR_INVALID_PATH') } - preload(new CID(pathComponents[0])) + preload(CID.parse(pathComponents[0])) } - for await (const file of exporter.recursive(ipfsPath, ipld, options)) { + const ipfsPathOrCid = CID.asCID(ipfsPath) || ipfsPath + + for await (const file of exporter.recursive(ipfsPathOrCid, repo.blocks, options)) { yield mapFile(file, { ...options, includeContent: true diff --git a/packages/ipfs-core/src/components/index.js b/packages/ipfs-core/src/components/index.js index 051f2b6819..05a861ea0a 100644 --- a/packages/ipfs-core/src/components/index.js +++ b/packages/ipfs-core/src/components/index.js @@ -1,11 +1,15 @@ 'use strict' -const { mergeOptions } = require('../utils') +const mergeOptions = 
require('merge-options').bind({ ignoreUndefined: true }) const { isTest } = require('ipfs-utils/src/env') const log = require('debug')('ipfs') const errCode = require('err-code') -const { DAGNode } = require('ipld-dag-pb') const { UnixFS } = require('ipfs-unixfs') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const { identity } = require('multiformats/hashes/identity') +const { bases, hashes, codecs } = require('multiformats/basics') + const initAssets = require('../runtime/init-assets-nodejs') const { AlreadyInitializedError } = require('../errors') const uint8ArrayFromString = require('uint8arrays/from-string') @@ -28,7 +32,6 @@ const createVersionAPI = require('./version') const createIDAPI = require('./id') const createConfigAPI = require('./config') const DagAPI = require('./dag') -const PinManagerAPI = require('./pin/pin-manager') const createPreloadAPI = require('../preload') const createMfsPreloadAPI = require('../mfs-preload') const createFilesAPI = require('./files') @@ -36,21 +39,22 @@ const KeyAPI = require('./key') const ObjectAPI = require('./object') const RepoAPI = require('./repo') const StatsAPI = require('./stats') -const BlockService = require('ipfs-block-service') -const createIPLD = require('./ipld') const Storage = require('./storage') const Network = require('./network') const Service = require('../utils/service') const SwarmAPI = require('./swarm') -const createGCLockAPI = require('./gc-lock') const createPingAPI = require('./ping') const createDHTAPI = require('./dht') const createPubSubAPI = require('./pubsub') +const Multicodecs = require('ipfs-core-utils/src/multicodecs') +const Multihashes = require('ipfs-core-utils/src/multihashes') +const Multibases = require('ipfs-core-utils/src/multibases') /** * @typedef {import('../types').Options} Options * @typedef {import('../types').Print} Print * @typedef {import('./storage')} StorageAPI + * @typedef {import('multiformats/codecs/interface').BlockCodec} 
BlockCodec */ class IPFS { @@ -58,59 +62,61 @@ class IPFS { * @param {Object} config * @param {Print} config.print * @param {StorageAPI} config.storage + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs * @param {Options} config.options */ - constructor ({ print, storage, options }) { + constructor ({ print, storage, codecs, options }) { const { peerId, repo, keychain } = storage const network = Service.create(Network) const preload = createPreloadAPI(options.preload) - const blockService = new BlockService(storage.repo) - const ipld = createIPLD({ blockService, options: options.ipld }) - - const gcLock = createGCLockAPI({ - path: repo.path, - repoOwner: options.repoOwner - }) const dns = createDNSAPI() const isOnline = createIsOnlineAPI({ network }) // @ts-ignore This type check fails as options. // libp2p can be a function, while IPNS router config expects libp2p config const ipns = new IPNSAPI(options) + this.hashers = new Multihashes({ + hashers: Object.values(hashes).concat(options.ipld && options.ipld.hashers ? options.ipld.hashers : []), + loadHasher: options.ipld && options.ipld.loadHasher + }) + + this.bases = new Multibases({ + bases: Object.values(bases).concat(options.ipld && options.ipld.bases ? 
options.ipld.bases : []), + loadBase: options.ipld && options.ipld.loadBase + }) + + const pin = new PinAPI({ repo, codecs }) + const block = new BlockAPI({ codecs, hashers: this.hashers, preload, repo }) + const name = new NameAPI({ dns, ipns, - ipld, + repo, + codecs, peerId, isOnline, keychain, options }) - const resolve = createResolveAPI({ ipld, name }) - const pinManager = new PinManagerAPI({ repo, ipld }) - const pin = new PinAPI({ gcLock, pinManager, ipld }) - const block = new BlockAPI({ blockService, preload, gcLock, pinManager, pin }) - const dag = new DagAPI({ ipld, preload, gcLock, pin }) - const refs = Object.assign(createRefsAPI({ ipld, resolve, preload }), { + + const resolve = createResolveAPI({ repo, codecs, bases: this.bases, name }) + + const dag = new DagAPI({ repo, codecs, hashers: this.hashers, preload }) + const refs = Object.assign(createRefsAPI({ repo, codecs, resolve, preload }), { local: createRefsLocalAPI({ repo: storage.repo }) }) const { add, addAll, cat, get, ls } = new RootAPI({ - gcLock, preload, - pin, - block, - ipld, + repo, options: options.EXPERIMENTAL }) const files = createFilesAPI({ - ipld, - block, - blockService, repo, preload, + hashers: this.hashers, options }) @@ -122,7 +128,6 @@ class IPFS { this.preload = preload this.name = name - this.ipld = ipld this.ipns = ipns this.pin = pin this.resolve = resolve @@ -133,7 +138,6 @@ class IPFS { network, peerId, repo, - blockService, preload, ipns, mfsPreload, @@ -146,7 +150,6 @@ class IPFS { network, preload, mfsPreload, - blockService, ipns, repo }) @@ -171,8 +174,8 @@ class IPFS { this.dag = dag this.files = files this.key = new KeyAPI({ keychain }) - this.object = new ObjectAPI({ ipld, preload, gcLock }) - this.repo = new RepoAPI({ gcLock, pin, repo, refs }) + this.object = new ObjectAPI({ preload, codecs, repo }) + this.repo = new RepoAPI({ repo, hashers: this.hashers }) this.stats = new StatsAPI({ repo, network }) this.swarm = new SwarmAPI({ network }) @@ -199,6 +202,8 @@ 
class IPFS { tail: notImplementedIter } this.mount = notImplemented + + this.codecs = codecs } /** @@ -218,14 +223,30 @@ class IPFS { options = mergeOptions(getDefaultOptions(), options) const initOptions = options.init || {} + /** + * @type {BlockCodec} + */ + const id = { + name: identity.name, + code: identity.code, + encode: (id) => id, + decode: (id) => id + } + + const multicodecs = new Multicodecs({ + codecs: Object.values(codecs).concat([dagPb, dagCbor, id]).concat((options.ipld && options.ipld.codecs) || []), + loadCodec: options.ipld && options.ipld.loadCodec + }) + // eslint-disable-next-line no-console const print = options.silent ? log : console.log - const storage = await Storage.start(print, options) + const storage = await Storage.start(print, multicodecs, options) const config = await storage.repo.config.getAll() const ipfs = new IPFS({ storage, print, + codecs: multicodecs, options: { ...options, config } }) @@ -258,12 +279,14 @@ module.exports = IPFS * @param {IPFS} ipfs */ const addEmptyDir = async (ipfs) => { - const node = new DAGNode(new UnixFS({ type: 'directory' }).marshal()) - const cid = await ipfs.dag.put(node, { - version: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - preload: false + const buf = dagPb.encode({ + Data: new UnixFS({ type: 'directory' }).marshal(), + Links: [] + }) + + const cid = await ipfs.block.put(buf, { + mhtype: 'sha2-256', + format: 'dag-pb' }) await ipfs.pin.add(cid) diff --git a/packages/ipfs-core/src/components/ipld.js b/packages/ipfs-core/src/components/ipld.js deleted file mode 100644 index 4f9f688fca..0000000000 --- a/packages/ipfs-core/src/components/ipld.js +++ /dev/null @@ -1,15 +0,0 @@ -'use strict' - -const getDefaultIpldOptions = require('../runtime/ipld') -const Ipld = require('ipld') - -/** - * @param {Object} config - * @param {import('ipfs-block-service')} config.blockService - * @param {Partial} [config.options] - */ -const createIPLD = ({ blockService, options }) => { - return new 
Ipld(getDefaultIpldOptions(blockService, options)) -} - -module.exports = createIPLD diff --git a/packages/ipfs-core/src/components/ipns.js b/packages/ipfs-core/src/components/ipns.js index 2f85ed61f8..a5b70eadc9 100644 --- a/packages/ipfs-core/src/components/ipns.js +++ b/packages/ipfs-core/src/components/ipns.js @@ -58,7 +58,7 @@ class IPNSAPI { * initializeKeyspace feature. * * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('peer-id')} config.peerId * @param {import('libp2p/src/keychain')} config.keychain */ @@ -78,7 +78,7 @@ class IPNSAPI { /** * @param {Object} config * @param {import('libp2p')} config.libp2p - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('peer-id')} config.peerId * @param {import('libp2p/src/keychain')} config.keychain */ diff --git a/packages/ipfs-core/src/components/libp2p.js b/packages/ipfs-core/src/components/libp2p.js index de7098ba09..7967153011 100644 --- a/packages/ipfs-core/src/components/libp2p.js +++ b/packages/ipfs-core/src/components/libp2p.js @@ -1,22 +1,27 @@ 'use strict' const get = require('dlv') -const mergeOptions = require('merge-options') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const errCode = require('err-code') const PubsubRouters = require('../runtime/libp2p-pubsub-routers-nodejs') +// @ts-ignore - no types +const DelegatedPeerRouter = require('libp2p-delegated-peer-routing') +// @ts-ignore - no types +const DelegatedContentRouter = require('libp2p-delegated-content-routing') +const { create: ipfsHttpClient } = require('ipfs-http-client') +const { Multiaddr } = require('multiaddr') const pkgversion = require('../../package.json').version /** * @typedef {Object} KeychainConfig * @property {string} [pass] * - * @typedef {import('ipfs-repo')} Repo + * @typedef {import('ipfs-repo').IPFSRepo} Repo * @typedef {import('peer-id')} 
PeerId * @typedef {import('../types').Options} IPFSOptions * @typedef {import('libp2p')} LibP2P * @typedef {import('libp2p').Libp2pOptions & import('libp2p').CreateOptions} Libp2pOptions * @typedef {import('ipfs-core-types/src/config').Config} IPFSConfig - * @typedef {import('multiaddr').Multiaddr} Multiaddr */ /** @@ -89,8 +94,13 @@ function getLibp2pOptions ({ options, config, datastore, keys, keychainConfig, p } const libp2pOptions = { + /** + * @type {Partial} + */ modules: { - pubsub: getPubsubRouter() + pubsub: getPubsubRouter(), + contentRouting: [], + peerRouting: [] }, config: { peerDiscovery: { @@ -166,5 +176,31 @@ function getLibp2pOptions ({ options, config, datastore, keys, keychainConfig, p libp2pConfig.modules.peerDiscovery.push(require('libp2p-bootstrap')) } + // Set up Delegate Routing based on the presence of Delegates in the config + const delegateHosts = get(options, 'config.Addresses.Delegates', + get(config, 'Addresses.Delegates', []) + ) + + if (delegateHosts.length > 0) { + // Pick a random delegate host + const delegateString = delegateHosts[Math.floor(Math.random() * delegateHosts.length)] + const delegateAddr = new Multiaddr(delegateString).toOptions() + const delegateApiOptions = { + host: delegateAddr.host, + // port is a string atm, so we need to convert for the check + // @ts-ignore - parseInt(input:string) => number + protocol: parseInt(delegateAddr.port) === 443 ? 
'https' : 'http', + port: delegateAddr.port + } + + const delegateHttpClient = ipfsHttpClient(delegateApiOptions) + + libp2pOptions.modules.contentRouting = libp2pOptions.modules.contentRouting || [] + libp2pOptions.modules.contentRouting.push(new DelegatedContentRouter(peerId, delegateHttpClient)) + + libp2pOptions.modules.peerRouting = libp2pOptions.modules.peerRouting || [] + libp2pOptions.modules.peerRouting.push(new DelegatedPeerRouter(delegateHttpClient)) + } + return libp2pConfig } diff --git a/packages/ipfs-core/src/components/ls.js b/packages/ipfs-core/src/components/ls.js index d7d703d8b2..60570cff46 100644 --- a/packages/ipfs-core/src/components/ls.js +++ b/packages/ipfs-core/src/components/ls.js @@ -4,28 +4,30 @@ const { exporter, recursive } = require('ipfs-unixfs-exporter') const errCode = require('err-code') const { normalizeCidPath, mapFile } = require('../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const CID = require('cids') +const { CID } = require('multiformats/cid') /** * @typedef {Object} Context - * @property {import('ipld')} ipld + * @property {import('ipfs-repo').IPFSRepo} repo * @property {import('../types').Preload} preload * * @param {Context} context */ -module.exports = function ({ ipld, preload }) { +module.exports = function ({ repo, preload }) { /** * @type {import('ipfs-core-types/src/root').API["ls"]} */ async function * ls (ipfsPath, options = {}) { - const path = normalizeCidPath(ipfsPath) - const pathComponents = path.split('/') + const legacyPath = normalizeCidPath(ipfsPath) + const pathComponents = legacyPath.split('/') if (options.preload !== false) { - preload(new CID(pathComponents[0])) + preload(CID.parse(pathComponents[0])) } - const file = await exporter(ipfsPath, ipld, options) + const ipfsPathOrCid = CID.asCID(legacyPath) || legacyPath + + const file = await exporter(ipfsPathOrCid, repo.blocks, options) if (file.type === 'file') { yield mapFile(file, options) @@ -34,8 +36,8 @@ 
module.exports = function ({ ipld, preload }) { if (file.type === 'directory') { if (options.recursive) { - for await (const child of recursive(file.cid, ipld, options)) { - if (file.cid.toBaseEncodedString() === child.cid.toBaseEncodedString()) { + for await (const child of recursive(file.cid, repo.blocks, options)) { + if (file.cid.toString() === child.cid.toString()) { continue } diff --git a/packages/ipfs-core/src/components/name/index.js b/packages/ipfs-core/src/components/name/index.js index 58fa79e72f..9a8819968e 100644 --- a/packages/ipfs-core/src/components/name/index.js +++ b/packages/ipfs-core/src/components/name/index.js @@ -10,13 +10,14 @@ class NameAPI { * @param {import('../ipns')} config.ipns * @param {import('peer-id')} config.peerId * @param {import('../../types').Options} config.options - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs * @param {import('ipfs-core-types/src/root').API["isOnline"]} config.isOnline * @param {import('libp2p/src/keychain')} config.keychain * @param {import('ipfs-core-types/src/root').API["dns"]} config.dns */ - constructor ({ dns, ipns, ipld, peerId, isOnline, keychain, options }) { - this.publish = createPublishAPI({ ipns, ipld, peerId, isOnline, keychain }) + constructor ({ dns, ipns, repo, codecs, peerId, isOnline, keychain, options }) { + this.publish = createPublishAPI({ ipns, repo, codecs, peerId, isOnline, keychain }) this.resolve = createResolveAPI({ dns, ipns, peerId, isOnline, options }) this.pubsub = new PubSubAPI({ ipns, options }) } diff --git a/packages/ipfs-core/src/components/name/publish.js b/packages/ipfs-core/src/components/name/publish.js index 3f19d3798c..c26d58d29a 100644 --- a/packages/ipfs-core/src/components/name/publish.js +++ b/packages/ipfs-core/src/components/name/publish.js @@ -20,12 +20,13 @@ const { resolvePath } = require('./utils') * * @param {Object} config * @param 
{import('../ipns')} config.ipns - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs * @param {import('peer-id')} config.peerId * @param {import('ipfs-core-types/src/root').API["isOnline"]} config.isOnline * @param {import('libp2p/src/keychain')} config.keychain */ -module.exports = ({ ipns, ipld, peerId, isOnline, keychain }) => { +module.exports = ({ ipns, repo, codecs, peerId, isOnline, keychain }) => { /** * @param {string} keyName */ @@ -82,7 +83,7 @@ module.exports = ({ ipns, ipld, peerId, isOnline, keychain }) => { // verify if the path exists, if not, an error will stop the execution lookupKey(key), // if resolving, do a get so we make sure we have the blocks - resolve ? resolvePath({ ipns, ipld }, value) : Promise.resolve() + resolve ? resolvePath({ ipns, repo, codecs }, value) : Promise.resolve() ]) const bytes = uint8ArrayFromString(value) diff --git a/packages/ipfs-core/src/components/name/resolve.js b/packages/ipfs-core/src/components/name/resolve.js index a6e7a81f43..7a665a21e7 100644 --- a/packages/ipfs-core/src/components/name/resolve.js +++ b/packages/ipfs-core/src/components/name/resolve.js @@ -2,8 +2,9 @@ const debug = require('debug') const errcode = require('err-code') -const { mergeOptions } = require('../../utils') -const CID = require('cids') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) +const { CID } = require('multiformats/cid') +const PeerId = require('peer-id') // @ts-ignore no types const isDomain = require('is-domain-name') const uint8ArrayToString = require('uint8arrays/to-string') @@ -62,7 +63,11 @@ module.exports = ({ dns, ipns, peerId, isOnline, options: { offline } }) => { const [namespace, hash, ...remainder] = name.slice(1).split('/') try { - new CID(hash) // eslint-disable-line no-new + if (hash.substring(0, 1) === '1') { + PeerId.parse(hash) + } else { + CID.parse(hash) + } } catch 
(err) { // lets check if we have a domain ex. /ipns/ipfs.io and resolve with dns if (isDomain(hash)) { diff --git a/packages/ipfs-core/src/components/name/utils.js b/packages/ipfs-core/src/components/name/utils.js index 18f8b48910..321ef248d2 100644 --- a/packages/ipfs-core/src/components/name/utils.js +++ b/packages/ipfs-core/src/components/name/utils.js @@ -3,6 +3,7 @@ const isIPFS = require('is-ipfs') const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') const drain = require('it-drain') +const { resolve } = require('../../utils') /** * resolves the given path by parsing out protocol-specific entries @@ -10,10 +11,12 @@ const drain = require('it-drain') * * @param {Object} context * @param {import('../ipns')} context.ipns - * @param {import('ipld')} context.ipld + * @param {import('ipfs-repo').IPFSRepo} context.repo + * @param {import('ipfs-core-utils/src/multicodecs')} context.codecs * @param {string} name + * @param {import('ipfs-core-types/src/utils').AbortOptions} [options] */ -exports.resolvePath = async ({ ipns, ipld }, name) => { +exports.resolvePath = async ({ ipns, repo, codecs }, name, options) => { // ipns path if (isIPFS.ipnsPath(name)) { return ipns.resolve(name) @@ -25,5 +28,5 @@ exports.resolvePath = async ({ ipns, ipld }, name) => { } = toCidAndPath(name) // ipfs path - await drain(ipld.resolve(cid, path || '')) + await drain(resolve(cid, path || '', codecs, repo, options)) } diff --git a/packages/ipfs-core/src/components/network.js b/packages/ipfs-core/src/components/network.js index 1516fb9fc4..bc85c4f188 100644 --- a/packages/ipfs-core/src/components/network.js +++ b/packages/ipfs-core/src/components/network.js @@ -1,9 +1,10 @@ 'use strict' -const IPFSBitswap = require('ipfs-bitswap') +const { createBitswap } = require('ipfs-bitswap') const createLibP2P = require('./libp2p') const { Multiaddr } = require('multiaddr') const errCode = require('err-code') +const BlockStorage = require('../block-storage') /** * @typedef {Object} 
Online @@ -18,10 +19,10 @@ const errCode = require('err-code') * * @typedef {import('ipfs-core-types/src/config').Config} IPFSConfig * @typedef {import('../types').Options} IPFSOptions - * @typedef {import('ipfs-repo')} Repo + * @typedef {import('ipfs-repo').IPFSRepo} Repo * @typedef {import('../types').Print} Print * @typedef {import('libp2p')} libp2p - * @typedef {import('ipfs-bitswap')} Bitswap + * @typedef {import('ipfs-bitswap').IPFSBitswap} Bitswap * @typedef {import('peer-id')} PeerId * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions */ @@ -31,11 +32,15 @@ class Network { * @param {PeerId} peerId * @param {libp2p} libp2p * @param {Bitswap} bitswap + * @param {Repo} repo + * @param {BlockStorage} blockstore */ - constructor (peerId, libp2p, bitswap) { + constructor (peerId, libp2p, bitswap, repo, blockstore) { this.peerId = peerId this.libp2p = libp2p this.bitswap = bitswap + this.repo = repo + this.blockstore = blockstore } /** @@ -70,16 +75,25 @@ class Network { print(`Swarm listening on ${ma}/p2p/${peerId.toB58String()}`) } - const bitswap = new IPFSBitswap(libp2p, repo.blocks, { statsEnabled: true }) + const bitswap = createBitswap(libp2p, repo.blocks, { statsEnabled: true }) await bitswap.start() - return new Network(peerId, libp2p, bitswap) + const blockstore = new BlockStorage(repo.blocks, bitswap) + repo.blocks = blockstore + // @ts-ignore private field + repo.pins.blockstore = blockstore + + return new Network(peerId, libp2p, bitswap, repo, blockstore) } /** * @param {Network} network */ static async stop (network) { + network.repo.blocks = network.blockstore.unwrap() + // @ts-ignore private field + network.repo.pins.blockstore = network.blockstore.unwrap() + await Promise.all([ network.bitswap.stop(), network.libp2p.stop() diff --git a/packages/ipfs-core/src/components/object/data.js b/packages/ipfs-core/src/components/object/data.js index 4894a99ee4..f5052a4163 100644 --- a/packages/ipfs-core/src/components/object/data.js 
+++ b/packages/ipfs-core/src/components/object/data.js @@ -4,18 +4,18 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').Preload} config.preload */ -module.exports = ({ ipld, preload }) => { - const get = require('./get')({ ipld, preload }) +module.exports = ({ repo, preload }) => { + const get = require('./get')({ repo, preload }) /** * @type {import('ipfs-core-types/src/object').API["data"]} */ async function data (multihash, options = {}) { const node = await get(multihash, options) - return node.Data + return node.Data || new Uint8Array(0) } return withTimeoutOption(data) diff --git a/packages/ipfs-core/src/components/object/get.js b/packages/ipfs-core/src/components/object/get.js index 7633b71b60..8e9ca6c962 100644 --- a/packages/ipfs-core/src/components/object/get.js +++ b/packages/ipfs-core/src/components/object/get.js @@ -1,36 +1,25 @@ 'use strict' -const CID = require('cids') -const errCode = require('err-code') +const dagPb = require('@ipld/dag-pb') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -/** - * @typedef {import('multibase').BaseName} BaseName - */ - /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').Preload} config.preload */ -module.exports = ({ ipld, preload }) => { +module.exports = ({ repo, preload }) => { /** * @type {import('ipfs-core-types/src/object').API["get"]} */ - async function get (multihash, options = {}) { // eslint-disable-line require-await - let cid - - try { - cid = new CID(multihash) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - + async function get (cid, options = {}) { // eslint-disable-line require-await if (options.preload !== false) { preload(cid) } - return ipld.get(cid, { signal: 
options.signal }) + const block = await repo.blocks.get(cid, options) + + return dagPb.decode(block) } return withTimeoutOption(get) diff --git a/packages/ipfs-core/src/components/object/index.js b/packages/ipfs-core/src/components/object/index.js index 7587da1b82..dfb9749ee3 100644 --- a/packages/ipfs-core/src/components/object/index.js +++ b/packages/ipfs-core/src/components/object/index.js @@ -9,28 +9,26 @@ const createStat = require('./stat') const ObjectPatchAPI = require('./patch') /** - * @typedef {import('ipld')} IPLD * @typedef {import('../../types').Preload} Preload - * @typedef {import('../gc-lock').GCLock} GCLock - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions */ class ObjectAPI { /** * @param {Object} config - * @param {IPLD} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs * @param {Preload} config.preload - * @param {GCLock} config.gcLock */ - constructor ({ ipld, preload, gcLock }) { - this.data = createData({ ipld, preload }) - this.get = createGet({ ipld, preload }) - this.links = createLinks({ ipld }) - this.new = createNew({ ipld, preload }) - this.put = createPut({ ipld, preload, gcLock }) - this.stat = createStat({ ipld, preload }) - this.patch = new ObjectPatchAPI({ ipld, preload, gcLock }) + constructor ({ repo, codecs, preload }) { + this.data = createData({ repo, preload }) + this.get = createGet({ repo, preload }) + this.links = createLinks({ repo, codecs }) + this.new = createNew({ repo, preload }) + this.put = createPut({ repo, preload }) + this.stat = createStat({ repo, preload }) + this.patch = new ObjectPatchAPI({ repo, preload }) } } diff --git a/packages/ipfs-core/src/components/object/links.js b/packages/ipfs-core/src/components/object/links.js index 7713cb7243..28a14064ab 100644 --- a/packages/ipfs-core/src/components/object/links.js 
+++ b/packages/ipfs-core/src/components/object/links.js @@ -1,11 +1,15 @@ 'use strict' -const { - DAGLink -} = require('ipld-dag-pb') -const CID = require('cids') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const raw = require('multiformats/codecs/raw') +const { CID } = require('multiformats/cid') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('@ipld/dag-pb').PBLink} DAGLink + */ + /** * @param {any} node * @param {DAGLink[]} [links] @@ -17,15 +21,23 @@ function findLinks (node, links = []) { if (key === '/' && Object.keys(node).length === 1) { try { - links.push(new DAGLink('', 0, new CID(val))) + links.push({ + Name: '', + Tsize: 0, + Hash: CID.parse(val) + }) continue } catch (_) { // not a CID } } - if (CID.isCID(val)) { - links.push(new DAGLink('', 0, val)) + if (val instanceof CID) { + links.push({ + Name: '', + Tsize: 0, + Hash: val + }) continue } @@ -43,29 +55,31 @@ function findLinks (node, links = []) { /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs */ -module.exports = ({ ipld }) => { +module.exports = ({ repo, codecs }) => { /** * @type {import('ipfs-core-types/src/object').API["links"]} */ - async function links (multihash, options = {}) { - const cid = new CID(multihash) - const result = await ipld.get(cid, options) + async function links (cid, options = {}) { + const codec = await codecs.getCodec(cid.code) + const block = await repo.blocks.get(cid, options) + const node = codec.decode(block) - if (cid.codec === 'raw') { + if (cid.code === raw.code) { return [] } - if (cid.codec === 'dag-pb') { - return result.Links + if (cid.code === dagPb.code) { + return node.Links } - if (cid.codec === 'dag-cbor') { - return findLinks(result) + if (cid.code === dagCbor.code) { + return findLinks(node) } - throw new 
Error(`Cannot resolve links from codec ${cid.codec}`) + throw new Error(`Cannot resolve links from codec ${cid.code}`) } return withTimeoutOption(links) diff --git a/packages/ipfs-core/src/components/object/new.js b/packages/ipfs-core/src/components/object/new.js index 2289c100c8..0cb5e92a36 100644 --- a/packages/ipfs-core/src/components/object/new.js +++ b/packages/ipfs-core/src/components/object/new.js @@ -1,19 +1,17 @@ 'use strict' -const { - DAGNode -} = require('ipld-dag-pb') -const multicodec = require('multicodec') -const mh = require('multihashing-async').multihash +const dagPb = require('@ipld/dag-pb') +const { sha256 } = require('multiformats/hashes/sha2') const { UnixFS } = require('ipfs-unixfs') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +const { CID } = require('multiformats/cid') /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').Preload} config.preload */ -module.exports = ({ ipld, preload }) => { +module.exports = ({ repo, preload }) => { /** * @type {import('ipfs-core-types/src/object').API["new"]} */ @@ -26,15 +24,16 @@ module.exports = ({ ipld, preload }) => { } else { throw new Error('unknown template') } - } else { - data = new Uint8Array(0) } - const node = new DAGNode(data) + const buf = dagPb.encode({ + Data: data, + Links: [] + }) + const hash = await sha256.digest(buf) + const cid = CID.createV0(hash) - const cid = await ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'], + await repo.blocks.put(cid, buf, { signal: options.signal }) diff --git a/packages/ipfs-core/src/components/object/patch/add-link.js b/packages/ipfs-core/src/components/object/patch/add-link.js index ec9aca29a0..f3193adf69 100644 --- a/packages/ipfs-core/src/components/object/patch/add-link.js +++ b/packages/ipfs-core/src/components/object/patch/add-link.js @@ -4,21 +4,23 @@ const withTimeoutOption = 
require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../../types').Preload} config.preload - * @param {import('.').GCLock} config.gcLock */ -module.exports = ({ ipld, gcLock, preload }) => { - const get = require('../get')({ ipld, preload }) - const put = require('../put')({ ipld, gcLock, preload }) +module.exports = ({ repo, preload }) => { + const get = require('../get')({ repo, preload }) + const put = require('../put')({ repo, preload }) /** * @type {import('ipfs-core-types/src/object/patch').API["addLink"]} */ - async function addLink (multihash, link, options = {}) { - const node = await get(multihash, options) - node.addLink(link) - return put(node, options) + async function addLink (cid, link, options = {}) { + const node = await get(cid, options) + + return put({ + ...node, + Links: node.Links.concat([link]) + }, options) } return withTimeoutOption(addLink) diff --git a/packages/ipfs-core/src/components/object/patch/append-data.js b/packages/ipfs-core/src/components/object/patch/append-data.js index 7f7286c851..b9096e600e 100644 --- a/packages/ipfs-core/src/components/object/patch/append-data.js +++ b/packages/ipfs-core/src/components/object/patch/append-data.js @@ -1,26 +1,28 @@ 'use strict' -const { DAGNode } = require('ipld-dag-pb') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const uint8ArrayConcat = require('uint8arrays/concat') /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../../types').Preload} config.preload - * @param {import('.').GCLock} config.gcLock */ -module.exports = ({ ipld, gcLock, preload }) => { - const get = require('../get')({ ipld, preload }) - const put = require('../put')({ ipld, gcLock, preload }) +module.exports = ({ repo, preload }) => { + const get = 
require('../get')({ repo, preload }) + const put = require('../put')({ repo, preload }) /** * @type {import('ipfs-core-types/src/object/patch').API["appendData"]} */ - async function appendData (multihash, data, options = {}) { - const node = await get(multihash, options) - const newData = uint8ArrayConcat([node.Data, data]) - return put(new DAGNode(newData, node.Links), options) + async function appendData (cid, data, options = {}) { + const node = await get(cid, options) + const newData = uint8ArrayConcat([node.Data || [], data]) + + return put({ + ...node, + Data: newData + }, options) } return withTimeoutOption(appendData) diff --git a/packages/ipfs-core/src/components/object/patch/index.js b/packages/ipfs-core/src/components/object/patch/index.js index b8d4929459..7d81aaff5c 100644 --- a/packages/ipfs-core/src/components/object/patch/index.js +++ b/packages/ipfs-core/src/components/object/patch/index.js @@ -6,25 +6,21 @@ const createRmLink = require('./rm-link') const createSetData = require('./set-data') /** - * @typedef {import('ipld')} IPLD * @typedef {import('../../../types').Preload} Preload - * @typedef {import('..').GCLock} GCLock - * @typedef {import('cids')} CID - * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions */ class ObjectPatchAPI { /** * @param {Object} config - * @param {IPLD} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {Preload} config.preload - * @param {GCLock} config.gcLock */ - constructor ({ ipld, preload, gcLock }) { - this.addLink = createAddLink({ ipld, preload, gcLock }) - this.appendData = createAppendData({ ipld, preload, gcLock }) - this.rmLink = createRmLink({ ipld, preload, gcLock }) - this.setData = createSetData({ ipld, preload, gcLock }) + constructor ({ repo, preload }) { + this.addLink = createAddLink({ repo, preload }) + this.appendData = createAppendData({ repo, preload }) + this.rmLink = createRmLink({ repo, preload }) + this.setData = createSetData({ repo, preload 
}) } } + module.exports = ObjectPatchAPI diff --git a/packages/ipfs-core/src/components/object/patch/rm-link.js b/packages/ipfs-core/src/components/object/patch/rm-link.js index 28a2e1e79b..0f0eb2a194 100644 --- a/packages/ipfs-core/src/components/object/patch/rm-link.js +++ b/packages/ipfs-core/src/components/object/patch/rm-link.js @@ -4,21 +4,22 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../../types').Preload} config.preload - * @param {import('.').GCLock} config.gcLock */ -module.exports = ({ ipld, gcLock, preload }) => { - const get = require('../get')({ ipld, preload }) - const put = require('../put')({ ipld, gcLock, preload }) +module.exports = ({ repo, preload }) => { + const get = require('../get')({ repo, preload }) + const put = require('../put')({ repo, preload }) /** * @type {import('ipfs-core-types/src/object/patch').API["rmLink"]} */ async function rmLink (multihash, linkRef, options = {}) { const node = await get(multihash, options) - // @ts-ignore - loose input types - node.rmLink(linkRef.Name || linkRef.name || linkRef) + const name = (typeof linkRef === 'string' ? 
linkRef : linkRef.Name) || '' + + node.Links = node.Links.filter(l => l.Name !== name) + return put(node, options) } diff --git a/packages/ipfs-core/src/components/object/patch/set-data.js b/packages/ipfs-core/src/components/object/patch/set-data.js index 56ac8eb984..d13d2e4df6 100644 --- a/packages/ipfs-core/src/components/object/patch/set-data.js +++ b/packages/ipfs-core/src/components/object/patch/set-data.js @@ -1,24 +1,26 @@ 'use strict' -const { DAGNode } = require('ipld-dag-pb') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../../types').Preload} config.preload - * @param {import('.').GCLock} config.gcLock */ -module.exports = ({ ipld, gcLock, preload }) => { - const get = require('../get')({ ipld, preload }) - const put = require('../put')({ ipld, gcLock, preload }) +module.exports = ({ repo, preload }) => { + const get = require('../get')({ repo, preload }) + const put = require('../put')({ repo, preload }) /** * @type {import('ipfs-core-types/src/object/patch').API["setData"]} */ - async function setData (multihash, data, options = {}) { - const node = await get(multihash, options) - return put(new DAGNode(data, node.Links), options) + async function setData (cid, data, options = {}) { + const node = await get(cid, options) + + return put({ + ...node, + Data: data + }, options) } return withTimeoutOption(setData) diff --git a/packages/ipfs-core/src/components/object/put.js b/packages/ipfs-core/src/components/object/put.js index 9d92baf8b9..9f48f3ec06 100644 --- a/packages/ipfs-core/src/components/object/put.js +++ b/packages/ipfs-core/src/components/object/put.js @@ -1,110 +1,41 @@ 'use strict' -const { - DAGNode, - DAGLink, - util: DAGLinkUtil -} = require('ipld-dag-pb') -const mh = require('multihashing-async').multihash -const multicodec = require('multicodec') +const dagPb = 
require('@ipld/dag-pb') +const { CID } = require('multiformats/cid') +const { sha256 } = require('multiformats/hashes/sha2') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const uint8ArrayToString = require('uint8arrays/to-string') -const uint8ArrayFromString = require('uint8arrays/from-string') - -/** - * @param {Uint8Array} buf - * @param {import('ipfs-core-types/src/object').PutEncoding} encoding - */ -function parseBuffer (buf, encoding) { - switch (encoding) { - case 'json': - return parseJSONBuffer(buf) - case 'protobuf': - return parseProtoBuffer(buf) - default: - throw new Error(`unknown encoding: ${encoding}`) - } -} - -/** - * @param {Uint8Array} buf - */ -function parseJSONBuffer (buf) { - let data - let links - - try { - const parsed = JSON.parse(uint8ArrayToString(buf)) - - // @ts-ignore - loose input types - links = (parsed.Links || []).map((link) => { - return new DAGLink( - // @ts-ignore - loose input types - link.Name || link.name, - // @ts-ignore - loose input types - link.Size || link.size, - // @ts-ignore - loose input types - mh.fromB58String(link.Hash || link.hash || link.multihash) - ) - }) - - // @ts-ignore - loose input types - data = uint8ArrayFromString(parsed.Data) - } catch (err) { - throw new Error('failed to parse JSON: ' + err) - } - - return new DAGNode(data, links) -} - -/** - * @param {Uint8Array} buf - */ -function parseProtoBuffer (buf) { - return DAGLinkUtil.deserialize(buf) -} /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').Preload} config.preload - * @param {import('.').GCLock} config.gcLock */ -module.exports = ({ ipld, gcLock, preload }) => { +module.exports = ({ repo, preload }) => { /** * @type {import('ipfs-core-types/src/object').API["put"]} */ async function put (obj, options = {}) { - const encoding = options.enc - let node - - if (obj instanceof Uint8Array) { - if (encoding) { - 
node = await parseBuffer(obj, encoding) - } else { - node = new DAGNode(obj) - } - } else if (obj instanceof DAGNode) { - // already a dag node - node = obj - } else if (typeof obj === 'object') { - node = new DAGNode(obj.Data, obj.Links) - } else { - throw new Error('obj not recognized') - } - - const release = await gcLock.readLock() + const release = await repo.gcLock.readLock() try { - const cid = await ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] + const buf = dagPb.encode(obj) + const hash = await sha256.digest(buf) + const cid = CID.createV0(hash) + + await repo.blocks.put(cid, buf, { + signal: options.signal }) if (options.preload !== false) { preload(cid) } + if (options.pin) { + await repo.pins.pinRecursively(cid, { + signal: options.signal + }) + } + return cid } finally { release() diff --git a/packages/ipfs-core/src/components/object/stat.js b/packages/ipfs-core/src/components/object/stat.js index 43c8170e41..e02998e6a6 100644 --- a/packages/ipfs-core/src/components/object/stat.js +++ b/packages/ipfs-core/src/components/object/stat.js @@ -1,35 +1,31 @@ 'use strict' -const dagPB = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').Preload} config.preload */ -module.exports = ({ ipld, preload }) => { - const get = require('./get')({ ipld, preload }) +module.exports = ({ repo, preload }) => { + const get = require('./get')({ repo, preload }) /** * @type {import('ipfs-core-types/src/object').API["stat"]} */ - async function stat (multihash, options = {}) { - const node = await get(multihash, options) - const serialized = dagPB.util.serialize(node) - const cid = await dagPB.util.cid(serialized, { - cidVersion: 0 - }) - + async function stat (cid, options = {}) { + const node = 
await get(cid, options) + const serialized = dagPb.encode(node) const blockSize = serialized.length - const linkLength = node.Links.reduce((a, l) => a + l.Tsize, 0) + const linkLength = node.Links.reduce((a, l) => a + (l.Tsize || 0), 0) return { - Hash: cid.toBaseEncodedString(), + Hash: cid, NumLinks: node.Links.length, BlockSize: blockSize, - LinksSize: blockSize - node.Data.length, - DataSize: node.Data.length, + LinksSize: blockSize - (node.Data || []).length, + DataSize: (node.Data || []).length, CumulativeSize: blockSize + linkLength } } diff --git a/packages/ipfs-core/src/components/pin/add-all.js b/packages/ipfs-core/src/components/pin/add-all.js index 6b5aa0ac58..a9bd600090 100644 --- a/packages/ipfs-core/src/components/pin/add-all.js +++ b/packages/ipfs-core/src/components/pin/add-all.js @@ -2,16 +2,15 @@ 'use strict' const { resolvePath } = require('../../utils') -const PinManager = require('./pin-manager') -const { PinTypes } = PinManager const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') +const { PinTypes } = require('ipfs-repo') /** * @typedef {import('ipfs-core-utils/src/pins/normalise-input').Source} Source * @typedef {import('ipfs-core-utils/src/pins/normalise-input').Pin} PinTarget * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID */ /** @@ -21,11 +20,10 @@ const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') /** * @param {Object} config - * @param {import('../gc-lock').GCLock} config.gcLock - * @param {import('ipld')} config.ipld - * @param {import('./pin-manager')} config.pinManager + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs + * @param {import('ipfs-repo').IPFSRepo} config.repo */ -module.exports = ({ pinManager, gcLock, ipld }) => { +module.exports = ({ repo, codecs }) => { /** * @type 
{import('ipfs-core-types/src/pin').API["addAll"]} */ @@ -35,10 +33,10 @@ module.exports = ({ pinManager, gcLock, ipld }) => { */ const pinAdd = async function * () { for await (const { path, recursive, metadata } of normaliseInput(source)) { - const cid = await resolvePath(ipld, path) + const { cid } = await resolvePath(repo, codecs, path) // verify that each hash can be pinned - const { reason } = await pinManager.isPinnedWithType(cid, [PinTypes.recursive, PinTypes.direct]) + const { reason } = await repo.pins.isPinnedWithType(cid, [PinTypes.recursive, PinTypes.direct]) if (reason === 'recursive' && !recursive) { // only disallow trying to override recursive pins @@ -46,9 +44,9 @@ module.exports = ({ pinManager, gcLock, ipld }) => { } if (recursive) { - await pinManager.pinRecursively(cid, { metadata }) + await repo.pins.pinRecursively(cid, { metadata }) } else { - await pinManager.pinDirectly(cid, { metadata }) + await repo.pins.pinDirectly(cid, { metadata }) } yield cid @@ -64,7 +62,7 @@ module.exports = ({ pinManager, gcLock, ipld }) => { return } - const release = await gcLock.readLock() + const release = await repo.gcLock.readLock() try { yield * pinAdd() diff --git a/packages/ipfs-core/src/components/pin/add.js b/packages/ipfs-core/src/components/pin/add.js index 842d34d166..75dee59342 100644 --- a/packages/ipfs-core/src/components/pin/add.js +++ b/packages/ipfs-core/src/components/pin/add.js @@ -1,7 +1,7 @@ 'use strict' const last = require('it-last') -const CID = require('cids') +const { CID } = require('multiformats/cid') /** * @param {Object} config @@ -14,7 +14,7 @@ module.exports = ({ addAll }) => (path, options = {}) => { let iter - if (CID.isCID(path)) { + if (path instanceof CID) { iter = addAll([{ cid: path, ...options diff --git a/packages/ipfs-core/src/components/pin/index.js b/packages/ipfs-core/src/components/pin/index.js index b78d33f647..19297eaa7d 100644 --- a/packages/ipfs-core/src/components/pin/index.js +++ 
b/packages/ipfs-core/src/components/pin/index.js @@ -6,26 +6,20 @@ const createLs = require('./ls') const createRm = require('./rm') const createRmAll = require('./rm-all') -/** - * @typedef {import('../gc-lock').GCLock} GCLock - * @typedef {import('./pin-manager')} PinManager - */ - class PinAPI { /** * @param {Object} config - * @param {GCLock} config.gcLock - * @param {import('ipld')} config.ipld - * @param {PinManager} config.pinManager + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs + * @param {import('ipfs-repo').IPFSRepo} config.repo */ - constructor ({ gcLock, ipld, pinManager }) { - const addAll = createAddAll({ gcLock, ipld, pinManager }) + constructor ({ codecs, repo }) { + const addAll = createAddAll({ codecs, repo }) this.addAll = addAll this.add = createAdd({ addAll }) - const rmAll = createRmAll({ gcLock, ipld, pinManager }) + const rmAll = createRmAll({ codecs, repo }) this.rmAll = rmAll this.rm = createRm({ rmAll }) - this.ls = createLs({ ipld, pinManager }) + this.ls = createLs({ codecs, repo }) /** @type {import('ipfs-core-types/src/pin/remote').API} */ this.remote = { @@ -36,6 +30,8 @@ class PinAPI { service: { add: (name, credentials) => Promise.reject(new Error('Not implemented')), rm: (name, options = {}) => Promise.reject(new Error('Not implemented')), + // @ts-ignore return types seem to be broken by a recent ts release. doesn't matter here because + // we are just throwing. 
Will be removed by https://github.com/protocol/web3-dev-team/pull/58 ls: (options = {}) => Promise.reject(new Error('Not implemented')) } } diff --git a/packages/ipfs-core/src/components/pin/ls.js b/packages/ipfs-core/src/components/pin/ls.js index b06b255fcc..b6927cc556 100644 --- a/packages/ipfs-core/src/components/pin/ls.js +++ b/packages/ipfs-core/src/components/pin/ls.js @@ -1,14 +1,14 @@ /* eslint max-nested-callbacks: ["error", 8] */ 'use strict' -const PinManager = require('./pin-manager') -const { PinTypes } = PinManager +const { PinTypes } = require('ipfs-repo') const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') const { resolvePath } = require('../../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +const errCode = require('err-code') /** - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID */ /** @@ -32,10 +32,10 @@ function toPin (type, cid, metadata) { /** * @param {Object} config - * @param {import('./pin-manager')} config.pinManager - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs */ -module.exports = ({ pinManager, ipld }) => { +module.exports = ({ repo, codecs }) => { /** * @type {import('ipfs-core-types/src/pin').API["ls"]} */ @@ -46,7 +46,9 @@ module.exports = ({ pinManager, ipld }) => { if (options.type) { type = options.type - PinManager.checkPinType(type) + if (!Object.keys(PinTypes).includes(type)) { + throw errCode(new Error('Invalid pin type'), 'ERR_INVALID_PIN_TYPE') + } } if (options.paths) { @@ -54,11 +56,11 @@ module.exports = ({ pinManager, ipld }) => { let matched = false for await (const { path } of normaliseInput(options.paths)) { - const cid = await resolvePath(ipld, path) - const { reason, pinned, parent, metadata } = await pinManager.isPinnedWithType(cid, type) + const { cid } = await resolvePath(repo, codecs, path) + const { 
reason, pinned, parent, metadata } = await repo.pins.isPinnedWithType(cid, type) if (!pinned) { - throw new Error(`path '${path}' is not pinned`) + throw errCode(new Error(`path '${path}' is not pinned`), 'ERR_NOT_PINNED') } switch (reason) { @@ -81,7 +83,7 @@ module.exports = ({ pinManager, ipld }) => { } if (type === PinTypes.recursive || type === PinTypes.all) { - for await (const { cid, metadata } of pinManager.recursiveKeys()) { + for await (const { cid, metadata } of repo.pins.recursiveKeys()) { yield toPin(PinTypes.recursive, cid, metadata) } } @@ -89,13 +91,13 @@ module.exports = ({ pinManager, ipld }) => { if (type === PinTypes.indirect || type === PinTypes.all) { // @ts-ignore - LsSettings & AbortOptions have no properties in common // with type { preload?: boolean } - for await (const cid of pinManager.indirectKeys(options)) { + for await (const cid of repo.pins.indirectKeys(options)) { yield toPin(PinTypes.indirect, cid) } } if (type === PinTypes.direct || type === PinTypes.all) { - for await (const { cid, metadata } of pinManager.directKeys()) { + for await (const { cid, metadata } of repo.pins.directKeys()) { yield toPin(PinTypes.direct, cid, metadata) } } diff --git a/packages/ipfs-core/src/components/pin/pin-manager.js b/packages/ipfs-core/src/components/pin/pin-manager.js deleted file mode 100644 index 0378cd8795..0000000000 --- a/packages/ipfs-core/src/components/pin/pin-manager.js +++ /dev/null @@ -1,351 +0,0 @@ -/* eslint max-nested-callbacks: ["error", 8] */ -'use strict' - -const CID = require('cids') -const errCode = require('err-code') -// @ts-ignore - no types -const dagCborLinks = require('dag-cbor-links') -const debug = require('debug') -const first = require('it-first') -const all = require('it-all') -const cborg = require('cborg') -const multibase = require('multibase') -const multicodec = require('multicodec') -const { Key } = require('interface-datastore') - -/** - * @typedef {object} Pin - * @property {number} depth - * @property 
{CID.CIDVersion} [version] - * @property {multicodec.CodecCode} [codec] - * @property {Record} [metadata] - */ - -/** - * @typedef {import('ipfs-core-types/src/pin').PinType} PinType - * @typedef {import('ipfs-core-types/src/pin').PinQueryType} PinQueryType - */ - -/** - * @typedef {Object} PinOptions - * @property {any} [metadata] - * - * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions - */ - -// arbitrary limit to the number of concurrent dag operations -// const WALK_DAG_CONCURRENCY_LIMIT = 300 -// const IS_PINNED_WITH_TYPE_CONCURRENCY_LIMIT = 300 -// const PIN_DS_KEY = new Key('/local/pins') - -/** - * @param {string} type - */ -function invalidPinTypeErr (type) { - const errMsg = `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` - return errCode(new Error(errMsg), 'ERR_INVALID_PIN_TYPE') -} - -const encoder = multibase.encoding('base32upper') - -/** - * @param {CID} cid - */ -function cidToKey (cid) { - return new Key(`/${encoder.encode(cid.multihash)}`) -} - -/** - * @param {Key | string} key - */ -function keyToMultihash (key) { - return encoder.decode(key.toString().slice(1)) -} - -const PinTypes = { - /** @type {'direct'} */ - direct: ('direct'), - /** @type {'recursive'} */ - recursive: ('recursive'), - /** @type {'indirect'} */ - indirect: ('indirect'), - /** @type {'all'} */ - all: ('all') -} - -class PinManager { - /** - * @param {Object} config - * @param {import('ipfs-repo')} config.repo - * @param {import('ipld')} config.ipld - */ - constructor ({ repo, ipld }) { - this.repo = repo - this.ipld = ipld - this.log = debug('ipfs:pin') - this.directPins = new Set() - this.recursivePins = new Set() - } - - /** - * @private - * @param {CID} cid - * @param {AbortOptions} [options] - * @returns {AsyncGenerator} - */ - async * _walkDag (cid, options) { - const node = await this.ipld.get(cid, options) - - if (cid.codec === 'dag-pb') { - for (const link of node.Links) { - yield link.Hash - yield * 
this._walkDag(link.Hash, options) - } - } else if (cid.codec === 'dag-cbor') { - for (const [, childCid] of dagCborLinks(node)) { - yield childCid - yield * this._walkDag(childCid, options) - } - } - } - - /** - * @param {CID} cid - * @param {PinOptions & AbortOptions} [options] - * @returns {Promise} - */ - async pinDirectly (cid, options = {}) { - await this.ipld.get(cid, options) - - /** @type {Pin} */ - const pin = { - depth: 0 - } - - if (cid.version !== 0) { - pin.version = cid.version - } - - if (cid.codec !== 'dag-pb') { - pin.codec = multicodec.getNumber(cid.codec) - } - - if (options.metadata) { - pin.metadata = options.metadata - } - - return this.repo.pins.put(cidToKey(cid), cborg.encode(pin)) - } - - /** - * @param {CID} cid - * @param {AbortOptions} [options] - * @returns {Promise} - */ - // eslint-disable-next-line require-await - async unpin (cid, options) { - return this.repo.pins.delete(cidToKey(cid)) - } - - /** - * @param {CID} cid - * @param {PinOptions & AbortOptions} [options] - * @returns {Promise} - */ - async pinRecursively (cid, options = {}) { - await this.fetchCompleteDag(cid, options) - - /** @type {Pin} */ - const pin = { - depth: Infinity - } - - if (cid.version !== 0) { - pin.version = cid.version - } - - if (cid.codec !== 'dag-pb') { - pin.codec = multicodec.getNumber(cid.codec) - } - - if (options.metadata) { - pin.metadata = options.metadata - } - - await this.repo.pins.put(cidToKey(cid), cborg.encode(pin)) - } - - /** - * @param {AbortOptions} [options] - */ - async * directKeys (options) { - for await (const entry of this.repo.pins.query({ - filters: [(entry) => { - const pin = cborg.decode(entry.value) - - return pin.depth === 0 - }] - })) { - const pin = cborg.decode(entry.value) - const version = pin.version || 0 - const codec = pin.codec ? 
multicodec.getName(pin.codec) : 'dag-pb' - const multihash = keyToMultihash(entry.key) - - yield { - cid: new CID(version, codec, multihash), - metadata: pin.metadata - } - } - } - - /** - * @param {AbortOptions} [options] - */ - async * recursiveKeys (options) { - for await (const entry of this.repo.pins.query({ - filters: [(entry) => { - const pin = cborg.decode(entry.value) - - return pin.depth === Infinity - }] - })) { - const pin = cborg.decode(entry.value) - const version = pin.version || 0 - const codec = pin.codec ? multicodec.getName(pin.codec) : 'dag-pb' - const multihash = keyToMultihash(entry.key) - - yield { - cid: new CID(version, codec, multihash), - metadata: pin.metadata - } - } - } - - /** - * @param {AbortOptions} [options] - */ - async * indirectKeys (options) { - for await (const { cid } of this.recursiveKeys()) { - for await (const childCid of this._walkDag(cid, options)) { - // recursive pins override indirect pins - const types = [ - PinTypes.recursive - ] - - const result = await this.isPinnedWithType(childCid, types) - - if (result.pinned) { - continue - } - - yield childCid - } - } - } - - /** - * @param {CID} cid - * @param {PinQueryType|PinQueryType[]} types - * @param {AbortOptions} [options] - */ - async isPinnedWithType (cid, types, options) { - if (!Array.isArray(types)) { - types = [types] - } - - const all = types.includes(PinTypes.all) - const direct = types.includes(PinTypes.direct) - const recursive = types.includes(PinTypes.recursive) - const indirect = types.includes(PinTypes.indirect) - - if (recursive || direct || all) { - const result = await first(this.repo.pins.query({ - prefix: cidToKey(cid).toString(), - filters: [entry => { - if (all) { - return true - } - - const pin = cborg.decode(entry.value) - - return types.includes(pin.depth === 0 ? 
PinTypes.direct : PinTypes.recursive) - }], - limit: 1 - })) - - if (result) { - const pin = cborg.decode(result.value) - - return { - cid, - pinned: true, - reason: pin.depth === 0 ? PinTypes.direct : PinTypes.recursive, - metadata: pin.metadata - } - } - } - - const self = this - - /** - * @param {CID} key - * @param {AsyncIterable<{ cid: CID, metadata: any }>} source - */ - async function * findChild (key, source) { - for await (const { cid: parentCid } of source) { - for await (const childCid of self._walkDag(parentCid)) { - if (childCid.equals(key)) { - yield parentCid - return - } - } - } - } - - if (all || indirect) { - // indirect (default) - // check each recursive key to see if multihash is under it - - const parentCid = await first(findChild(cid, this.recursiveKeys())) - - if (parentCid) { - return { - cid, - pinned: true, - reason: PinTypes.indirect, - parent: parentCid - } - } - } - - return { - cid, - pinned: false - } - } - - /** - * @param {CID} cid - * @param {AbortOptions} options - */ - async fetchCompleteDag (cid, options) { - await all(this._walkDag(cid, options)) - } - - /** - * Throws an error if the pin type is invalid - * - * @param {any} type - * @returns {type is PinType} - */ - static checkPinType (type) { - if (typeof type !== 'string' || !Object.keys(PinTypes).includes(type)) { - throw invalidPinTypeErr(type) - } - return true - } -} - -PinManager.PinTypes = PinTypes - -module.exports = PinManager diff --git a/packages/ipfs-core/src/components/pin/rm-all.js b/packages/ipfs-core/src/components/pin/rm-all.js index 9e84fb4274..c282d3fbf2 100644 --- a/packages/ipfs-core/src/components/pin/rm-all.js +++ b/packages/ipfs-core/src/components/pin/rm-all.js @@ -3,26 +3,25 @@ const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') const { resolvePath } = require('../../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const { PinTypes } = require('./pin-manager') +const { PinTypes } = 
require('ipfs-repo') /** * @param {Object} config - * @param {import('./pin-manager')} config.pinManager - * @param {import('.').GCLock} config.gcLock - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs */ -module.exports = ({ pinManager, gcLock, ipld }) => { +module.exports = ({ repo, codecs }) => { /** * @type {import('ipfs-core-types/src/pin').API["rmAll"]} */ async function * rmAll (source, _options = {}) { - const release = await gcLock.readLock() + const release = await repo.gcLock.readLock() try { // verify that each hash can be unpinned for await (const { path, recursive } of normaliseInput(source)) { - const cid = await resolvePath(ipld, path) - const { pinned, reason } = await pinManager.isPinnedWithType(cid, PinTypes.all) + const { cid } = await resolvePath(repo, codecs, path) + const { pinned, reason } = await repo.pins.isPinnedWithType(cid, PinTypes.all) if (!pinned) { throw new Error(`${cid} is not pinned`) @@ -34,13 +33,13 @@ module.exports = ({ pinManager, gcLock, ipld }) => { throw new Error(`${cid} is pinned recursively`) } - await pinManager.unpin(cid) + await repo.pins.unpin(cid) yield cid break case (PinTypes.direct): - await pinManager.unpin(cid) + await repo.pins.unpin(cid) yield cid diff --git a/packages/ipfs-core/src/components/ping.js b/packages/ipfs-core/src/components/ping.js index d7fca94fd5..2c80f68ec2 100644 --- a/packages/ipfs-core/src/components/ping.js +++ b/packages/ipfs-core/src/components/ping.js @@ -17,7 +17,7 @@ module.exports = ({ network }) => { const { libp2p } = await network.use() options.count = options.count || 10 - const peer = PeerId.createFromCID(peerId) + const peer = PeerId.createFromB58String(peerId) const storedPeer = libp2p.peerStore.get(peer) let id = storedPeer && storedPeer.id diff --git a/packages/ipfs-core/src/components/refs/index.js b/packages/ipfs-core/src/components/refs/index.js index 
db4018883e..3a4e146a27 100644 --- a/packages/ipfs-core/src/components/refs/index.js +++ b/packages/ipfs-core/src/components/refs/index.js @@ -1,11 +1,11 @@ 'use strict' -const CID = require('cids') -const { DAGNode } = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') const { Errors } = require('interface-datastore') const ERR_NOT_FOUND = Errors.notFoundError().code const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const toCIDAndPath = require('ipfs-core-utils/src/to-cid-and-path') +const { CID } = require('multiformats/cid') const Format = { default: '', @@ -25,11 +25,12 @@ const Format = { /** * @param {Object} config - * @param {import('ipld')} config.ipld + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs * @param {import('ipfs-core-types/src/root').API["resolve"]} config.resolve * @param {import('../../types').Preload} config.preload */ -module.exports = function ({ ipld, resolve, preload }) { +module.exports = function ({ repo, codecs, resolve, preload }) { /** * @type {import('ipfs-core-types/src/refs').API["refs"]} */ @@ -54,7 +55,7 @@ module.exports = function ({ ipld, resolve, preload }) { const paths = rawPaths.map(p => getFullPath(preload, p, options)) for (const path of paths) { - yield * refsStream(resolve, ipld, path, options) + yield * refsStream(resolve, repo, codecs, path, options) } } @@ -85,11 +86,12 @@ function getFullPath (preload, ipfsPath, options) { * Get a stream of refs at the given path * * @param {import('ipfs-core-types/src/root').API["resolve"]} resolve - * @param {import('ipld')} ipld + * @param {import('ipfs-repo').IPFSRepo} repo + * @param {import('ipfs-core-utils/src/multicodecs')} codecs * @param {string} path * @param {import('ipfs-core-types/src/refs').RefsOptions} options */ -async function * refsStream (resolve, ipld, path, options) { +async function * refsStream (resolve, repo, codecs, path, options) { // Resolve to 
the target CID of the path const resPath = await resolve(path) const { @@ -100,7 +102,7 @@ async function * refsStream (resolve, ipld, path, options) { const unique = options.unique || false // Traverse the DAG, converting it into a stream - for await (const obj of objectStream(ipld, cid, maxDepth, unique)) { + for await (const obj of objectStream(repo, codecs, cid, maxDepth, unique)) { // Root object will not have a parent if (!obj.parent) { continue @@ -137,12 +139,13 @@ function formatLink (srcCid, dstCid, linkName = '', format = Format.default) { /** * Do a depth first search of the DAG, starting from the given root cid * - * @param {import('ipld')} ipld + * @param {import('ipfs-repo').IPFSRepo} repo + * @param {import('ipfs-core-utils/src/multicodecs')} codecs * @param {CID} rootCid * @param {number} maxDepth * @param {boolean} uniqueOnly */ -async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // eslint-disable-line require-await +async function * objectStream (repo, codecs, rootCid, maxDepth, uniqueOnly) { // eslint-disable-line require-await const seen = new Set() /** @@ -161,7 +164,7 @@ async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // eslint- // Get this object's links try { // Look at each link, parent and the new depth - for (const link of await getLinks(ipld, parent.cid)) { + for await (const link of getLinks(repo, codecs, parent.cid)) { yield { parent: parent, node: link, @@ -187,44 +190,90 @@ async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // eslint- } /** - * Fetch a node from IPLD then get all its links + * Fetch a node and then get all its links * - * @param {import('ipld')} ipld + * @param {import('ipfs-repo').IPFSRepo} repo + * @param {import('ipfs-core-utils/src/multicodecs')} codecs * @param {CID} cid + * @param {Array} base + * @returns {AsyncGenerator<{ name: string, cid: CID }, void, undefined>} */ -async function getLinks (ipld, cid) { - const node = await ipld.get(cid) - - if 
(node instanceof DAGNode) { - /** - * @param {import('ipld-dag-pb').DAGLink} arg - */ - const mapper = ({ Name, Hash }) => ({ name: Name, cid: Hash }) - return node.Links.map(mapper) - } +async function * getLinks (repo, codecs, cid, base = []) { + const block = await repo.blocks.get(cid) + const codec = await codecs.getCodec(cid.code) + const value = codec.decode(block) + const isDagPb = cid.code === dagPb.code + + for (const [name, cid] of links(value, base)) { + // special case for dag-pb - use the name of the link + // instead of the path within the object + if (isDagPb) { + const match = name.match(/^Links\/(\d+)\/Hash$/) + + if (match) { + const index = Number(match[1]) + + if (index < value.Links.length) { + yield { + name: value.Links[index].Name, + cid + } + + continue + } + } + } - return getNodeLinks(node) + yield { + name, + cid + } + } } /** - * Recursively search the node for CIDs - * - * @param {object} node - * @param {string} [path] - * @returns {Node[]} + * @param {*} source + * @param {Array} base + * @returns {Iterable<[string, CID]>} */ -function getNodeLinks (node, path = '') { - /** @type {Node[]} */ - let links = [] - for (const [name, value] of Object.entries(node)) { - if (CID.isCID(value)) { - links.push({ - name: path + name, - cid: value - }) - } else if (typeof value === 'object') { - links = links.concat(getNodeLinks(value, path + name + '/')) +const links = function * (source, base) { + if (source == null) { + return + } + + if (source instanceof Uint8Array) { + return + } + + for (const [key, value] of Object.entries(source)) { + const path = [...base, key] + + if (value != null && typeof value === 'object') { + if (Array.isArray(value)) { + for (const [index, element] of value.entries()) { + const elementPath = [...path, index] + const cid = CID.asCID(element) + + // eslint-disable-next-line max-depth + if (cid) { + yield [elementPath.join('/'), cid] + } else if (typeof element === 'object') { + yield * links(element, elementPath) 
+ } + } + } else { + const cid = CID.asCID(value) + + if (cid) { + yield [path.join('/'), cid] + } else { + yield * links(value, path) + } + } } } - return links + + // ts requires a @returns annotation when a function is recursive, + // eslint requires a return when you use a @returns annotation. + return [] } diff --git a/packages/ipfs-core/src/components/refs/local.js b/packages/ipfs-core/src/components/refs/local.js index 8c03835080..2cba818c85 100644 --- a/packages/ipfs-core/src/components/refs/local.js +++ b/packages/ipfs-core/src/components/refs/local.js @@ -4,7 +4,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = function ({ repo }) { /** diff --git a/packages/ipfs-core/src/components/repo/gc.js b/packages/ipfs-core/src/components/repo/gc.js index e022c23f89..4f862b7045 100644 --- a/packages/ipfs-core/src/components/repo/gc.js +++ b/packages/ipfs-core/src/components/repo/gc.js @@ -1,149 +1,52 @@ 'use strict' -const CID = require('cids') const log = require('debug')('ipfs:repo:gc') -const { MFS_ROOT_KEY } = require('../../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const { Errors } = require('interface-datastore') -const ERR_NOT_FOUND = Errors.notFoundError().code -const { parallelMerge, transform, map } = require('streaming-iterables') -const multibase = require('multibase') - -// Limit on the number of parallel block remove operations -const BLOCK_RM_CONCURRENCY = 256 +const loadMfsRoot = require('../files/utils/with-mfs-root') /** * @typedef {import('ipfs-core-types/src/pin').API} PinAPI * @typedef {import('ipfs-core-types/src/refs').API} RefsAPI - * @typedef {import('ipfs-repo')} IPFSRepo + * @typedef {import('ipfs-repo').IPFSRepo} IPFSRepo * @typedef {import('interface-datastore').Key} Key - * @typedef {import('ipld-block')} Block + * 
@typedef {import('multiformats/hashes/interface').MultihashHasher} MultihashHasher + * @typedef {import('ipfs-core-utils/src/multihashes')} Multihashes */ /** * Perform mark and sweep garbage collection * * @param {Object} config - * @param {import('../gc-lock').GCLock} config.gcLock - * @param {PinAPI} config.pin - * @param {RefsAPI["refs"]} config.refs * @param {IPFSRepo} config.repo + * @param {Multihashes} config.hashers */ -module.exports = ({ gcLock, pin, refs, repo }) => { +module.exports = ({ repo, hashers }) => { /** * @type {import('ipfs-core-types/src/repo').API["gc"]} */ - async function * gc (_options = {}) { + async function * gc (options = {}) { const start = Date.now() - log('Creating set of marked blocks') - - const release = await gcLock.writeLock() + let mfsRootCid try { - // Mark all blocks that are being used - const markedSet = await createMarkedSet({ pin, refs, repo }) - // Get all blocks keys from the blockstore - const blockKeys = repo.blocks.queryKeys({}) + mfsRootCid = await loadMfsRoot({ + repo, + hashers + }, options) - // Delete blocks that are not being used - yield * deleteUnmarkedBlocks({ repo }, markedSet, blockKeys) + // temporarily pin mfs root + await repo.pins.pinRecursively(mfsRootCid) - log(`Complete (${Date.now() - start}ms)`) + yield * repo.gc() } finally { - release() - } - } - - return withTimeoutOption(gc) -} - -/** - * Get Set of CIDs of blocks to keep - * - * @param {object} arg - * @param {PinAPI} arg.pin - * @param {RefsAPI["refs"]} arg.refs - * @param {IPFSRepo} arg.repo - */ -async function createMarkedSet ({ pin, refs, repo }) { - const pinsSource = map(({ cid }) => cid, pin.ls()) - - const mfsSource = (async function * () { - let mh - try { - mh = await repo.root.get(MFS_ROOT_KEY) - } catch (err) { - if (err.code === ERR_NOT_FOUND) { - log('No blocks in MFS') - return - } - throw err - } - - const rootCid = new CID(mh) - yield rootCid - - for await (const { ref } of refs(rootCid, { recursive: true })) { - yield 
new CID(ref) - } - })() - - const output = new Set() - for await (const cid of parallelMerge(pinsSource, mfsSource)) { - output.add(multibase.encode('base32', cid.multihash).toString()) - } - return output -} - -/** - * Delete all blocks that are not marked as in use - * - * @param {object} arg - * @param {IPFSRepo} arg.repo - * @param {Set} markedSet - * @param {AsyncIterable} blockKeys - */ -async function * deleteUnmarkedBlocks ({ repo }, markedSet, blockKeys) { - // Iterate through all blocks and find those that are not in the marked set - // blockKeys yields { key: Key() } - let blocksCount = 0 - let removedBlocksCount = 0 - - /** - * @param {CID} cid - */ - const removeBlock = async (cid) => { - blocksCount++ - - try { - const b32 = multibase.encode('base32', cid.multihash).toString() - - if (markedSet.has(b32)) { - return null + // gc complete, unpin mfs root + if (mfsRootCid) { + await repo.pins.unpin(mfsRootCid) } - - try { - await repo.blocks.delete(cid) - removedBlocksCount++ - } catch (err) { - return { - err: new Error(`Could not delete block with CID ${cid}: ${err.message}`) - } - } - - return { cid } - } catch (err) { - const msg = `Could delete block with CID ${cid}` - log(msg, err) - return { err: new Error(msg + `: ${err.message}`) } } - } - for await (const res of transform(BLOCK_RM_CONCURRENCY, removeBlock, blockKeys)) { - // filter nulls (blocks that were retained) - if (res) yield res + log(`Complete (${Date.now() - start}ms)`) } - log(`Marked set has ${markedSet.size} unique blocks. Blockstore has ${blocksCount} blocks. 
` + - `Deleted ${removedBlocksCount} blocks.`) + return withTimeoutOption(gc) } diff --git a/packages/ipfs-core/src/components/repo/index.js b/packages/ipfs-core/src/components/repo/index.js index 7854c39df7..df0a795f5f 100644 --- a/packages/ipfs-core/src/components/repo/index.js +++ b/packages/ipfs-core/src/components/repo/index.js @@ -4,18 +4,26 @@ const createGC = require('./gc') const createStat = require('./stat') const createVersion = require('./version') +/** + * @typedef {import('multiformats/hashes/interface').MultihashHasher} MultihashHasher + * @typedef {import('ipfs-core-utils/src/multihashes')} Multihashes + */ + class RepoAPI { /** * @param {Object} config - * @param {import('../gc-lock').GCLock} config.gcLock - * @param {import('ipfs-core-types/src/pin').API} config.pin - * @param {import('ipfs-repo')} config.repo - * @param {import('ipfs-core-types/src/refs').API["refs"]} config.refs + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {Multihashes} config.hashers */ - constructor ({ gcLock, pin, repo, refs }) { - this.gc = createGC({ gcLock, pin, refs, repo }) + constructor ({ repo, hashers }) { + this.gc = createGC({ repo, hashers }) this.stat = createStat({ repo }) this.version = createVersion({ repo }) + + /** + * @param {string} addr + */ + this.setApiAddr = (addr) => repo.apiAddr.set(addr) } } module.exports = RepoAPI diff --git a/packages/ipfs-core/src/components/repo/stat.js b/packages/ipfs-core/src/components/repo/stat.js index 1813644304..f46499643b 100644 --- a/packages/ipfs-core/src/components/repo/stat.js +++ b/packages/ipfs-core/src/components/repo/stat.js @@ -4,7 +4,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ repo }) => { /** diff --git a/packages/ipfs-core/src/components/repo/version.js 
b/packages/ipfs-core/src/components/repo/version.js index b47a2970dd..447dd54c44 100644 --- a/packages/ipfs-core/src/components/repo/version.js +++ b/packages/ipfs-core/src/components/repo/version.js @@ -5,7 +5,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ repo }) => { /** diff --git a/packages/ipfs-core/src/components/resolve.js b/packages/ipfs-core/src/components/resolve.js index c220a60404..9ad17151a5 100644 --- a/packages/ipfs-core/src/components/resolve.js +++ b/packages/ipfs-core/src/components/resolve.js @@ -1,16 +1,18 @@ 'use strict' const isIpfs = require('is-ipfs') -const CID = require('cids') -const { cidToString } = require('ipfs-core-utils/src/cid') +const { CID } = require('multiformats/cid') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +const { resolve: res } = require('../utils') /** * @param {Object} config - * @param {import('ipld')} config.ipld - * @param {import('ipfs-core-types/src/name').API} config.name - An IPFS core interface name API + * @param {import('ipfs-repo').IPFSRepo} config.repo + * @param {import('ipfs-core-utils/src/multicodecs')} config.codecs + * @param {import('ipfs-core-utils/src/multibases')} config.bases + * @param {import('ipfs-core-types/src/name').API} config.name */ -module.exports = ({ ipld, name }) => { +module.exports = ({ repo, codecs, bases, name }) => { /** * @type {import('ipfs-core-types/src/root').API["resolve"]} */ @@ -20,37 +22,34 @@ module.exports = ({ ipld, name }) => { } if (isIpfs.ipnsPath(path)) { - if (!name) { - throw new Error('failed to resolve IPNS path: name API unavailable') - } - for await (const resolvedPath of name.resolve(path, opts)) { path = resolvedPath } } const [, , hash, ...rest] = path.split('/') // ['', 'ipfs', 'hash', ...path] - const cid = new CID(hash) + const cid = 
CID.parse(hash) + const base = opts.cidBase ? await bases.getBase(opts.cidBase) : undefined // nothing to resolve return the input if (rest.length === 0) { - return `/ipfs/${cidToString(cid, { base: opts.cidBase })}` + return `/ipfs/${cid.toString(base && base.encoder)}` } path = rest.join('/') - const results = ipld.resolve(cid, path) + const results = res(cid, path, codecs, repo, opts) let value = cid let remainderPath = path for await (const result of results) { - if (CID.isCID(result.value)) { + if (result.value instanceof CID) { value = result.value remainderPath = result.remainderPath } } - return `/ipfs/${cidToString(value, { base: opts.cidBase })}${remainderPath ? '/' + remainderPath : ''}` + return `/ipfs/${value.toString(base && base.encoder)}${remainderPath ? '/' + remainderPath : ''}` } return withTimeoutOption(resolve) diff --git a/packages/ipfs-core/src/components/root.js b/packages/ipfs-core/src/components/root.js index 054a61e341..e16318b58a 100644 --- a/packages/ipfs-core/src/components/root.js +++ b/packages/ipfs-core/src/components/root.js @@ -17,20 +17,18 @@ class Root { /** * @param {Context} context */ - constructor ({ preload, gcLock, pin, block, ipld, options }) { + constructor ({ preload, repo, options }) { const addAll = createAddAllAPI({ preload, - gcLock, - block, - pin, + repo, options }) this.addAll = addAll this.add = createAddAPI({ addAll }) - this.cat = createCatAPI({ ipld, preload }) - this.get = createGetAPI({ ipld, preload }) - this.ls = createLsAPI({ ipld, preload }) + this.cat = createCatAPI({ repo, preload }) + this.get = createGetAPI({ repo, preload }) + this.ls = createLsAPI({ repo, preload }) } } diff --git a/packages/ipfs-core/src/components/start.js b/packages/ipfs-core/src/components/start.js index b56b1c7d84..19f129fbf3 100644 --- a/packages/ipfs-core/src/components/start.js +++ b/packages/ipfs-core/src/components/start.js @@ -6,8 +6,7 @@ const Service = require('../utils/service') * @param {Object} config * @param 
{import('../types').NetworkService} config.network * @param {import('peer-id')} config.peerId - * @param {import('ipfs-repo')} config.repo - * @param {import('ipfs-block-service')} config.blockService + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../types').Print} config.print * @param {import('../types').Preload} config.preload * @param {import('../types').MfsPreload} config.mfsPreload @@ -15,20 +14,18 @@ const Service = require('../utils/service') * @param {import('libp2p/src/keychain')} config.keychain * @param {import('../types').Options} config.options */ -module.exports = ({ network, preload, peerId, keychain, repo, ipns, blockService, mfsPreload, print, options }) => { +module.exports = ({ network, preload, peerId, keychain, repo, ipns, mfsPreload, print, options }) => { /** * @type {import('ipfs-core-types/src/root').API["start"]} */ const start = async () => { - const { bitswap, libp2p } = await Service.start(network, { + const { libp2p } = await Service.start(network, { peerId, repo, print, options }) - blockService.setExchange(bitswap) - await Promise.all([ ipns.startOnline({ keychain, libp2p, peerId, repo }), preload.start(), diff --git a/packages/ipfs-core/src/components/stats/bw.js b/packages/ipfs-core/src/components/stats/bw.js index 52c5a72a14..fae29b608b 100644 --- a/packages/ipfs-core/src/components/stats/bw.js +++ b/packages/ipfs-core/src/components/stats/bw.js @@ -19,7 +19,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') * * @typedef {import('libp2p')} libp2p * @typedef {import('peer-id')} PeerId - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions */ diff --git a/packages/ipfs-core/src/components/stats/index.js b/packages/ipfs-core/src/components/stats/index.js index 700653cbcb..8b86373de4 100644 --- a/packages/ipfs-core/src/components/stats/index.js +++ 
b/packages/ipfs-core/src/components/stats/index.js @@ -7,7 +7,7 @@ const createBitswap = require('../bitswap/stat') class StatsAPI { /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../../types').NetworkService} config.network */ constructor ({ repo, network }) { diff --git a/packages/ipfs-core/src/components/stop.js b/packages/ipfs-core/src/components/stop.js index 5cde54ef1d..5435c9c8bf 100644 --- a/packages/ipfs-core/src/components/stop.js +++ b/packages/ipfs-core/src/components/stop.js @@ -6,17 +6,15 @@ const Service = require('../utils/service') * @param {Object} config * @param {import('../types').NetworkService} config.network * @param {import('../types').Preload} config.preload - * @param {import('ipfs-block-service')} config.blockService * @param {import('./ipns')} config.ipns - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo * @param {import('../types').MfsPreload} config.mfsPreload */ -module.exports = ({ network, preload, blockService, ipns, repo, mfsPreload }) => { +module.exports = ({ network, preload, ipns, repo, mfsPreload }) => { /** * @type {import('ipfs-core-types/src/root').API["stop"]} */ const stop = async () => { - blockService.unsetExchange() await Promise.all([ preload.stop(), ipns.stop(), diff --git a/packages/ipfs-core/src/components/storage.js b/packages/ipfs-core/src/components/storage.js index d00f26e2f2..08d732be84 100644 --- a/packages/ipfs-core/src/components/storage.js +++ b/packages/ipfs-core/src/components/storage.js @@ -7,13 +7,13 @@ const { ERR_REPO_NOT_INITIALIZED } = require('ipfs-repo').errors const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') const PeerId = require('peer-id') -const { mergeOptions } = require('../utils') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const 
configService = require('./config') const { NotEnabledError, NotInitializedError } = require('../errors') const createLibP2P = require('./libp2p') /** - * @typedef {import('ipfs-repo')} IPFSRepo + * @typedef {import('ipfs-repo').IPFSRepo} IPFSRepo * @typedef {import('../types').Options} IPFSOptions * @typedef {import('../types').InitOptions} InitOptions * @typedef {import('../types').Print} Print @@ -42,13 +42,14 @@ class Storage { /** * @param {Print} print + * @param {import('ipfs-core-utils/src/multicodecs')} codecs * @param {IPFSOptions} options */ - static async start (print, options) { + static async start (print, codecs, options) { const { repoAutoMigrate, repo: inputRepo, onMigrationProgress } = options const repo = (typeof inputRepo === 'string' || inputRepo == null) - ? createRepo(print, { + ? createRepo(print, codecs, { path: inputRepo, autoMigrate: repoAutoMigrate, onMigrationProgress: onMigrationProgress diff --git a/packages/ipfs-core/src/components/version.js b/packages/ipfs-core/src/components/version.js index 4471ff8b29..049db7213a 100644 --- a/packages/ipfs-core/src/components/version.js +++ b/packages/ipfs-core/src/components/version.js @@ -5,7 +5,7 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-repo').IPFSRepo} config.repo */ module.exports = ({ repo }) => { /** diff --git a/packages/ipfs-core/src/index.js b/packages/ipfs-core/src/index.js index 02dd242be5..29b3e93d19 100644 --- a/packages/ipfs-core/src/index.js +++ b/packages/ipfs-core/src/index.js @@ -6,15 +6,11 @@ const PeerId = require('peer-id') const crypto = require('libp2p-crypto') const isIPFS = require('is-ipfs') const { multiaddr } = require('multiaddr') -const multibase = require('multibase') -const multicodec = require('multicodec') -const multihashing = require('multihashing-async') -const multihash = multihashing.multihash -const CID = require('cids') 
+const { CID } = require('multiformats/cid') const { create } = require('./components') /** - * @typedef {import('./components')} IPFS + * @typedef {import('ipfs-core-types').IPFS} IPFS * @typedef {import('./types').Options} Options */ @@ -24,10 +20,6 @@ module.exports = { isIPFS, CID, multiaddr, - multibase, - multihash, - multihashing, - multicodec, PeerId, globSource, urlSource diff --git a/packages/ipfs-core/src/ipns/publisher.js b/packages/ipfs-core/src/ipns/publisher.js index 004ffb9adf..06704632f8 100644 --- a/packages/ipfs-core/src/ipns/publisher.js +++ b/packages/ipfs-core/src/ipns/publisher.js @@ -92,7 +92,7 @@ class IpnsPublisher { * @param {IPNSEntry} entry */ async _publishEntry (key, entry) { - if (!(Key.isKey(key))) { + if (!(key instanceof Key)) { const errMsg = 'datastore key does not have a valid format' log.error(errMsg) @@ -130,7 +130,7 @@ class IpnsPublisher { * @param {PublicKey} publicKey */ async _publishPublicKey (key, publicKey) { - if ((!Key.isKey(key))) { + if (!(key instanceof Key)) { const errMsg = 'datastore key does not have a valid format' log.error(errMsg) diff --git a/packages/ipfs-core/src/ipns/resolver.js b/packages/ipfs-core/src/ipns/resolver.js index a7faf11e35..c40764393e 100644 --- a/packages/ipfs-core/src/ipns/resolver.js +++ b/packages/ipfs-core/src/ipns/resolver.js @@ -89,7 +89,7 @@ class IpnsResolver { * @param {string} name */ async _resolveName (name) { - const peerId = PeerId.createFromCID(name) + const peerId = PeerId.parse(name) const { routingKey } = ipns.getIdKeys(peerId.toBytes()) let record diff --git a/packages/ipfs-core/src/ipns/routing/config.js b/packages/ipfs-core/src/ipns/routing/config.js index 033b928d8a..3869b74652 100644 --- a/packages/ipfs-core/src/ipns/routing/config.js +++ b/packages/ipfs-core/src/ipns/routing/config.js @@ -9,7 +9,7 @@ const OfflineDatastore = require('./offline-datastore') /** * @param {object} arg * @param {import('libp2p')} arg.libp2p - * @param {import('ipfs-repo')} arg.repo + * 
@param {import('ipfs-repo').IPFSRepo} arg.repo * @param {import('peer-id')} arg.peerId * @param {object} arg.options */ diff --git a/packages/ipfs-core/src/ipns/routing/offline-datastore.js b/packages/ipfs-core/src/ipns/routing/offline-datastore.js index 7a2c9e3f48..dc2c523b01 100644 --- a/packages/ipfs-core/src/ipns/routing/offline-datastore.js +++ b/packages/ipfs-core/src/ipns/routing/offline-datastore.js @@ -14,7 +14,7 @@ const log = Object.assign(debug('ipfs:ipns:offline-datastore'), { // to the local datastore class OfflineDatastore { /** - * @param {import('ipfs-repo')} repo + * @param {import('ipfs-repo').IPFSRepo} repo */ constructor (repo) { this._repo = repo diff --git a/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js b/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js index f6d4da49bc..90ecb90b45 100644 --- a/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js +++ b/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js @@ -1,7 +1,7 @@ 'use strict' const ipns = require('ipns') -const { toB58String } = require('multihashing-async').multihash +const { base58btc } = require('multiformats/bases/base58') const PubsubDatastore = require('datastore-pubsub') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') @@ -63,8 +63,8 @@ class IpnsPubsubDatastore { const ns = key.slice(0, ipns.namespaceLength) if (uint8ArrayToString(ns) === ipns.namespace) { - const stringifiedTopic = toB58String(key) - const id = toB58String(key.slice(ipns.namespaceLength)) + const stringifiedTopic = base58btc.encode(key).substring(1) + const id = base58btc.encode(key.slice(ipns.namespaceLength)).substring(1) this._subscriptions[stringifiedTopic] = id diff --git a/packages/ipfs-core/src/mfs-preload.js b/packages/ipfs-core/src/mfs-preload.js index 96fc18f206..6023e32f71 100644 --- a/packages/ipfs-core/src/mfs-preload.js +++ b/packages/ipfs-core/src/mfs-preload.js @@ -1,7 +1,6 @@ 'use strict' const 
debug = require('debug') -const { cidToString } = require('ipfs-core-utils/src/cid') const log = Object.assign(debug('ipfs:mfs-preload'), { error: debug('ipfs:mfs-preload:error') }) @@ -35,7 +34,7 @@ module.exports = ({ preload, files, options = {} }) => { const preloadMfs = async () => { try { const stats = await files.stat('/') - const nextRootCid = cidToString(stats.cid, { base: 'base32' }) + const nextRootCid = stats.cid.toString() if (rootCid !== nextRootCid) { log(`preloading updated MFS root ${rootCid} -> ${stats.cid}`) @@ -55,7 +54,7 @@ module.exports = ({ preload, files, options = {} }) => { */ async start () { const stats = await files.stat('/') - rootCid = cidToString(stats.cid, { base: 'base32' }) + rootCid = stats.cid.toString() log(`monitoring MFS root ${stats.cid}`) timeoutId = setTimeout(preloadMfs, options.interval) }, diff --git a/packages/ipfs-core/src/runtime/ipld.js b/packages/ipfs-core/src/runtime/ipld.js deleted file mode 100644 index b439caa964..0000000000 --- a/packages/ipfs-core/src/runtime/ipld.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict' - -const mergeOptions = require('merge-options') -const multicodec = require('multicodec') - -/** - * @typedef {import('interface-ipld-format').Format} IPLDFormat - * @typedef {import('ipld').Options} IPLDOptions - */ - -/** - * All known (non-default) IPLD formats - * - * @type {Record} - */ -const IpldFormats = { - get [multicodec.DAG_PB] () { - return require('ipld-dag-pb') - }, - get [multicodec.DAG_CBOR] () { - return require('ipld-dag-cbor') - }, - get [multicodec.RAW] () { - return require('ipld-raw') - } -} - -/** - * @param {import('ipfs-block-service')} blockService - * @param {Partial} [options] - */ -module.exports = (blockService, options) => { - return mergeOptions.call( - // ensure we have the defaults formats even if the user overrides `formats: []` - { concatArrays: true }, - { - blockService: blockService, - formats: [], - /** - * @type {import('ipld').LoadFormatFn} - */ - 
loadFormat: (codec) => { - if (IpldFormats[codec]) { - return Promise.resolve(IpldFormats[codec]) - } else { - throw new Error(`Missing IPLD format "${multicodec.getName(codec)}"`) - } - } - }, options) -} diff --git a/packages/ipfs-core/src/runtime/libp2p-pubsub-routers-nodejs.js b/packages/ipfs-core/src/runtime/libp2p-pubsub-routers-nodejs.js index cf329268b7..0af111e881 100644 --- a/packages/ipfs-core/src/runtime/libp2p-pubsub-routers-nodejs.js +++ b/packages/ipfs-core/src/runtime/libp2p-pubsub-routers-nodejs.js @@ -2,6 +2,5 @@ module.exports = { gossipsub: require('libp2p-gossipsub'), - // @ts-ignore - no types floodsub: require('libp2p-floodsub') } diff --git a/packages/ipfs-core/src/runtime/repo-browser.js b/packages/ipfs-core/src/runtime/repo-browser.js index 727c5f0205..95ee8b9eea 100644 --- a/packages/ipfs-core/src/runtime/repo-browser.js +++ b/packages/ipfs-core/src/runtime/repo-browser.js @@ -1,6 +1,8 @@ 'use strict' -const IPFSRepo = require('ipfs-repo') +const { createRepo } = require('ipfs-repo') +const DatastoreLevel = require('datastore-level') +const BlockstoreDatastoreAdapter = require('blockstore-datastore-adapter') /** * @typedef {import('ipfs-repo-migrations').ProgressCallback} MigrationProgressCallback @@ -8,14 +10,24 @@ const IPFSRepo = require('ipfs-repo') /** * @param {import('../types').Print} print + * @param {import('ipfs-core-utils/src/multicodecs')} codecs * @param {object} options * @param {string} [options.path] * @param {boolean} [options.autoMigrate] * @param {MigrationProgressCallback} [options.onMigrationProgress] */ -module.exports = (print, options) => { +module.exports = (print, codecs, options) => { const repoPath = options.path || 'ipfs' - return new IPFSRepo(repoPath, { + + return createRepo(repoPath, (codeOrName) => codecs.getCodec(codeOrName), { + root: new DatastoreLevel(repoPath), + blocks: new BlockstoreDatastoreAdapter( + new DatastoreLevel(`${repoPath}/blocks`) + ), + datastore: new 
DatastoreLevel(`${repoPath}/datastore`), + keys: new DatastoreLevel(`${repoPath}/keys`), + pins: new DatastoreLevel(`${repoPath}/pins`) + }, { autoMigrate: options.autoMigrate, onMigrationProgress: options.onMigrationProgress || print }) diff --git a/packages/ipfs-core/src/runtime/repo-nodejs.js b/packages/ipfs-core/src/runtime/repo-nodejs.js index c111542c66..f521d0054d 100644 --- a/packages/ipfs-core/src/runtime/repo-nodejs.js +++ b/packages/ipfs-core/src/runtime/repo-nodejs.js @@ -1,8 +1,12 @@ 'use strict' const os = require('os') -const IPFSRepo = require('ipfs-repo') +const { createRepo } = require('ipfs-repo') const path = require('path') +const DatastoreFS = require('datastore-fs') +const DatastoreLevel = require('datastore-level') +const BlockstoreDatastoreAdapter = require('blockstore-datastore-adapter') +const { ShardingDatastore, shard: { NextToLast } } = require('datastore-core') /** * @typedef {import('ipfs-repo-migrations').ProgressCallback} MigrationProgressCallback @@ -10,12 +14,13 @@ const path = require('path') /** * @param {import('../types').Print} print + * @param {import('ipfs-core-utils/src/multicodecs')} codecs * @param {object} options * @param {string} [options.path] * @param {boolean} [options.autoMigrate] * @param {MigrationProgressCallback} [options.onMigrationProgress] */ -module.exports = (print, options = {}) => { +module.exports = (print, codecs, options = {}) => { const repoPath = options.path || path.join(os.homedir(), '.jsipfs') /** * @type {number} @@ -35,7 +40,20 @@ module.exports = (print, options = {}) => { print(`${percentComplete.toString().padStart(6, ' ')}% ${message}`) } - return new IPFSRepo(repoPath, { + return createRepo(repoPath, (codeOrName) => codecs.getCodec(codeOrName), { + root: new DatastoreFS(repoPath), + blocks: new BlockstoreDatastoreAdapter( + new ShardingDatastore( + new DatastoreFS(`${repoPath}/blocks`, { + extension: '.data' + }), + new NextToLast(2) + ) + ), + datastore: new 
DatastoreLevel(`${repoPath}/datastore`), + keys: new DatastoreLevel(`${repoPath}/keys`), + pins: new DatastoreLevel(`${repoPath}/pins`) + }, { autoMigrate: options.autoMigrate != null ? options.autoMigrate : true, onMigrationProgress: onMigrationProgress }) diff --git a/packages/ipfs-core/src/types.d.ts b/packages/ipfs-core/src/types.d.ts index eb22cf8213..de398f18dc 100644 --- a/packages/ipfs-core/src/types.d.ts +++ b/packages/ipfs-core/src/types.d.ts @@ -1,15 +1,16 @@ import type { KeyType } from 'libp2p-crypto' import type PeerId from 'peer-id' import type { Config as IPFSConfig } from 'ipfs-core-types/src/config' -import type { Options as IPLDOptions } from 'ipld' -import type Libp2p from 'libp2p' -import type { Libp2pOptions } from 'libp2p' +import type Libp2p, { Libp2pOptions } from 'libp2p' + import type IPFSRepo from 'ipfs-repo' import type { ProgressCallback as MigrationProgressCallback } from 'ipfs-repo-migrations' -import type Network from './components/network' -import type { Options as NetworkOptions } from './components/network' +import type Network, { Options as NetworkOptions } from './components/network' + import type Service from './utils/service' -import CID from 'cids' +import type { CID } from 'multiformats/cid' +import type { BlockCodec, MultibaseCodec } from 'multiformats/codecs/interface' +import type { MultihashHasher } from 'multiformats/hashes/interface' export interface Options { /** @@ -18,7 +19,7 @@ export interface Options { * [`ipfs-repo`](https://github.com/ipfs/js-ipfs-repo). The IPFS constructor * sets many special properties when initializing a repo, so you should usually * not try and call `repoInstance.init()` yourself. - */ + */ init?: InitOptions /** @@ -92,36 +93,36 @@ export interface Options { /** * Modify the default IPLD config. This object - * will be *merged* with the default config; it will not replace it. 
Check IPLD - * [docs](https://github.com/ipld/js-ipld#ipld-constructor) for more information - * on the available options. (Default: [`ipld.js`] - * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-nodejs.js) in Node.js, [`ipld-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-browser.js) - * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld.js) - * in browsers) + * will be *merged* with the default config; it will not replace it. Check IPLD + * [docs](https://github.com/ipld/js-ipld#ipld-constructor) for more information + * on the available options. (Default: [`ipld.js`] + * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-nodejs.js) in Node.js, [`ipld-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-browser.js) + * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld.js) + * in browsers) */ ipld?: Partial /** * The libp2p option allows you to build - * your libp2p node by configuration, or via a bundle function. If you are - * looking to just modify the below options, using the object format is the - * quickest way to get the default features of libp2p. If you need to create a - * more customized libp2p node, such as with custom transports or peer/content - * routers that need some of the ipfs data on startup, a custom bundle is a - * great way to achieve this. - * - You can see the bundle in action in the [custom libp2p example](https://github.com/ipfs/js-ipfs/tree/master/examples/custom-libp2p). - * - Please see [libp2p/docs/CONFIGURATION.md](https://github.com/libp2p/js-libp2p/blob/master/doc/CONFIGURATION.md) - * for the list of options libp2p supports. - * - Default: [`libp2p-nodejs.js`](../src/core/runtime/libp2p-nodejs.js) - * in Node.js, [`libp2p-browser.js`](../src/core/runtime/libp2p-browser.js) in - * browsers. 
+ * your libp2p node by configuration, or via a bundle function. If you are + * looking to just modify the below options, using the object format is the + * quickest way to get the default features of libp2p. If you need to create a + * more customized libp2p node, such as with custom transports or peer/content + * routers that need some of the ipfs data on startup, a custom bundle is a + * great way to achieve this. + * - You can see the bundle in action in the [custom libp2p example](https://github.com/ipfs/js-ipfs/tree/master/examples/custom-libp2p). + * - Please see [libp2p/docs/CONFIGURATION.md](https://github.com/libp2p/js-libp2p/blob/master/doc/CONFIGURATION.md) + * for the list of options libp2p supports. + * - Default: [`libp2p-nodejs.js`](../src/core/runtime/libp2p-nodejs.js) + * in Node.js, [`libp2p-browser.js`](../src/core/runtime/libp2p-browser.js) in + * browsers. */ libp2p?: Partial | Libp2pFactoryFn silent?: boolean } -export type Libp2pFactoryFn = ({ libp2pOptions: Libp2pOptions, options: Options, config: IPFSConfig, datastore: Datastore, peerId: PeerId }) => Libp2p +export interface Libp2pFactoryFn { ({ libp2pOptions: Libp2pOptions, options: Options, config: IPFSConfig, datastore: Datastore, peerId: PeerId }): Libp2p } /** * On first run js-IPFS will initialize a repo which can be customized through this settings @@ -213,7 +214,7 @@ export interface ExperimentalOptions { /** * Prints output to the console */ -export type Print = (...args:any[]) => void +export interface Print { (...args: any[]): void } export interface Preload { (cid: CID): void @@ -227,3 +228,29 @@ export interface MfsPreload { } export type NetworkService = Service + +export interface Block { + cid: CID + bytes: Uint8Array +} + +export interface LoadBaseFn { (codeOrName: number | string): Promise> } +export interface LoadCodecFn { (codeOrName: number | string): Promise> } +export interface LoadHasherFn { (codeOrName: number | string): Promise } + +export interface IPLDOptions { 
+ loadBase: LoadBaseFn + loadCodec: LoadCodecFn + loadHasher: LoadHasherFn + bases: Array> + codecs: Array> + hashers: Array> +} + +export interface BlockCodecStore { + getCodec: (codeOrName: number | string) => Promise> +} + +export interface MultihashHasherStore { + getHasher: (codeOrName: number | string) => Promise> +} diff --git a/packages/ipfs-core/src/utils.js b/packages/ipfs-core/src/utils.js index 8dbb131694..aa40108b2e 100644 --- a/packages/ipfs-core/src/utils.js +++ b/packages/ipfs-core/src/utils.js @@ -2,27 +2,20 @@ 'use strict' const isIpfs = require('is-ipfs') -const CID = require('cids') +const { CID } = require('multiformats/cid') const Key = require('interface-datastore').Key const errCode = require('err-code') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -/** @type {typeof Object.assign} */ -const mergeOptions = require('merge-options') -const resolve = require('./components/dag/resolve') +const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') +const dagPb = require('@ipld/dag-pb') /** * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions + * @typedef {import('@ipld/dag-pb').PBLink} PBLink */ -exports.mergeOptions = mergeOptions - const ERR_BAD_PATH = 'ERR_BAD_PATH' -exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.' -exports.MFS_ROOT_KEY = new Key('/local/filesroot') -exports.MFS_MAX_CHUNK_SIZE = 262144 -exports.MFS_MAX_LINKS = 174 - /** * Returns a well-formed ipfs Path. * The returned path will always be prefixed with /ipfs/ or /ipns/. 
@@ -32,9 +25,16 @@ exports.MFS_MAX_LINKS = 174 * @throws on an invalid @param pathStr */ const normalizePath = (pathStr) => { - if (isIpfs.cid(pathStr) || CID.isCID(pathStr)) { - return `/ipfs/${new CID(pathStr)}` - } else if (isIpfs.path(pathStr)) { + if (pathStr instanceof CID) { + return `/ipfs/${pathStr}` + } + + try { + CID.parse(pathStr) + pathStr = `/ipfs/${pathStr}` + } catch {} + + if (isIpfs.path(pathStr)) { return pathStr } else { throw errCode(new Error(`invalid path: ${pathStr}`), ERR_BAD_PATH) @@ -49,9 +49,9 @@ const normalizePath = (pathStr) => { */ const normalizeCidPath = (path) => { if (path instanceof Uint8Array) { - return new CID(path).toString() + return CID.decode(path).toString() } - if (CID.isCID(path)) { + if (path instanceof CID) { return path.toString() } if (path.indexOf('/ipfs/') === 0) { @@ -65,27 +65,57 @@ const normalizeCidPath = (path) => { /** * Resolve various styles of an ipfs-path to the hash of the target node. - * Follows links in the path. - * - * Accepts formats: - * - - * - /link/to/venus - * - /ipfs//link/to/pluto - * - multihash Buffer + * Follows links in the path * - * @param {import('ipld')} ipld + * @param {import('ipfs-repo').IPFSRepo} repo + * @param {import('ipfs-core-utils/src/multicodecs')} codecs * @param {CID | string} ipfsPath - A CID or IPFS path - * @param {Object} [options] - Optional options passed directly to dag.resolve - * @returns {Promise} + * @param {{ path?: string, signal?: AbortSignal }} [options] - Optional options passed directly to dag.resolve + * @returns {Promise<{ cid: CID, remainderPath: string}>} */ -const resolvePath = async function (ipld, ipfsPath, options = {}) { - const preload = () => {} - preload.stop = () => {} - preload.start = () => {} +const resolvePath = async function (repo, codecs, ipfsPath, options = {}) { + const { + cid, + path + } = toCidAndPath(ipfsPath) + + if (path) { + options.path = path + } + + let lastCid = cid + let lastRemainderPath = options.path || '' + + if 
(lastRemainderPath.startsWith('/')) { + lastRemainderPath = lastRemainderPath.substring(1) + } - const { cid } = await resolve({ ipld, preload })(ipfsPath, { preload: false }) + if (options.path) { + try { + for await (const { value, remainderPath } of resolve(cid, options.path, codecs, repo, { + signal: options.signal + })) { + if (!(value instanceof CID)) { + break + } - return cid + lastRemainderPath = remainderPath + lastCid = value + } + } catch (err) { + // TODO: add error codes to IPLD + if (err.message.startsWith('Object has no property')) { + err.message = `no link named "${lastRemainderPath.split('/')[0]}" under ${lastCid}` + err.code = 'ERR_NO_LINK' + } + throw err + } + } + + return { + cid: lastCid, + remainderPath: lastRemainderPath || '' + } } /** @@ -148,9 +178,90 @@ const withTimeout = withTimeoutOption( async (promise, _options) => await promise ) -exports.normalizePath = normalizePath -exports.normalizeCidPath = normalizeCidPath -exports.resolvePath = resolvePath -exports.mapFile = mapFile -exports.withTimeoutOption = withTimeoutOption -exports.withTimeout = withTimeout +/** + * Retrieves IPLD Nodes along the `path` that is rooted at `cid`. 
+ * + * @param {CID} cid - the CID where the resolving starts + * @param {string} path - the path that should be resolved + * @param {import('ipfs-core-utils/src/multicodecs')} codecs + * @param {import('ipfs-repo').IPFSRepo} repo + * @param {AbortOptions} [options] + */ +const resolve = async function * (cid, path, codecs, repo, options) { + /** + * @param {CID} cid + */ + const load = async (cid) => { + const codec = await codecs.getCodec(cid.code) + const block = await repo.blocks.get(cid, options) + + return codec.decode(block) + } + + const parts = path.split('/').filter(Boolean) + let value = await load(cid) + let lastCid = cid + + if (!parts.length) { + yield { + value, + remainderPath: '' + } + } + + // End iteration if there isn't a CID to follow any more + while (parts.length) { + const key = parts.shift() + + if (!key) { + throw errCode(new Error(`Could not resolve path "${path}"`), 'ERR_INVALID_PATH') + } + + // special case for dag-pb, use the link name as the path segment + if (cid.code === dagPb.code && Array.isArray(value.Links)) { + const link = value.Links.find((/** @type {PBLink} */ l) => l.Name === key) + + if (link) { + yield { + value: link.Hash, + remainderPath: parts.join('/') + } + + value = await load(link.Hash) + lastCid = link.Hash + + continue + } + } + + if (Object.prototype.hasOwnProperty.call(value, key)) { + value = value[key] + + yield { + value, + remainderPath: parts.join('/') + } + } else { + throw errCode(new Error(`no link named "${key}" under ${lastCid}`), 'ERR_NO_LINK') + } + + if (value instanceof CID) { + lastCid = value + value = await load(value) + } + } +} + +module.exports = { + normalizePath, + normalizeCidPath, + resolvePath, + mapFile, + withTimeout, + resolve, + + OFFLINE_ERROR: 'This command must be run in online mode. 
Try running \'ipfs daemon\' first.', + MFS_ROOT_KEY: new Key('/local/filesroot'), + MFS_MAX_CHUNK_SIZE: 262144, + MFS_MAX_LINKS: 174 +} diff --git a/packages/ipfs-core/src/utils/service.js b/packages/ipfs-core/src/utils/service.js index c9b8b50709..c9bf149cf2 100644 --- a/packages/ipfs-core/src/utils/service.js +++ b/packages/ipfs-core/src/utils/service.js @@ -237,4 +237,5 @@ class Service { return Service.try(this) } } + module.exports = Service diff --git a/packages/ipfs-core/test/block-storage.spec.js b/packages/ipfs-core/test/block-storage.spec.js new file mode 100644 index 0000000000..6f2c1d96c8 --- /dev/null +++ b/packages/ipfs-core/test/block-storage.spec.js @@ -0,0 +1,41 @@ +/* eslint-env mocha */ +'use strict' + +const { MemoryBlockstore } = require('interface-blockstore') +const suite = require('interface-blockstore-tests') +const BlockStorage = require('../src/block-storage') + +/** + * @typedef {import('ipfs-repo').IPFSRepo} IPFSRepo + * @typedef {import('interface-blockstore').Blockstore} Blockstore + */ + +describe('block-storage', () => { + describe('interface-blockstore (bitswap online)', () => { + suite({ + setup: () => { + // bitswap forwards on to the blockstore so just + // use the same instance to represent both + const blockstore = new MemoryBlockstore() + blockstore.isStarted = () => true + + return new BlockStorage(blockstore, blockstore) + }, + teardown: () => {} + }) + }) + + describe('interface-blockstore (bitswap offline)', () => { + suite({ + setup: () => { + // bitswap forwards on to the blockstore so just + // use the same instance to represent both + const blockstore = new MemoryBlockstore() + blockstore.isStarted = () => false + + return new BlockStorage(blockstore, blockstore) + }, + teardown: () => {} + }) + }) +}) diff --git a/packages/ipfs-core/test/create-node.spec.js b/packages/ipfs-core/test/create-node.spec.js index 70d220ea8a..d0d4dba41f 100644 --- a/packages/ipfs-core/test/create-node.spec.js +++ 
b/packages/ipfs-core/test/create-node.spec.js @@ -11,9 +11,7 @@ const { supportedKeys } = require('libp2p-crypto/src/keys') const IPFS = require('../src') const defer = require('p-defer') const uint8ArrayToString = require('uint8arrays/to-string') - -// This gets replaced by `create-repo-browser.js` in the browser -const createTempRepo = require('./utils/create-repo-nodejs.js') +const createTempRepo = require('./utils/create-repo') describe('create node', function () { let tempRepo @@ -22,10 +20,6 @@ describe('create node', function () { tempRepo = await createTempRepo() }) - afterEach(() => { - tempRepo.teardown() - }) - it('should create a node with a custom repo path', async function () { this.timeout(80 * 1000) @@ -252,7 +246,6 @@ describe('create node', function () { expect(idA.id).to.not.equal(idB.id) await Promise.all([nodeA.stop(), nodeB.stop()]) - await Promise.all([repoA.teardown(), repoB.teardown()]) }) it('should not error with empty IPLD config', async function () { @@ -307,20 +300,20 @@ describe('create node', function () { PeerId: id.toString(), PrivKey: uint8ArrayToString(id.marshalPrivKey(), 'base64pad') } - } - }) - - const node = await IPFS.create({ - repo: repo.path, + }, + autoMigrate: true, onMigrationProgress: () => { // migrations are happening deferred.resolve() } }) + const node = await IPFS.create({ + repo + }) + await deferred.promise await node.stop() - await repo.teardown() }) }) diff --git a/packages/ipfs-core/test/exports.spec.js b/packages/ipfs-core/test/exports.spec.js index e4b63d24f7..14da7cff22 100644 --- a/packages/ipfs-core/test/exports.spec.js +++ b/packages/ipfs-core/test/exports.spec.js @@ -3,12 +3,8 @@ const crypto = require('libp2p-crypto') const isIPFS = require('is-ipfs') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { multiaddr } = require('multiaddr') -const multibase = require('multibase') -const multihashing = require('multihashing-async') -const multihash = multihashing.multihash 
-const multicodec = require('multicodec') const PeerId = require('peer-id') const { expect } = require('aegir/utils/chai') @@ -20,10 +16,6 @@ describe('exports', () => { expect(Ipfs.isIPFS).to.equal(isIPFS) expect(Ipfs.CID).to.equal(CID) expect(Ipfs.multiaddr).to.equal(multiaddr) - expect(Ipfs.multibase).to.equal(multibase) - expect(Ipfs.multihash).to.equal(multihash) - expect(Ipfs.multihashing).to.equal(multihashing) - expect(Ipfs.multicodec).to.equal(multicodec) expect(Ipfs.PeerId).to.equal(PeerId) }) }) diff --git a/packages/ipfs-core/test/fixtures/planets/mercury/wiki.md b/packages/ipfs-core/test/fixtures/planets/mercury/wiki.md deleted file mode 100644 index 1b4039ba80..0000000000 --- a/packages/ipfs-core/test/fixtures/planets/mercury/wiki.md +++ /dev/null @@ -1,12 +0,0 @@ -# Mercury (planet) -> From Wikipedia, the free encyclopedia - -Mercury is the smallest and innermost planet in the Solar System. Its orbital period around the Sun of 87.97 days is the shortest of all the planets in the Solar System. It is named after the Roman deity Mercury, the messenger of the gods. - -Like Venus, Mercury orbits the Sun within Earth's orbit as an inferior planet, and never exceeds 28° away from the Sun. When viewed from Earth, this proximity to the Sun means the planet can only be seen near the western or eastern horizon during the early evening or early morning. At this time it may appear as a bright star-like object, but is often far more difficult to observe than Venus. The planet telescopically displays the complete range of phases, similar to Venus and the Moon, as it moves in its inner orbit relative to Earth, which reoccurs over the so-called synodic period approximately every 116 days. - -Mercury is gravitationally locked with the Sun in a 3:2 spin-orbit resonance, and rotates in a way that is unique in the Solar System. As seen relative to the fixed stars, it rotates on its axis exactly three times for every two revolutions it makes around the Sun. 
As seen from the Sun, in a frame of reference that rotates with the orbital motion, it appears to rotate only once every two Mercurian years. An observer on Mercury would therefore see only one day every two years. - -Mercury's axis has the smallest tilt of any of the Solar System's planets (about ​1â„30 degree). Its orbital eccentricity is the largest of all known planets in the Solar System; at perihelion, Mercury's distance from the Sun is only about two-thirds (or 66%) of its distance at aphelion. Mercury's surface appears heavily cratered and is similar in appearance to the Moon's, indicating that it has been geologically inactive for billions of years. Having almost no atmosphere to retain heat, it has surface temperatures that vary diurnally more than on any other planet in the Solar System, ranging from 100 K (−173 °C; −280 °F) at night to 700 K (427 °C; 800 °F) during the day across the equatorial regions. The polar regions are constantly below 180 K (−93 °C; −136 °F). The planet has no known natural satellites. - -Two spacecraft have visited Mercury: Mariner 10 flew by in 1974 and 1975; and MESSENGER, launched in 2004, orbited Mercury over 4,000 times in four years before exhausting its fuel and crashing into the planet's surface on April 30, 2015. diff --git a/packages/ipfs-core/test/fixtures/planets/solar-system.md b/packages/ipfs-core/test/fixtures/planets/solar-system.md deleted file mode 100644 index f249cd3a53..0000000000 --- a/packages/ipfs-core/test/fixtures/planets/solar-system.md +++ /dev/null @@ -1,10 +0,0 @@ -# Solar System -> From Wikipedia, the free encyclopedia - -The Solar System is the gravitationally bound system comprising the Sun and the objects that orbit it, either directly or indirectly. Of those objects that orbit the Sun directly, the largest eight are the planets, with the remainder being smaller objects, such as dwarf planets and small Solar System bodies. 
Of the objects that orbit the Sun indirectly, the moons, two are larger than the smallest planet, Mercury. - -The Solar System formed 4.6 billion years ago from the gravitational collapse of a giant interstellar molecular cloud. The vast majority of the system's mass is in the Sun, with the majority of the remaining mass contained in Jupiter. The four smaller inner planets, Mercury, Venus, Earth and Mars, are terrestrial planets, being primarily composed of rock and metal. The four outer planets are giant planets, being substantially more massive than the terrestrials. The two largest, Jupiter and Saturn, are gas giants, being composed mainly of hydrogen and helium; the two outermost planets, Uranus and Neptune, are ice giants, being composed mostly of substances with relatively high melting points compared with hydrogen and helium, called volatiles, such as water, ammonia and methane. All eight planets have almost circular orbits that lie within a nearly flat disc called the ecliptic. - -The Solar System also contains smaller objects. The asteroid belt, which lies between the orbits of Mars and Jupiter, mostly contains objects composed, like the terrestrial planets, of rock and metal. Beyond Neptune's orbit lie the Kuiper belt and scattered disc, which are populations of trans-Neptunian objects composed mostly of ices, and beyond them a newly discovered population of sednoids. Within these populations are several dozen to possibly tens of thousands of objects large enough that they have been rounded by their own gravity. Such objects are categorized as dwarf planets. Identified dwarf planets include the asteroid Ceres and the trans-Neptunian objects Pluto and Eris. In addition to these two regions, various other small-body populations, including comets, centaurs and interplanetary dust clouds, freely travel between regions. 
Six of the planets, at least four of the dwarf planets, and many of the smaller bodies are orbited by natural satellites, usually termed "moons" after the Moon. Each of the outer planets is encircled by planetary rings of dust and other small objects. - -The solar wind, a stream of charged particles flowing outwards from the Sun, creates a bubble-like region in the interstellar medium known as the heliosphere. The heliopause is the point at which pressure from the solar wind is equal to the opposing pressure of the interstellar medium; it extends out to the edge of the scattered disc. The Oort cloud, which is thought to be the source for long-period comets, may also exist at a distance roughly a thousand times further than the heliosphere. The Solar System is located in the Orion Arm, 26,000 light-years from the center of the Milky Way. diff --git a/packages/ipfs-core/test/init.spec.js b/packages/ipfs-core/test/init.spec.js index 273594a7fb..93c1074708 100644 --- a/packages/ipfs-core/test/init.spec.js +++ b/packages/ipfs-core/test/init.spec.js @@ -4,7 +4,7 @@ const { expect } = require('aegir/utils/chai') const { isNode } = require('ipfs-utils/src/env') -const uint8ArrayFromString = require('uint8arrays/from-string') +const { CID } = require('multiformats/cid') const { nanoid } = require('nanoid') const PeerId = require('peer-id') const { supportedKeys } = require('libp2p-crypto/src/keys') @@ -109,7 +109,7 @@ describe('init', function () { it('should write init docs', async () => { await init({ bits: 512 }) - const multihash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB' + const multihash = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') const node = await ipfs.object.get(multihash, { enc: 'base58' }) expect(node.Links).to.exist() @@ -119,8 +119,8 @@ describe('init', function () { await init({ bits: 512, emptyRepo: true }) // Should not have default assets - const multihash = 
uint8ArrayFromString('12205e7c3ce237f936c76faf625e90f7751a9f5eeb048f59873303c215e9cce87599', 'base16') - await expect(ipfs.object.get(multihash, {})).to.eventually.be.rejected() + const multihash = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB') + await expect(ipfs.object.get(multihash, {})).to.eventually.be.rejected().with.property('code', 'ERR_NOT_FOUND') }) it('should apply one profile', async () => { diff --git a/packages/ipfs-core/test/ipld.spec.js b/packages/ipfs-core/test/ipld.spec.js index 86826e803e..e2adcad5a6 100644 --- a/packages/ipfs-core/test/ipld.spec.js +++ b/packages/ipfs-core/test/ipld.spec.js @@ -2,8 +2,9 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const ipldDagPb = require('ipld-dag-pb') const createNode = require('./utils/create-node') +const uint8ArrayToString = require('uint8arrays/to-string') +const uint8ArrayFromString = require('uint8arrays/from-string') describe('ipld', function () { this.timeout(10 * 1000) @@ -12,10 +13,17 @@ describe('ipld', function () { let cleanup before(async () => { + const customCodec = { + name: 'custom-codec', + code: 1337, + encode: (str) => uint8ArrayFromString(str), + decode: (buf) => uint8ArrayToString(buf) + } + const res = await createNode({ ipld: { - formats: [ - require('ipld-git') + codecs: [ + customCodec ] } }) @@ -25,7 +33,7 @@ describe('ipld', function () { after(() => cleanup()) - it('should allow formats to be specified without overwriting others', async () => { + it('should allow codecs to be specified without overwriting others', async () => { const dagCborNode = { hello: 'world' } @@ -34,13 +42,23 @@ describe('ipld', function () { hashAlg: 'sha2-256' }) - const dagPbNode = new ipldDagPb.DAGNode(new Uint8Array(0), [], 0) + const dagPbNode = { + Data: new Uint8Array(0), + Links: [] + } const cid2 = await ipfs.dag.put(dagPbNode, { format: 'dag-pb', hashAlg: 'sha2-256' }) + const customNode = 'totally custom' + const cid3 = await ipfs.dag.put(customNode, { + format: 
'custom-codec', + hashAlg: 'sha2-256' + }) + await expect(ipfs.dag.get(cid1)).to.eventually.have.property('value').that.deep.equals(dagCborNode) await expect(ipfs.dag.get(cid2)).to.eventually.have.property('value').that.deep.equals(dagPbNode) + await expect(ipfs.dag.get(cid3)).to.eventually.have.property('value').that.deep.equals(customNode) }) }) diff --git a/packages/ipfs-core/test/mfs-preload.spec.js b/packages/ipfs-core/test/mfs-preload.spec.js index 5b5f2c422a..1ab979f5e7 100644 --- a/packages/ipfs-core/test/mfs-preload.spec.js +++ b/packages/ipfs-core/test/mfs-preload.spec.js @@ -3,16 +3,16 @@ const { expect } = require('aegir/utils/chai') const delay = require('delay') -const multihashing = require('multihashing-async') +const { sha256 } = require('multiformats/hashes/sha2') const { nanoid } = require('nanoid') const uint8ArrayFromString = require('uint8arrays/from-string') -const CID = require('cids') +const { CID } = require('multiformats/cid') const waitFor = require('./utils/wait-for') const mfsPreload = require('../src/mfs-preload') const fakeCid = async () => { - const mh = await multihashing(uint8ArrayFromString(nanoid()), 'sha2-256') - return new CID(mh) + const mh = await sha256.digest(uint8ArrayFromString(nanoid())) + return CID.createV0(mh) } const createMockFilesStat = (cids = []) => { diff --git a/packages/ipfs-core/test/name.spec.js b/packages/ipfs-core/test/name.spec.js index 5612f9ec51..403906a768 100644 --- a/packages/ipfs-core/test/name.spec.js +++ b/packages/ipfs-core/test/name.spec.js @@ -4,7 +4,6 @@ const { expect } = require('aegir/utils/chai') const sinon = require('sinon') const delay = require('delay') -const { Key } = require('interface-datastore') const PeerId = require('peer-id') const errCode = require('err-code') const ipns = require('ipns') @@ -118,26 +117,6 @@ describe('name', function () { .with.property('code', 'ERR_INVALID_PEER_ID') }) - it('should fail to publish if receives an invalid datastore key', async () => { - const 
routing = { - get: sinon.stub().rejects(errCode(new Error('not found'), 'ERR_NOT_FOUND')) - } - const datastore = { - get: sinon.stub().rejects(errCode(new Error('not found'), 'ERR_NOT_FOUND')), - put: sinon.stub().resolves() - } - const publisher = new IpnsPublisher(routing, datastore) - const peerId = await PeerId.create() - - const stub = sinon.stub(Key, 'isKey').returns(false) - - await expect(publisher.publish(peerId.privKey, ipfsRef)) - .to.eventually.be.rejected() - .with.property('code', 'ERR_INVALID_DATASTORE_KEY') - - stub.restore() - }) - it('should fail to publish if we receive a unexpected error getting from datastore', async () => { const routing = {} const datastore = { diff --git a/packages/ipfs-core/test/node.js b/packages/ipfs-core/test/node.js deleted file mode 100644 index afa37ca944..0000000000 --- a/packages/ipfs-core/test/node.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -require('./utils') diff --git a/packages/ipfs-core/test/preload.spec.js b/packages/ipfs-core/test/preload.spec.js index f565a69822..70a7b6834c 100644 --- a/packages/ipfs-core/test/preload.spec.js +++ b/packages/ipfs-core/test/preload.spec.js @@ -7,6 +7,7 @@ const { expect } = require('aegir/utils/chai') const all = require('it-all') const MockPreloadNode = require('./utils/mock-preload-node-utils') const createNode = require('./utils/create-node') +const dagPb = require('@ipld/dag-pb') describe('preload', () => { let ipfs @@ -174,18 +175,19 @@ describe('preload', () => { const linkCid = await ipfs.object.put({ Data: uint8ArrayFromString(nanoid()), Links: [] }) const linkNode = await ipfs.object.get(linkCid) + const linkBuf = dagPb.encode(linkNode) const parentCid = await ipfs.object.put({ Data: uint8ArrayFromString(nanoid()), Links: [{ - name: 'link', - cid: linkCid, - size: linkNode.size + Name: 'link', + Hash: linkCid, + Tsize: linkBuf.length }] }) await MockPreloadNode.clearPreloadCids() - const cid = await ipfs.object.patch.rmLink(parentCid, { name: 'link' }) + const 
cid = await ipfs.object.patch.rmLink(parentCid, { Name: 'link' }) await MockPreloadNode.waitForCids(cid) }) @@ -215,24 +217,24 @@ describe('preload', () => { it('should preload content added with block.put', async function () { this.timeout(50 * 1000) - const block = await ipfs.block.put(uint8ArrayFromString(nanoid())) - await MockPreloadNode.waitForCids(block.cid) + const cid = await ipfs.block.put(uint8ArrayFromString(nanoid())) + await MockPreloadNode.waitForCids(cid) }) it('should preload content retrieved with block.get', async function () { this.timeout(50 * 1000) - const block = await ipfs.block.put(uint8ArrayFromString(nanoid()), { preload: false }) + const cid = await ipfs.block.put(uint8ArrayFromString(nanoid()), { preload: false }) await MockPreloadNode.clearPreloadCids() - await ipfs.block.get(block.cid) - await MockPreloadNode.waitForCids(block.cid) + await ipfs.block.get(cid) + await MockPreloadNode.waitForCids(cid) }) it('should preload content retrieved with block.stat', async function () { this.timeout(50 * 1000) - const block = await ipfs.block.put(uint8ArrayFromString(nanoid()), { preload: false }) + const cid = await ipfs.block.put(uint8ArrayFromString(nanoid()), { preload: false }) await MockPreloadNode.clearPreloadCids() - await ipfs.block.stat(block.cid) - await MockPreloadNode.waitForCids(block.cid) + await ipfs.block.stat(cid) + await MockPreloadNode.waitForCids(cid) }) it('should preload content added with dag.put', async function () { diff --git a/packages/ipfs-core/test/utils.js b/packages/ipfs-core/test/utils.js deleted file mode 100644 index 30b4029530..0000000000 --- a/packages/ipfs-core/test/utils.js +++ /dev/null @@ -1,79 +0,0 @@ -/* eslint max-nested-callbacks: ["error", 8] */ -/* eslint-env mocha */ -'use strict' - -const { expect } = require('aegir/utils/chai') -const fs = require('fs') -const { fromB58String } = require('multihashing-async').multihash -const utils = require('../src/utils') -const createNode = 
require('./utils/create-node') - -describe('utils', () => { - const rootHash = 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys' - const rootPath = `/ipfs/${rootHash}` - const rootMultihash = fromB58String(rootHash) - const aboutHash = 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q' - const aboutPath = `${rootPath}/mercury` - const aboutMultihash = fromB58String(aboutHash) - - describe('resolvePath', function () { - this.timeout(100 * 1000) - const fixtures = [ - 'test/fixtures/planets/mercury/wiki.md', - 'test/fixtures/planets/solar-system.md' - ].map(path => ({ - path, - content: fs.readFileSync(path) - })) - - let ipfs - let cleanup - - before(async () => { - const res = await createNode({ - config: { - Pubsub: { - Enabled: false - } - } - }) - ipfs = res.ipfs - cleanup = res.cleanup - - await ipfs.add(fixtures) - }) - - after(() => cleanup()) - - it('handles base58 hash format', async () => { - const hash = await utils.resolvePath(ipfs.ipld, rootHash) - - expect(hash).to.have.property('bytes').that.deep.equals(rootMultihash) - }) - - it('handles multihash format', async () => { - const hash = await utils.resolvePath(ipfs.ipld, aboutMultihash) - - expect(hash).to.have.property('bytes').that.deep.equals(aboutMultihash) - }) - - it('handles ipfs paths format', async function () { - this.timeout(200 * 1000) - const hash = await utils.resolvePath(ipfs.ipld, aboutPath) - - expect(hash).to.have.property('bytes').that.deep.equals(aboutMultihash) - }) - - it('should error on invalid hashes', () => { - return expect(utils.resolvePath(ipfs.ipld, '/ipfs/asdlkjahsdfkjahsdfd')) - .to.eventually.be.rejected() - }) - - it('should error when a link doesn\'t exist', () => { - return expect(utils.resolvePath(ipfs.ipld, `${aboutPath}/fusion`)) - .to.eventually.be.rejected() - .and.have.property('message') - .that.includes('no link named "fusion" under QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q') - }) - }) -}) diff --git a/packages/ipfs-core/test/utils.spec.js 
b/packages/ipfs-core/test/utils.spec.js new file mode 100644 index 0000000000..ed4f9e0632 --- /dev/null +++ b/packages/ipfs-core/test/utils.spec.js @@ -0,0 +1,75 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const { expect } = require('aegir/utils/chai') +const utils = require('../src/utils') +const createTempRepo = require('./utils/create-repo') +const { importer } = require('ipfs-unixfs-importer') +const all = require('it-all') +const codecs = require('./utils/codecs') + +describe('utils', () => { + let rootCid + let aboutCid + let aboutPath + let aboutMultihash + + describe('resolvePath', function () { + this.timeout(100 * 1000) + + /** @type {import('ipfs-repo').IPFSRepo} */ + let repo + + before(async () => { + repo = await createTempRepo() + + const res = await all(importer([{ + path: '/dir/contents.txt', + content: Uint8Array.from([0, 1, 2, 3]) + }], repo.blocks, { + wrapWithDirectory: true + })) + + rootCid = res[2].cid + + aboutCid = res[0].cid + aboutPath = `/ipfs/${aboutCid}` + aboutMultihash = aboutCid.multihash.bytes + }) + + it('handles base58 hash format', async () => { + const { cid, remainderPath } = await utils.resolvePath(repo, codecs, rootCid) + + expect(cid.toString()).to.equal(rootCid.toString()) + expect(remainderPath).to.be.empty() + }) + + it('handles multihash format', async () => { + const { cid, remainderPath } = await utils.resolvePath(repo, codecs, aboutMultihash) + + expect(cid.toString()).to.equal(aboutCid.toString()) + expect(remainderPath).to.be.empty() + }) + + it('handles ipfs paths format', async function () { + this.timeout(200 * 1000) + const { cid, remainderPath } = await utils.resolvePath(repo, codecs, aboutPath) + + expect(cid.toString()).to.equal(aboutCid.toString()) + expect(remainderPath).to.be.empty() + }) + + it('should error on invalid hashes', () => { + return expect(utils.resolvePath(repo, codecs, '/ipfs/asdlkjahsdfkjahsdfd')) + .to.eventually.be.rejected() + }) + + 
it('should error when a link doesn\'t exist', () => { + return expect(utils.resolvePath(repo, codecs, `${aboutPath}/fusion`)) + .to.eventually.be.rejected() + .and.have.property('message') + .that.includes(`no link named "fusion" under ${aboutCid}`) + }) + }) +}) diff --git a/packages/ipfs-core/test/utils/codecs.js b/packages/ipfs-core/test/utils/codecs.js new file mode 100644 index 0000000000..0e8233c840 --- /dev/null +++ b/packages/ipfs-core/test/utils/codecs.js @@ -0,0 +1,12 @@ +/* eslint-env mocha */ +'use strict' + +const Multicodecs = require('ipfs-core-utils/src/multicodecs') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const raw = require('multiformats/codecs/raw') + +module.exports = new Multicodecs({ + codecs: [dagPb, dagCbor, raw], + loadCodec: () => Promise.reject(new Error('No extra codecs configured')) +}) diff --git a/packages/ipfs-core/test/utils/create-backend.js b/packages/ipfs-core/test/utils/create-backend.js new file mode 100644 index 0000000000..7ceec596c0 --- /dev/null +++ b/packages/ipfs-core/test/utils/create-backend.js @@ -0,0 +1,19 @@ +'use strict' + +const { MemoryDatastore } = require('interface-datastore') +const BlockstoreDatastoreAdapter = require(('blockstore-datastore-adapter')) + +function createBackend (overrides = {}) { + return { + datastore: new MemoryDatastore(), + blocks: new BlockstoreDatastoreAdapter( + new MemoryDatastore() + ), + pins: new MemoryDatastore(), + keys: new MemoryDatastore(), + root: new MemoryDatastore(), + ...overrides + } +} + +module.exports = createBackend diff --git a/packages/ipfs-core/test/utils/create-node.js b/packages/ipfs-core/test/utils/create-node.js index a3fb8a878f..3770de5b2e 100644 --- a/packages/ipfs-core/test/utils/create-node.js +++ b/packages/ipfs-core/test/utils/create-node.js @@ -2,7 +2,7 @@ const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const IPFS = require('../../') -const createTempRepo = 
require('./create-repo-nodejs') +const createTempRepo = require('./create-repo') module.exports = async (config = {}) => { const repo = await createTempRepo() @@ -25,7 +25,6 @@ module.exports = async (config = {}) => { repo, cleanup: async () => { await ipfs.stop() - await repo.teardown() } } } diff --git a/packages/ipfs-core/test/utils/create-repo-browser.js b/packages/ipfs-core/test/utils/create-repo-browser.js deleted file mode 100644 index 109ea400be..0000000000 --- a/packages/ipfs-core/test/utils/create-repo-browser.js +++ /dev/null @@ -1,95 +0,0 @@ -/* global self */ -'use strict' - -const IPFSRepo = require('ipfs-repo') -const { nanoid } = require('nanoid') - -const idb = self.indexedDB || - self.mozIndexedDB || - self.webkitIndexedDB || - self.msIndexedDB - -/** - * @param {object} options - * @param {string} [options.path] - * @param {number} [options.version] - * @param {number} [options.spec] - * @param {import('ipfs-core-types/src/config').Config} [options.config] - */ -module.exports = async function createTempRepo (options = {}) { - options.path = options.path || `ipfs-${nanoid()}` - - await createDB(options.path, (objectStore) => { - const encoder = new TextEncoder() - - if (options.version) { - objectStore.put(encoder.encode(`${options.version}`), '/version') - } - - if (options.spec) { - objectStore.put(encoder.encode(`${options.spec}`), '/datastore_spec') - } - - if (options.config) { - objectStore.put(encoder.encode(JSON.stringify(options.config)), '/config') - } - }) - - const repo = new IPFSRepo(options.path) - - repo.teardown = async () => { - try { - await repo.close() - } catch (err) { - if (!err.message.includes('already closed')) { - throw err - } - } - - idb.deleteDatabase(options.path) - idb.deleteDatabase(options.path + '/blocks') - } - - return repo -} - -/** - * Allows pre-filling the root IndexedDB object store with data - * - * @param {string} path - * @param {(objectStore: IDBObjectStore) => void} fn - */ -function createDB (path, 
fn) { - return new Promise((resolve, reject) => { - const request = idb.open(path, 1) - - request.onupgradeneeded = () => { - const db = request.result - - db.onerror = () => { - reject(new Error('Could not create database')) - } - - db.createObjectStore(path) - } - - request.onsuccess = () => { - const db = request.result - - const transaction = db.transaction(path, 'readwrite') - transaction.onerror = () => { - reject(new Error('Could not add data to database')) - } - transaction.oncomplete = () => { - db.close() - resolve() - } - - const objectStore = transaction.objectStore(path) - - fn(objectStore) - - transaction.commit() - } - }) -} diff --git a/packages/ipfs-core/test/utils/create-repo-nodejs.js b/packages/ipfs-core/test/utils/create-repo-nodejs.js deleted file mode 100644 index 53e75be67b..0000000000 --- a/packages/ipfs-core/test/utils/create-repo-nodejs.js +++ /dev/null @@ -1,49 +0,0 @@ -'use strict' - -const IPFSRepo = require('ipfs-repo') -const clean = require('./clean') -const os = require('os') -const path = require('path') -const { nanoid } = require('nanoid') -const fs = require('fs').promises - -/** - * @param {object} options - * @param {string} [options.path] - * @param {number} [options.version] - * @param {number} [options.spec] - * @param {import('ipfs-core-types/src/config').Config} [options.config] - */ -module.exports = async function createTempRepo (options = {}) { - options.path = options.path || path.join(os.tmpdir(), '/ipfs-test-' + nanoid()) - - await fs.mkdir(options.path) - - if (options.version) { - await fs.writeFile(path.join(options.path, 'version'), `${options.version}`) - } - - if (options.spec) { - await fs.writeFile(path.join(options.path, 'spec'), `${options.spec}`) - } - - if (options.config) { - await fs.writeFile(path.join(options.path, 'config'), JSON.stringify(options.config)) - } - - const repo = new IPFSRepo(options.path) - - repo.teardown = async () => { - try { - await repo.close() - } catch (err) { - if 
(!err.message.includes('already closed')) { - throw err - } - } - - await clean(options.path) - } - - return repo -} diff --git a/packages/ipfs-core/test/utils/create-repo.js b/packages/ipfs-core/test/utils/create-repo.js new file mode 100644 index 0000000000..d6cebf0a82 --- /dev/null +++ b/packages/ipfs-core/test/utils/create-repo.js @@ -0,0 +1,47 @@ +'use strict' + +const { nanoid } = require('nanoid') +const { createRepo, locks: { memory } } = require('ipfs-repo') +const codecs = require('./codecs') +const createBackend = require('./create-backend') +const { Key } = require('interface-datastore') + +/** + * @param {object} options + * @param {string} [options.path] + * @param {number} [options.version] + * @param {number} [options.spec] + * @param {boolean} [options.autoMigrate] + * @param {(version: number, percentComplete: string, message: string) => void} [options.onMigrationProgress] + * @param {import('ipfs-core-types/src/config').Config} [options.config] + */ +module.exports = async function createTempRepo (options = {}) { + const path = options.path || 'ipfs-test-' + nanoid() + + const backend = createBackend() + const encoder = new TextEncoder() + + if (options.version) { + await backend.root.open() + await backend.root.put(new Key('/version'), encoder.encode(`${options.version}`)) + await backend.root.close() + } + + if (options.spec) { + await backend.root.open() + await backend.root.put(new Key('/datastore_spec'), encoder.encode(`${options.spec}`)) + await backend.root.close() + } + + if (options.config) { + await backend.root.open() + await backend.root.put(new Key('/config'), encoder.encode(JSON.stringify(options.config))) + await backend.root.close() + } + + return createRepo(path, (codeOrName) => codecs.getCodec(codeOrName), backend, { + repoLock: memory, + autoMigrate: options.autoMigrate, + onMigrationProgress: options.onMigrationProgress + }) +} diff --git a/packages/ipfs-core/test/utils/mock-preload-node-utils.js
b/packages/ipfs-core/test/utils/mock-preload-node-utils.js index 300adf5585..fe71a6d79a 100644 --- a/packages/ipfs-core/test/utils/mock-preload-node-utils.js +++ b/packages/ipfs-core/test/utils/mock-preload-node-utils.js @@ -49,7 +49,7 @@ module.exports.waitForCids = async (cids, opts) => { }, { missing: [], duplicates: [] }) if (duplicates.length) { - throw errCode(new Error(`Multiple occurances of ${duplicates} found`), 'ERR_DUPLICATE') + throw errCode(new Error(`Multiple occurrences of ${duplicates} found`), 'ERR_DUPLICATE') } return missing.length === 0 diff --git a/packages/ipfs-core/tsconfig.json b/packages/ipfs-core/tsconfig.json index bf6e95d877..fe926578b4 100644 --- a/packages/ipfs-core/tsconfig.json +++ b/packages/ipfs-core/tsconfig.json @@ -13,6 +13,9 @@ }, { "path": "../ipfs-core-utils" + }, + { + "path": "../ipfs-http-client" } ] } diff --git a/packages/ipfs-daemon/package.json b/packages/ipfs-daemon/package.json index 8142cd0919..074868a94b 100644 --- a/packages/ipfs-daemon/package.json +++ b/packages/ipfs-daemon/package.json @@ -23,7 +23,7 @@ "scripts": { "lint": "aegir lint", "test": "npm run test:node", - "test:node": "aegir test -t node", + "test:node": "aegir test -t node -- --exit", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i ipfs-core-types -i @mapbox/node-pre-gyp", @@ -32,23 +32,18 @@ "dependencies": { "@mapbox/node-pre-gyp": "^1.0.5", "debug": "^4.1.1", - "dlv": "^1.1.3", "ipfs-core": "^0.8.0", "ipfs-core-types": "^0.5.2", "ipfs-grpc-server": "^0.3.4", - "ipfs-http-client": "^50.1.2", "ipfs-http-gateway": "^0.4.3", "ipfs-http-server": "^0.5.2", - "ipfs-utils": "^8.1.2", + "ipfs-utils": "^8.1.4", "just-safe-set": "^2.2.1", - "libp2p": "^0.31.6", - "libp2p-delegated-content-routing": "^0.10.0", - "libp2p-delegated-peer-routing": "^0.9.0", - "libp2p-webrtc-star": "^0.22.2", - "multiaddr": "^9.0.1" + "libp2p": "^0.32.0", + "libp2p-webrtc-star": "^0.23.0" }, 
"devDependencies": { - "aegir": "^33.0.0", + "aegir": "^34.0.2", "node-fetch": "npm:@achingbrain/node-fetch@^2.6.4", "ws": "^7.3.1" }, diff --git a/packages/ipfs-daemon/src/index.js b/packages/ipfs-daemon/src/index.js index 44c33dee30..0cfc55f9d5 100644 --- a/packages/ipfs-daemon/src/index.js +++ b/packages/ipfs-daemon/src/index.js @@ -1,21 +1,13 @@ 'use strict' const log = require('debug')('ipfs:daemon') -const get = require('dlv') const set = require('just-safe-set') -const { Multiaddr } = require('multiaddr') // @ts-ignore - no types const WebRTCStar = require('libp2p-webrtc-star') -// @ts-ignore - no types -const DelegatedPeerRouter = require('libp2p-delegated-peer-routing') -// @ts-ignore - no types -const DelegatedContentRouter = require('libp2p-delegated-content-routing') -const { create: ipfsHttpClient } = require('ipfs-http-client') const IPFS = require('ipfs-core') const HttpApi = require('ipfs-http-server') const HttpGateway = require('ipfs-http-gateway') const gRPCServer = require('ipfs-grpc-server') -const createRepo = require('ipfs-core/src/runtime/repo-nodejs') const { isElectron } = require('ipfs-utils/src/env') class Daemon { @@ -43,33 +35,23 @@ class Daemon { async start () { log('starting') - const repo = typeof this._options.repo === 'string' || this._options.repo == null - ? 
createRepo(console.info, { // eslint-disable-line no-console - path: this._options.repo, - autoMigrate: Boolean(this._options.repoAutoMigrate) - }) - : this._options.repo - // start the daemon - const ipfsOpts = Object.assign({}, { start: true, libp2p: getLibp2p }, this._options, { repo }) - this._ipfs = await IPFS.create(ipfsOpts) + this._ipfs = await IPFS.create( + Object.assign({}, { start: true, libp2p: getLibp2p }, this._options) + ) // start HTTP servers (if API or Gateway is enabled in options) - // @ts-ignore http api expects .libp2p and .ipld properties - const httpApi = new HttpApi(this._ipfs) - this._httpApi = await httpApi.start() - - const httpGateway = new HttpGateway(this._ipfs) - this._httpGateway = await httpGateway.start() - - // for the CLI to know the whereabouts of the API - // @ts-ignore - _apiServers is possibly undefined - if (this._httpApi._apiServers.length) { - // @ts-ignore - _apiServers is possibly undefined - await repo.apiAddr.set(this._httpApi._apiServers[0].info.ma) - } + this._httpApi = new HttpApi(this._ipfs) + await this._httpApi.start() - this._grpcServer = await gRPCServer(this._ipfs) + this._httpGateway = new HttpGateway(this._ipfs) + await this._httpGateway.start() + + const config = await this._ipfs.config.getAll() + + if (config.Addresses && config.Addresses.RPC) { + this._grpcServer = await gRPCServer(this._ipfs) + } log('started') } @@ -120,32 +102,6 @@ function getLibp2p ({ libp2pOptions, options, config, peerId }) { libp2pOptions.modules.transport.push(WebRTCStar) } - // Set up Delegate Routing based on the presence of Delegates in the config - const delegateHosts = get(options, 'config.Addresses.Delegates', - get(config, 'Addresses.Delegates', []) - ) - - if (delegateHosts.length > 0) { - // Pick a random delegate host - const delegateString = delegateHosts[Math.floor(Math.random() * delegateHosts.length)] - const delegateAddr = new Multiaddr(delegateString).toOptions() - const delegateApiOptions = { - host: 
delegateAddr.host, - // port is a string atm, so we need to convert for the check - // @ts-ignore - parseInt(input:string) => number - protocol: parseInt(delegateAddr.port) === 443 ? 'https' : 'http', - port: delegateAddr.port - } - - const delegateHttpClient = ipfsHttpClient(delegateApiOptions) - - libp2pOptions.modules.contentRouting = libp2pOptions.modules.contentRouting || [] - libp2pOptions.modules.contentRouting.push(new DelegatedContentRouter(peerId, delegateHttpClient)) - - libp2pOptions.modules.peerRouting = libp2pOptions.modules.peerRouting || [] - libp2pOptions.modules.peerRouting.push(new DelegatedPeerRouter(delegateHttpClient)) - } - const Libp2p = require('libp2p') return new Libp2p(libp2pOptions) } diff --git a/packages/ipfs-daemon/tsconfig.json b/packages/ipfs-daemon/tsconfig.json index eb2e789b86..219e6201e9 100644 --- a/packages/ipfs-daemon/tsconfig.json +++ b/packages/ipfs-daemon/tsconfig.json @@ -13,15 +13,9 @@ { "path": "../ipfs-core-types" }, - { - "path": "../ipfs-core-utils" - }, { "path": "../ipfs-grpc-server" }, - { - "path": "../ipfs-http-client" - }, { "path": "../ipfs-http-gateway" }, diff --git a/packages/ipfs-grpc-client/.aegir.js b/packages/ipfs-grpc-client/.aegir.js index 4dc0a9fba2..f205b27e57 100644 --- a/packages/ipfs-grpc-client/.aegir.js +++ b/packages/ipfs-grpc-client/.aegir.js @@ -3,6 +3,6 @@ /** @type {import('aegir').PartialOptions} */ module.exports = { build: { - bundlesizeMax: '56KB' + bundlesizeMax: '53KB' } } diff --git a/packages/ipfs-grpc-client/package.json b/packages/ipfs-grpc-client/package.json index 6897f19a59..a4474643c4 100644 --- a/packages/ipfs-grpc-client/package.json +++ b/packages/ipfs-grpc-client/package.json @@ -34,25 +34,25 @@ "dependencies": { "@improbable-eng/grpc-web": "^0.14.0", "change-case": "^4.1.1", - "cids": "^1.1.6", "debug": "^4.1.1", "err-code": "^3.0.1", "ipfs-core-types": "^0.5.2", "ipfs-core-utils": "^0.8.3", "ipfs-grpc-protocol": "^0.3.0", - "ipfs-unixfs": "^4.0.3", + "ipfs-unixfs": 
"^5.0.0", "it-first": "^1.0.4", "it-pushable": "^1.4.0", - "multiaddr": "^9.0.1", + "multiaddr": "^10.0.0", + "multiformats": "^9.4.1", "protobufjs": "^6.10.2", "wherearewe": "1.0.0", "ws": "^7.3.1" }, "devDependencies": { - "aegir": "^33.0.0", + "aegir": "^34.0.2", "it-all": "^1.0.4", "rimraf": "^3.0.2", - "sinon": "^10.0.1" + "sinon": "^11.1.1" }, "eslintConfig": { "extends": "ipfs" diff --git a/packages/ipfs-grpc-client/src/core-api/add-all.js b/packages/ipfs-grpc-client/src/core-api/add-all.js index 2e0dc703b0..e85244118e 100644 --- a/packages/ipfs-grpc-client/src/core-api/add-all.js +++ b/packages/ipfs-grpc-client/src/core-api/add-all.js @@ -1,7 +1,7 @@ 'use strict' const normaliseInput = require('ipfs-core-utils/src/files/normalise-input') -const CID = require('cids') +const { CID } = require('multiformats/cid') const bidiToDuplex = require('../utils/bidi-to-duplex') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') @@ -147,7 +147,7 @@ module.exports = function grpcAddAll (grpc, service, opts) { // received file/dir import result yield { path: result.path, - cid: new CID(result.cid), + cid: CID.parse(result.cid), mode: result.mode, mtime: { secs: result.mtime || 0, diff --git a/packages/ipfs-grpc-client/src/core-api/files/ls.js b/packages/ipfs-grpc-client/src/core-api/files/ls.js index cd97fc7553..fbe279a7aa 100644 --- a/packages/ipfs-grpc-client/src/core-api/files/ls.js +++ b/packages/ipfs-grpc-client/src/core-api/files/ls.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const serverStreamToIterator = require('../../utils/server-stream-to-iterator') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') @@ -28,7 +28,7 @@ module.exports = function grpcMfsLs (grpc, service, opts) { name: result.name, type: result.type.toLowerCase(), size: result.size, - cid: new CID(result.cid), + cid: CID.parse(result.cid), mode: result.mode, mtime: { secs: result.mtime 
|| 0, diff --git a/packages/ipfs-grpc-server/package.json b/packages/ipfs-grpc-server/package.json index 1241a09228..f0a5765d97 100644 --- a/packages/ipfs-grpc-server/package.json +++ b/packages/ipfs-grpc-server/package.json @@ -41,18 +41,18 @@ "it-peekable": "^1.0.1", "it-pipe": "^1.1.0", "it-pushable": "^1.4.0", - "multiaddr": "^9.0.1", + "multiaddr": "^10.0.0", "protobufjs": "^6.10.2", "ws": "^7.3.1" }, "devDependencies": { "@types/ws": "^7.4.0", - "aegir": "^33.0.0", + "aegir": "^34.0.2", "ipfs-core": "^0.8.0", "it-all": "^1.0.4", "it-drain": "^1.0.3", "rimraf": "^3.0.2", - "sinon": "^10.0.1", - "uint8arrays": "^2.1.3" + "sinon": "^11.1.1", + "uint8arrays": "^2.1.6" } } diff --git a/packages/ipfs-grpc-server/src/types.d.ts b/packages/ipfs-grpc-server/src/types.d.ts index 413f60da84..f651214690 100644 --- a/packages/ipfs-grpc-server/src/types.d.ts +++ b/packages/ipfs-grpc-server/src/types.d.ts @@ -5,10 +5,10 @@ export interface Options { socket?: WebsocketServer } -export type UnaryEndpoint = (input: InputMessage, metadata: Metadata) => Promise -export type BidirectionalStreamingEndpoint = (source: AsyncIterable, sink: Pushable, metadata: Metadata) => Promise -export type ClientStreamingEndpoint = (source: AsyncIterable, metadata: Metadata) => Promise -export type ServerStreamingEndpoint = (input: InputMessage, sink: Pushable, metadata: Metadata) => Promise +export interface UnaryEndpoint { (input: InputMessage, metadata: Metadata): Promise } +export interface BidirectionalStreamingEndpoint { (source: AsyncIterable, sink: Pushable, metadata: Metadata): Promise } +export interface ClientStreamingEndpoint { (source: AsyncIterable, metadata: Metadata): Promise } +export interface ServerStreamingEndpoint { (input: InputMessage, sink: Pushable, metadata: Metadata): Promise } export interface WebsocketMessage { path: string @@ -18,7 +18,6 @@ export interface WebsocketMessage { export interface WebsocketServer extends EventEmitter { // events - on(event: 'error', 
listener: (err: Error) => void): this - on(event: 'data', listener: (message: WebsocketMessage) => void): this + on: ((event: 'error', listener: (err: Error) => void) => this) & ((event: 'data', listener: (message: WebsocketMessage) => void) => this) stop: () => Promise } diff --git a/packages/ipfs-grpc-server/tsconfig.json b/packages/ipfs-grpc-server/tsconfig.json index 5fe8ea40d7..03031d1601 100644 --- a/packages/ipfs-grpc-server/tsconfig.json +++ b/packages/ipfs-grpc-server/tsconfig.json @@ -5,5 +5,10 @@ }, "include": [ "src" + ], + "references": [ + { + "path": "../ipfs-core-types" + } ] } diff --git a/packages/ipfs-http-client/.aegir.js b/packages/ipfs-http-client/.aegir.js index fec9ee097f..5dcbcb3398 100644 --- a/packages/ipfs-http-client/.aegir.js +++ b/packages/ipfs-http-client/.aegir.js @@ -6,7 +6,7 @@ const getPort = require('aegir/utils/get-port') /** @type {import('aegir').PartialOptions} */ module.exports = { build: { - bundlesizeMax: '89KB' + bundlesizeMax: '66KB' }, test: { async before (options) { diff --git a/packages/ipfs-http-client/package.json b/packages/ipfs-http-client/package.json index f3f260c651..00ccbac4e5 100644 --- a/packages/ipfs-http-client/package.json +++ b/packages/ipfs-http-client/package.json @@ -41,42 +41,38 @@ "lint": "aegir lint", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", - "dep-check": "aegir dep-check -i ipfs-core -i rimraf -i ipfs-core-types -i abort-controller -i ipld" + "dep-check": "aegir dep-check -i ipfs-core -i rimraf -i ipfs-core-types -i abort-controller" }, "dependencies": { + "@ipld/dag-cbor": "^6.0.5", + "@ipld/dag-pb": "^2.1.3", "abort-controller": "^3.0.0", "any-signal": "^2.1.2", - "cids": "^1.1.6", + "err-code": "^3.0.1", "debug": "^4.1.1", "form-data": "^4.0.0", "ipfs-core-types": "^0.5.2", "ipfs-core-utils": "^0.8.3", - "ipfs-unixfs": "^4.0.3", - "ipfs-utils": "^8.1.2", - "ipld-block": "^0.11.0", - "ipld-dag-cbor": "^1.0.0", - "ipld-dag-pb": "^0.22.1", - 
"ipld-raw": "^7.0.0", + "ipfs-utils": "^8.1.4", + "it-first": "^1.0.6", "it-last": "^1.0.4", "it-map": "^1.0.4", "it-tar": "^3.0.0", "it-to-stream": "^1.0.0", "merge-options": "^3.0.4", - "multiaddr": "^9.0.1", - "multibase": "^4.0.2", - "multicodec": "^3.0.1", - "multihashes": "^4.0.2", + "multiaddr": "^10.0.0", + "multiformats": "^9.4.1", "nanoid": "^3.1.12", "native-abort-controller": "^1.0.3", "parse-duration": "^1.0.0", "stream-to-it": "^0.2.2", - "uint8arrays": "^2.1.3" + "uint8arrays": "^2.1.6" }, "devDependencies": { - "aegir": "^33.0.0", + "aegir": "^34.0.2", "delay": "^5.0.0", "go-ipfs": "0.8.0", - "ipfsd-ctl": "^8.0.1", + "ipfsd-ctl": "^9.0.0", "it-all": "^1.0.4", "it-concat": "^2.0.0", "it-first": "^1.0.4", diff --git a/packages/ipfs-http-client/src/add-all.js b/packages/ipfs-http-client/src/add-all.js index 4c8ca20ba8..8c85b873d8 100644 --- a/packages/ipfs-http-client/src/add-all.js +++ b/packages/ipfs-http-client/src/add-all.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const toCamel = require('./lib/object-to-camel') const configure = require('./lib/configure') const multipartRequest = require('./lib/multipart-request') @@ -107,13 +107,19 @@ const createOnUploadProgress = (size, parts, progress) => { } /** - * @param {any} input + * @param {object} input + * @param {string} input.name + * @param {string} input.hash + * @param {string} input.size + * @param {string} [input.mode] + * @param {number} [input.mtime] + * @param {number} [input.mtimeNsecs] */ function toCoreInterface ({ name, hash, size, mode, mtime, mtimeNsecs }) { /** @type {AddResult} */ const output = { path: name, - cid: new CID(hash), + cid: CID.parse(hash), size: parseInt(size) } diff --git a/packages/ipfs-http-client/src/bitswap/stat.js b/packages/ipfs-http-client/src/bitswap/stat.js index 0c06048eca..35d2978768 100644 --- a/packages/ipfs-http-client/src/bitswap/stat.js +++ b/packages/ipfs-http-client/src/bitswap/stat.js @@ 
-1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -32,7 +32,7 @@ module.exports = configure(api => { function toCoreInterface (res) { return { provideBufLen: res.ProvideBufLen, - wantlist: (res.Wantlist || []).map((/** @type {{ '/': string }} */ k) => new CID(k['/'])), + wantlist: (res.Wantlist || []).map((/** @type {{ '/': string }} */ k) => CID.parse(k['/'])), peers: (res.Peers || []), blocksReceived: BigInt(res.BlocksReceived), dataReceived: BigInt(res.DataReceived), diff --git a/packages/ipfs-http-client/src/bitswap/unwant.js b/packages/ipfs-http-client/src/bitswap/unwant.js index 5774dd819e..847b38397a 100644 --- a/packages/ipfs-http-client/src/bitswap/unwant.js +++ b/packages/ipfs-http-client/src/bitswap/unwant.js @@ -1,6 +1,5 @@ 'use strict' -const CID = require('cids') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -18,8 +17,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - // @ts-ignore - CID|string seems to confuse typedef - arg: typeof cid === 'string' ? 
cid : new CID(cid).toString(), + arg: cid.toString(), ...options }), headers: options.headers diff --git a/packages/ipfs-http-client/src/bitswap/wantlist-for-peer.js b/packages/ipfs-http-client/src/bitswap/wantlist-for-peer.js index 85fe17760b..8dac0bf4d8 100644 --- a/packages/ipfs-http-client/src/bitswap/wantlist-for-peer.js +++ b/packages/ipfs-http-client/src/bitswap/wantlist-for-peer.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -14,20 +14,17 @@ module.exports = configure(api => { * @type {BitswapAPI["wantlistForPeer"]} */ async function wantlistForPeer (peerId, options = {}) { - // @ts-ignore - CID|string seems to confuse typedef - peerId = typeof peerId === 'string' ? peerId : new CID(peerId).toString() - const res = await (await api.post('bitswap/wantlist', { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ ...options, - peer: peerId + peer: peerId.toString() }), headers: options.headers })).json() - return (res.Keys || []).map((/** @type {{ '/': string }} */ k) => new CID(k['/'])) + return (res.Keys || []).map((/** @type {{ '/': string }} */ k) => CID.parse(k['/'])) } return wantlistForPeer }) diff --git a/packages/ipfs-http-client/src/bitswap/wantlist.js b/packages/ipfs-http-client/src/bitswap/wantlist.js index 536d43812f..60d37d183b 100644 --- a/packages/ipfs-http-client/src/bitswap/wantlist.js +++ b/packages/ipfs-http-client/src/bitswap/wantlist.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -21,7 +21,7 @@ module.exports = configure(api => { headers: options.headers })).json() - return (res.Keys || []).map((/** @type {{ '/': string }} */ k) => new CID(k['/'])) + return (res.Keys 
|| []).map((/** @type {{ '/': string }} */ k) => CID.parse(k['/'])) } return wantlist }) diff --git a/packages/ipfs-http-client/src/block/get.js b/packages/ipfs-http-client/src/block/get.js index ca9d09b138..75d4f5fe75 100644 --- a/packages/ipfs-http-client/src/block/get.js +++ b/packages/ipfs-http-client/src/block/get.js @@ -1,7 +1,5 @@ 'use strict' -const Block = require('ipld-block') -const CID = require('cids') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -15,9 +13,6 @@ module.exports = configure(api => { * @type {BlockAPI["get"]} */ async function get (cid, options = {}) { - // @ts-ignore - CID|string seems to confuse typedef - cid = new CID(cid) - const res = await api.post('block/get', { timeout: options.timeout, signal: options.signal, @@ -28,7 +23,7 @@ module.exports = configure(api => { headers: options.headers }) - return new Block(new Uint8Array(await res.arrayBuffer()), cid) + return new Uint8Array(await res.arrayBuffer()) } return get }) diff --git a/packages/ipfs-http-client/src/block/put.js b/packages/ipfs-http-client/src/block/put.js index cd96ae7232..57ba474dbe 100644 --- a/packages/ipfs-http-client/src/block/put.js +++ b/packages/ipfs-http-client/src/block/put.js @@ -1,8 +1,6 @@ 'use strict' -const Block = require('ipld-block') -const CID = require('cids') -const multihash = require('multihashes') +const { CID } = require('multiformats/cid') const multipartRequest = require('../lib/multipart-request') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -19,38 +17,12 @@ module.exports = configure(api => { * @type {BlockAPI["put"]} */ async function put (data, options = {}) { - if (Block.isBlock(data)) { - const { name, length } = multihash.decode(data.cid.multihash) - options = { - ...options, - format: data.cid.codec, - mhtype: name, - mhlen: length, - version: data.cid.version - } - // @ts-ignore - data is typed as 
block so TS complains about - // Uint8Array assignment. - data = data.data - } else if (options.cid) { - const cid = new CID(options.cid) - const { name, length } = multihash.decode(cid.multihash) - options = { - ...options, - format: cid.codec, - mhtype: name, - mhlen: length, - version: cid.version - } - delete options.cid - } - // allow aborting requests on body errors const controller = new AbortController() const signal = abortSignal(controller.signal, options.signal) let res try { - // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 const response = await api.post('block/put', { timeout: options.timeout, signal: signal, @@ -72,7 +44,7 @@ module.exports = configure(api => { throw err } - return new Block((/** @type {Uint8Array} */ data), new CID(res.Key)) + return CID.parse(res.Key) } return put diff --git a/packages/ipfs-http-client/src/block/rm.js b/packages/ipfs-http-client/src/block/rm.js index 7f9d6b0d7e..18d9e17c1e 100644 --- a/packages/ipfs-http-client/src/block/rm.js +++ b/packages/ipfs-http-client/src/block/rm.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -23,7 +23,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: cid.map(cid => new CID(cid).toString()), + arg: cid.map(cid => cid.toString()), 'stream-channels': true, ...options }), @@ -44,7 +44,7 @@ module.exports = configure(api => { function toCoreInterface (removed) { /** @type {RmResult} */ const out = { - cid: new CID(removed.Hash) + cid: CID.parse(removed.Hash) } if (removed.Error) { diff --git a/packages/ipfs-http-client/src/block/stat.js b/packages/ipfs-http-client/src/block/stat.js index e0c6c4bf64..8e6d143642 100644 --- a/packages/ipfs-http-client/src/block/stat.js +++ b/packages/ipfs-http-client/src/block/stat.js @@ -1,6 +1,6 @@ 
'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -18,14 +18,14 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: new CID(cid).toString(), + arg: cid.toString(), ...options }), headers: options.headers }) const data = await res.json() - return { cid: new CID(data.Key), size: data.Size } + return { cid: CID.parse(data.Key), size: data.Size } } return stat diff --git a/packages/ipfs-http-client/src/cat.js b/packages/ipfs-http-client/src/cat.js index 3ff6240f46..6d0e21fa60 100644 --- a/packages/ipfs-http-client/src/cat.js +++ b/packages/ipfs-http-client/src/cat.js @@ -1,6 +1,5 @@ 'use strict' -const CID = require('cids') const configure = require('./lib/configure') const toUrlSearchParams = require('./lib/to-url-search-params') @@ -18,7 +17,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: typeof path === 'string' ? 
path : new CID(path).toString(), + arg: path.toString(), ...options }), headers: options.headers diff --git a/packages/ipfs-http-client/src/dag/get.js b/packages/ipfs-http-client/src/dag/get.js index 21f30e27aa..f010881945 100644 --- a/packages/ipfs-http-client/src/dag/get.js +++ b/packages/ipfs-http-client/src/dag/get.js @@ -1,35 +1,54 @@ 'use strict' const configure = require('../lib/configure') -const multicodec = require('multicodec') -const loadFormat = require('../lib/ipld-formats') +const resolve = require('../lib/resolve') +const first = require('it-first') +const last = require('it-last') +const errCode = require('err-code') /** * @typedef {import('../types').HTTPClientExtraOptions} HTTPClientExtraOptions * @typedef {import('ipfs-core-types/src/dag').API} DAGAPI */ -module.exports = configure((api, opts) => { - const getBlock = require('../block/get')(opts) - const dagResolve = require('./resolve')(opts) - const load = loadFormat(opts.ipld) - - /** - * @type {DAGAPI["get"]} - */ - const get = async (cid, options = {}) => { - const resolved = await dagResolve(cid, options) - const block = await getBlock(resolved.cid, options) - - const codecName = multicodec.getName(resolved.cid.code) - const format = await load(codecName) - - if (resolved.cid.code === multicodec.RAW && !resolved.remainderPath) { - resolved.remainderPath = '/' +/** + * @param {import('ipfs-core-utils/src/multicodecs')} codecs + * @param {import('../types').Options} options + */ +module.exports = (codecs, options) => { + const fn = configure((api, opts) => { + const getBlock = require('../block/get')(opts) + + /** + * @type {DAGAPI["get"]} + */ + const get = async (cid, options = {}) => { + if (options.path) { + const entry = options.localResolve + ? 
await first(resolve(cid, options.path, codecs, getBlock, options)) + : await last(resolve(cid, options.path, codecs, getBlock, options)) + /** @type {import('ipfs-core-types/src/dag').GetResult} - first and last will return undefined when empty */ + const result = (entry) + + if (!result) { + throw errCode(new Error('Not found'), 'ERR_NOT_FOUND') + } + + return result + } + + const codec = await codecs.getCodec(cid.code) + const block = await getBlock(cid, options) + const node = codec.decode(block) + + return { + value: node, + remainderPath: '' + } } - return format.resolver.resolve(block.data, resolved.remainderPath || '') - } + return get + }) - return get -}) + return fn(options) +} diff --git a/packages/ipfs-http-client/src/dag/index.js b/packages/ipfs-http-client/src/dag/index.js index 2754bb6dd7..ec7f0ac5c3 100644 --- a/packages/ipfs-http-client/src/dag/index.js +++ b/packages/ipfs-http-client/src/dag/index.js @@ -1,11 +1,11 @@ 'use strict' /** + * @param {import('ipfs-core-utils/src/multicodecs')} codecs * @param {import('../types').Options} config */ -module.exports = config => ({ - get: require('./get')(config), - put: require('./put')(config), - resolve: require('./resolve')(config), - tree: require('./tree')(config) +module.exports = (codecs, config) => ({ + get: require('./get')(codecs, config), + put: require('./put')(codecs, config), + resolve: require('./resolve')(config) }) diff --git a/packages/ipfs-http-client/src/dag/put.js b/packages/ipfs-http-client/src/dag/put.js index 9290b4844b..49c7549054 100644 --- a/packages/ipfs-http-client/src/dag/put.js +++ b/packages/ipfs-http-client/src/dag/put.js @@ -1,74 +1,57 @@ 'use strict' -const CID = require('cids') -const multihash = require('multihashes') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const multipartRequest = require('../lib/multipart-request') const toUrlSearchParams = require('../lib/to-url-search-params') const abortSignal = 
require('../lib/abort-signal') const { AbortController } = require('native-abort-controller') -const multicodec = require('multicodec') -const loadFormat = require('../lib/ipld-formats') /** * @typedef {import('../types').HTTPClientExtraOptions} HTTPClientExtraOptions * @typedef {import('ipfs-core-types/src/dag').API} DAGAPI */ -module.exports = configure((api, opts) => { - const load = loadFormat(opts.ipld) - - /** - * @type {DAGAPI["put"]} - */ - const put = async (dagNode, options = {}) => { - if (options.cid && (options.format || options.hashAlg)) { - throw new Error('Failed to put DAG node. Provide either `cid` OR `format` and `hashAlg` options') - } else if ((options.format && !options.hashAlg) || (!options.format && options.hashAlg)) { - throw new Error('Failed to put DAG node. Provide `format` AND `hashAlg` options') - } - - let encodingOptions - if (options.cid) { - const cid = new CID(options.cid) - encodingOptions = { - ...options, - format: multicodec.getName(cid.code), - hashAlg: multihash.decode(cid.multihash).name +/** + * @param {import('ipfs-core-utils/src/multicodecs')} codecs + * @param {import('../types').Options} options + */ +module.exports = (codecs, options) => { + const fn = configure((api) => { + /** + * @type {DAGAPI["put"]} + */ + const put = async (dagNode, options = {}) => { + const settings = { + format: 'dag-cbor', + hashAlg: 'sha2-256', + inputEnc: 'raw', + ...options } - delete options.cid - } else { - encodingOptions = options - } - const settings = { - format: 'dag-cbor', - hashAlg: 'sha2-256', - inputEnc: 'raw', - ...encodingOptions + const codec = await codecs.getCodec(settings.format) + const serialized = codec.encode(dagNode) + + // allow aborting requests on body errors + const controller = new AbortController() + const signal = abortSignal(controller.signal, settings.signal) + + // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 + const res = await api.post('dag/put', { + timeout: settings.timeout, + signal, + 
searchParams: toUrlSearchParams(settings), + ...( + await multipartRequest(serialized, controller, settings.headers) + ) + }) + const data = await res.json() + + return CID.parse(data.Cid['/']) } - // @ts-ignore settings.format might be an invalid CodecName - const format = await load(settings.format) - const serialized = format.util.serialize(dagNode) - - // allow aborting requests on body errors - const controller = new AbortController() - const signal = abortSignal(controller.signal, settings.signal) - - // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 - const res = await api.post('dag/put', { - timeout: settings.timeout, - signal, - searchParams: toUrlSearchParams(settings), - ...( - await multipartRequest(serialized, controller, settings.headers) - ) - }) - const data = await res.json() - - return new CID(data.Cid['/']) - } + return put + }) - return put -}) + return fn(options) +} diff --git a/packages/ipfs-http-client/src/dag/resolve.js b/packages/ipfs-http-client/src/dag/resolve.js index c0e69354b6..ad2d7a58f6 100644 --- a/packages/ipfs-http-client/src/dag/resolve.js +++ b/packages/ipfs-http-client/src/dag/resolve.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -26,7 +26,7 @@ module.exports = configure(api => { const data = await res.json() - return { cid: new CID(data.Cid['/']), remainderPath: data.RemPath } + return { cid: CID.parse(data.Cid['/']), remainderPath: data.RemPath } } return resolve diff --git a/packages/ipfs-http-client/src/dag/tree.js b/packages/ipfs-http-client/src/dag/tree.js deleted file mode 100644 index 43809ed84d..0000000000 --- a/packages/ipfs-http-client/src/dag/tree.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict' - -const configure = require('../lib/configure') - -/** - * @typedef {import('../types').HTTPClientExtraOptions} HTTPClientExtraOptions - * 
@typedef {import('ipfs-core-types/src/dag').API} DAGAPI - */ - -module.exports = configure(api => { - /** - * @type {DAGAPI["tree"]} - */ - const tree = async (ipfsPath, options = {}) => { - throw new Error('Not implemented') - } - - return tree -}) diff --git a/packages/ipfs-http-client/src/dht/find-provs.js b/packages/ipfs-http-client/src/dht/find-provs.js index c431648ae6..91b4cd63a6 100644 --- a/packages/ipfs-http-client/src/dht/find-provs.js +++ b/packages/ipfs-http-client/src/dht/find-provs.js @@ -1,6 +1,5 @@ 'use strict' -const CID = require('cids') const { Multiaddr } = require('multiaddr') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -20,7 +19,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: `${new CID(cid)}`, + arg: cid.toString(), ...options }), headers: options.headers diff --git a/packages/ipfs-http-client/src/dht/provide.js b/packages/ipfs-http-client/src/dht/provide.js index 878880398f..01aedbce1a 100644 --- a/packages/ipfs-http-client/src/dht/provide.js +++ b/packages/ipfs-http-client/src/dht/provide.js @@ -1,6 +1,5 @@ 'use strict' -const CID = require('cids') const { Multiaddr } = require('multiaddr') const toCamel = require('../lib/object-to-camel') const configure = require('../lib/configure') @@ -9,6 +8,7 @@ const toUrlSearchParams = require('../lib/to-url-search-params') /** * @typedef {import('../types').HTTPClientExtraOptions} HTTPClientExtraOptions * @typedef {import('ipfs-core-types/src/dht').API} DHTAPI + * @typedef {import('multiformats/cid').CID} CID */ module.exports = configure(api => { @@ -16,13 +16,14 @@ module.exports = configure(api => { * @type {DHTAPI["provide"]} */ async function * provide (cids, options = { recursive: false }) { - cids = Array.isArray(cids) ? cids : [cids] + /** @type {CID[]} */ + const cidArr = Array.isArray(cids) ? 
cids : [cids] const res = await api.post('dht/provide', { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: cids.map(cid => new CID(cid).toString()), + arg: cidArr.map(cid => cid.toString()), ...options }), headers: options.headers @@ -30,7 +31,6 @@ module.exports = configure(api => { for await (let message of res.ndjson()) { message = toCamel(message) - message.id = new CID(message.id) if (message.responses) { message.responses = message.responses.map((/** @type {{ ID: string, Addrs: string[] }} */ { ID, Addrs }) => ({ id: ID, diff --git a/packages/ipfs-http-client/src/dht/put.js b/packages/ipfs-http-client/src/dht/put.js index d2b88aa89a..1e7de6b49d 100644 --- a/packages/ipfs-http-client/src/dht/put.js +++ b/packages/ipfs-http-client/src/dht/put.js @@ -1,6 +1,5 @@ 'use strict' -const CID = require('cids') const { Multiaddr } = require('multiaddr') const toCamel = require('../lib/object-to-camel') const configure = require('../lib/configure') @@ -38,7 +37,6 @@ module.exports = configure(api => { for await (let message of res.ndjson()) { message = toCamel(message) - message.id = new CID(message.id) if (message.responses) { message.responses = message.responses.map((/** @type {{ ID: string, Addrs: string[] }} */ { ID, Addrs }) => ({ id: ID, diff --git a/packages/ipfs-http-client/src/dht/query.js b/packages/ipfs-http-client/src/dht/query.js index 429f487ef2..14f3326c7b 100644 --- a/packages/ipfs-http-client/src/dht/query.js +++ b/packages/ipfs-http-client/src/dht/query.js @@ -1,6 +1,5 @@ 'use strict' -const CID = require('cids') const { Multiaddr } = require('multiaddr') const toCamel = require('../lib/object-to-camel') const configure = require('../lib/configure') @@ -20,7 +19,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: new CID(`${peerId}`), + arg: peerId.toString(), ...options }), headers: options.headers @@ -28,7 +27,6 @@ module.exports 
= configure(api => { for await (let message of res.ndjson()) { message = toCamel(message) - message.id = new CID(message.id) message.responses = (message.responses || []).map((/** @type {{ ID: string, Addrs: string[] }} */ { ID, Addrs }) => ({ id: ID, addrs: (Addrs || []).map((/** @type {string} **/ a) => new Multiaddr(a)) diff --git a/packages/ipfs-http-client/src/files/cp.js b/packages/ipfs-http-client/src/files/cp.js index f87bcef5e5..4dbe59ef0f 100644 --- a/packages/ipfs-http-client/src/files/cp.js +++ b/packages/ipfs-http-client/src/files/cp.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -14,15 +14,14 @@ module.exports = configure(api => { * @type {FilesAPI["cp"]} */ async function cp (sources, destination, options = {}) { - if (!Array.isArray(sources)) { - sources = [sources] - } + /** @type {import('ipfs-core-types/src/utils').IPFSPath[]} */ + const sourceArr = Array.isArray(sources) ? sources : [sources] const res = await api.post('files/cp', { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: sources.concat(destination).map(src => CID.isCID(src) ? `/ipfs/${src}` : src), + arg: sourceArr.concat(destination).map(src => src instanceof CID ? 
`/ipfs/${src}` : src), ...options }), headers: options.headers diff --git a/packages/ipfs-http-client/src/files/flush.js b/packages/ipfs-http-client/src/files/flush.js index 33bbdae1aa..0c04cc809f 100644 --- a/packages/ipfs-http-client/src/files/flush.js +++ b/packages/ipfs-http-client/src/files/flush.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -29,7 +29,7 @@ module.exports = configure(api => { }) const data = await res.json() - return new CID(data.Cid) + return CID.parse(data.Cid) } return flush }) diff --git a/packages/ipfs-http-client/src/files/ls.js b/packages/ipfs-http-client/src/files/ls.js index 71d05b8491..67d4b24323 100644 --- a/packages/ipfs-http-client/src/files/ls.js +++ b/packages/ipfs-http-client/src/files/ls.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const toCamelWithMetadata = require('../lib/object-to-camel-with-metadata') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -14,7 +14,7 @@ module.exports = configure(api => { * @type {FilesAPI["ls"]} */ async function * ls (path, options = {}) { - if (!path || typeof path !== 'string') { + if (!path) { throw new Error('ipfs.files.ls requires a path') } @@ -22,7 +22,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: CID.isCID(path) ? `/ipfs/${path}` : path, + arg: path instanceof CID ? 
`/ipfs/${path}` : path, // default long to true, diverges from go-ipfs where its false by default long: true, ...options, @@ -50,7 +50,7 @@ module.exports = configure(api => { */ function toCoreInterface (entry) { if (entry.hash) { - entry.cid = new CID(entry.hash) + entry.cid = CID.parse(entry.hash) } delete entry.hash diff --git a/packages/ipfs-http-client/src/files/mv.js b/packages/ipfs-http-client/src/files/mv.js index c8c4cf7ced..92ab4fe0f9 100644 --- a/packages/ipfs-http-client/src/files/mv.js +++ b/packages/ipfs-http-client/src/files/mv.js @@ -1,6 +1,5 @@ 'use strict' -const CID = require('cids') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -22,7 +21,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: sources.concat(destination).map(src => CID.isCID(src) ? `/ipfs/${src}` : src), + arg: sources.concat(destination), ...options }), headers: options.headers diff --git a/packages/ipfs-http-client/src/files/stat.js b/packages/ipfs-http-client/src/files/stat.js index da07682204..f5f678590f 100644 --- a/packages/ipfs-http-client/src/files/stat.js +++ b/packages/ipfs-http-client/src/files/stat.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const toCamelWithMetadata = require('../lib/object-to-camel-with-metadata') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -15,11 +15,13 @@ module.exports = configure(api => { * @type {FilesAPI["stat"]} */ async function stat (path, options = {}) { - if (path && !CID.isCID(path) && typeof path !== 'string') { + if (path && !(path instanceof CID) && typeof path !== 'string') { options = path || {} path = '/' } + options = options || {} + const res = await api.post('files/stat', { timeout: options.timeout, signal: options.signal, @@ -41,7 +43,7 @@ module.exports = 
configure(api => { * @param {*} entry */ function toCoreInterface (entry) { - entry.cid = new CID(entry.hash) + entry.cid = CID.parse(entry.hash) delete entry.hash return entry } diff --git a/packages/ipfs-http-client/src/files/write.js b/packages/ipfs-http-client/src/files/write.js index f553478280..0f88fe133e 100644 --- a/packages/ipfs-http-client/src/files/write.js +++ b/packages/ipfs-http-client/src/files/write.js @@ -1,7 +1,7 @@ 'use strict' const modeToString = require('../lib/mode-to-string') -const { parseMtime } = require('ipfs-unixfs') +const parseMtime = require('../lib/parse-mtime') const configure = require('../lib/configure') const multipartRequest = require('../lib/multipart-request') const toUrlSearchParams = require('../lib/to-url-search-params') diff --git a/packages/ipfs-http-client/src/get.js b/packages/ipfs-http-client/src/get.js index 5573c85c26..a836285366 100644 --- a/packages/ipfs-http-client/src/get.js +++ b/packages/ipfs-http-client/src/get.js @@ -2,7 +2,7 @@ // @ts-ignore no types const Tar = require('it-tar') -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('./lib/configure') const toUrlSearchParams = require('./lib/to-url-search-params') const map = require('it-map') @@ -21,7 +21,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: `${path instanceof Uint8Array ? new CID(path) : path}`, + arg: `${path instanceof Uint8Array ? 
CID.decode(path) : path}`, ...options }), headers: options.headers diff --git a/packages/ipfs-http-client/src/index.js b/packages/ipfs-http-client/src/index.js index c9c9551b29..562b0a7944 100644 --- a/packages/ipfs-http-client/src/index.js +++ b/packages/ipfs-http-client/src/index.js @@ -1,23 +1,54 @@ 'use strict' /* eslint-env browser */ -const CID = require('cids') +const { CID } = require('multiformats/cid') const { multiaddr } = require('multiaddr') -const multibase = require('multibase') -const multicodec = require('multicodec') -const multihash = require('multihashes') const globSource = require('ipfs-utils/src/files/glob-source') const urlSource = require('ipfs-utils/src/files/url-source') +const Multicodecs = require('ipfs-core-utils/src/multicodecs') +const Multihashes = require('ipfs-core-utils/src/multihashes') +const Multibases = require('ipfs-core-utils/src/multibases') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const raw = require('multiformats/codecs/raw') +const json = require('multiformats/codecs/json') +const { sha256, sha512 } = require('multiformats/hashes/sha2') +const { identity } = require('multiformats/hashes/identity') +const { base58btc } = require('multiformats/bases/base58') /** * @typedef {import('./types').EndpointConfig} EndpointConfig * @typedef {import('./types').Options} Options + * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec */ /** * @param {Options} options */ function create (options = {}) { + /** + * @type {BlockCodec} + */ + const id = { + name: identity.name, + code: identity.code, + encode: (id) => id, + decode: (id) => id + } + + const bases = new Multibases({ + bases: [base58btc].concat(options.ipld && options.ipld.bases ? 
options.ipld.bases : []), + loadBase: options.ipld && options.ipld.loadBase + }) + const codecs = new Multicodecs({ + codecs: [dagPb, dagCbor, raw, json, id].concat(options.ipld?.codecs || []), + loadCodec: options.ipld && options.ipld.loadCodec + }) + const hashers = new Multihashes({ + hashers: [sha256, sha512, identity].concat(options.ipld && options.ipld.hashers ? options.ipld.hashers : []), + loadHasher: options.ipld && options.ipld.loadHasher + }) + /** @type {import('ipfs-core-types').IPFS & { getEndpointConfig: () => EndpointConfig }} */ const client = { add: require('./add')(options), @@ -28,7 +59,7 @@ function create (options = {}) { cat: require('./cat')(options), commands: require('./commands')(options), config: require('./config')(options), - dag: require('./dag')(options), + dag: require('./dag')(codecs, options), dht: require('./dht')(options), diag: require('./diag')(options), dns: require('./dns')(options), @@ -42,7 +73,7 @@ function create (options = {}) { ls: require('./ls')(options), mount: require('./mount')(options), name: require('./name')(options), - object: require('./object')(options), + object: require('./object')(codecs, options), pin: require('./pin')(options), ping: require('./ping')(options), pubsub: require('./pubsub')(options), @@ -53,7 +84,10 @@ function create (options = {}) { stats: require('./stats')(options), stop: require('./stop')(options), swarm: require('./swarm')(options), - version: require('./version')(options) + version: require('./version')(options), + bases, + codecs, + hashers } return client @@ -63,9 +97,6 @@ module.exports = { create, CID, multiaddr, - multibase, - multicodec, - multihash, globSource, urlSource } diff --git a/packages/ipfs-http-client/src/lib/core.js b/packages/ipfs-http-client/src/lib/core.js index e03592bd9a..fc964f883f 100644 --- a/packages/ipfs-http-client/src/lib/core.js +++ b/packages/ipfs-http-client/src/lib/core.js @@ -5,7 +5,7 @@ const { isBrowser, isWebWorker, isNode } = 
require('ipfs-utils/src/env') const { default: parseDuration } = require('parse-duration') const log = require('debug')('ipfs-http-client:lib:error-handler') const HTTP = require('ipfs-utils/src/http') -const merge = require('merge-options') +const merge = require('merge-options').bind({ ignoreUndefined: true }) const toUrlString = require('ipfs-core-utils/src/to-url-string') const http = require('http') const https = require('https') diff --git a/packages/ipfs-http-client/src/lib/ipld-formats.js b/packages/ipfs-http-client/src/lib/ipld-formats.js deleted file mode 100644 index 69d47bae0b..0000000000 --- a/packages/ipfs-http-client/src/lib/ipld-formats.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict' - -const dagPB = require('ipld-dag-pb') -const dagCBOR = require('ipld-dag-cbor') -const raw = require('ipld-raw') -const multicodec = require('multicodec') - -/** - * @typedef {import('cids')} CID - * @typedef {import('interface-ipld-format').Format} IPLDFormat - * @typedef {import('multicodec').CodecName} CodecName - * @typedef {import('../types').LoadFormatFn} LoadFormatFn - */ - -/** - * @type {LoadFormatFn} - */ -const noop = (codec) => { - return Promise.reject(new Error(`Missing IPLD format "${codec}"`)) -} - -/** - * Return an object containing supported IPLD Formats - * - * @param {object} [options] - IPLD options passed to the http client constructor - * @param {IPLDFormat[]} [options.formats] - A list of IPLD Formats to use - * @param {LoadFormatFn} [options.loadFormat] - An async function that can load a format when passed a codec name - */ -module.exports = ({ formats = [], loadFormat = noop } = {}) => { - formats = formats || [] - loadFormat = loadFormat || noop - - const configuredFormats = { - [multicodec.DAG_PB]: dagPB, - [multicodec.DAG_CBOR]: dagCBOR, - [multicodec.RAW]: raw - } - - formats.forEach(format => { - configuredFormats[format.codec] = format - }) - - /** - * Attempts to load an IPLD format for the passed CID - * - * @param {CodecName} codec 
- The code to load the format for - */ - const loadResolver = async (codec) => { - const number = multicodec.getCodeFromName(codec) - const format = configuredFormats[number] || await loadFormat(codec) - - if (!format) { - throw Object.assign( - new Error(`Missing IPLD format "${codec}"`), - { missingMulticodec: codec } - ) - } - - return format - } - - return loadResolver -} diff --git a/packages/ipfs-http-client/src/lib/parse-mtime.js b/packages/ipfs-http-client/src/lib/parse-mtime.js new file mode 100644 index 0000000000..0ea352acba --- /dev/null +++ b/packages/ipfs-http-client/src/lib/parse-mtime.js @@ -0,0 +1,77 @@ +'use strict' + +const errCode = require('err-code') + +/** + * @param {any} input + */ +function parseMtime (input) { + if (input == null) { + return undefined + } + + /** @type {{ secs: number, nsecs?: number } | undefined} */ + let mtime + + // { secs, nsecs } + if (input.secs != null) { + mtime = { + secs: input.secs, + nsecs: input.nsecs + } + } + + // UnixFS TimeSpec + if (input.Seconds != null) { + mtime = { + secs: input.Seconds, + nsecs: input.FractionalNanoseconds + } + } + + // process.hrtime() + if (Array.isArray(input)) { + mtime = { + secs: input[0], + nsecs: input[1] + } + } + + // Javascript Date + if (input instanceof Date) { + const ms = input.getTime() + const secs = Math.floor(ms / 1000) + + mtime = { + secs: secs, + nsecs: (ms - (secs * 1000)) * 1000 + } + } + + /* + TODO: https://github.com/ipfs/aegir/issues/487 + + // process.hrtime.bigint() + if (input instanceof BigInt) { + const secs = input / BigInt(1e9) + const nsecs = input - (secs * BigInt(1e9)) + + mtime = { + secs: parseInt(secs.toString()), + nsecs: parseInt(nsecs.toString()) + } + } + */ + + if (!Object.prototype.hasOwnProperty.call(mtime, 'secs')) { + return undefined + } + + if (mtime != null && mtime.nsecs != null && (mtime.nsecs < 0 || mtime.nsecs > 999999999)) { + throw errCode(new Error('mtime-nsecs must be within the range [0,999999999]'), 
'ERR_INVALID_MTIME_NSECS') + } + + return mtime +} + +module.exports = parseMtime diff --git a/packages/ipfs-http-client/src/lib/resolve.js b/packages/ipfs-http-client/src/lib/resolve.js new file mode 100644 index 0000000000..8fb5d2294a --- /dev/null +++ b/packages/ipfs-http-client/src/lib/resolve.js @@ -0,0 +1,67 @@ +'use strict' + +const { CID } = require('multiformats/cid') +const errCode = require('err-code') + +/** + * @typedef {import('ipfs-core-types/src/utils').AbortOptions} AbortOptions + */ + +/** + * Retrieves IPLD Nodes along the `path` that is rooted at `cid`. + * + * @param {CID} cid - the CID where the resolving starts + * @param {string} path - the path that should be resolved + * @param {import('ipfs-core-utils/src/multicodecs')} codecs + * @param {(cid: CID, options?: AbortOptions) => Promise} getBlock + * @param {AbortOptions} [options] + */ +const resolve = async function * (cid, path, codecs, getBlock, options) { + /** + * @param {CID} cid + */ + const load = async (cid) => { + const codec = await codecs.getCodec(cid.code) + const block = await getBlock(cid, options) + + return codec.decode(block) + } + + const parts = path.split('/').filter(Boolean) + let value = await load(cid) + let lastCid = cid + + if (!parts.length) { + yield { + value, + remainderPath: '' + } + } + + // End iteration if there isn't a CID to follow any more + while (parts.length) { + const key = parts.shift() + + if (!key) { + throw errCode(new Error(`Could not resolve path "${path}"`), 'ERR_INVALID_PATH') + } + + if (Object.prototype.hasOwnProperty.call(value, key)) { + value = value[key] + + yield { + value, + remainderPath: parts.join('/') + } + } else { + throw errCode(new Error(`no link named "${key}" under ${lastCid}`), 'ERR_NO_LINK') + } + + if (value instanceof CID) { + lastCid = value + value = await load(value) + } + } +} + +module.exports = resolve diff --git a/packages/ipfs-http-client/src/lib/to-url-search-params.js 
b/packages/ipfs-http-client/src/lib/to-url-search-params.js index 1125959b8a..681e0921d9 100644 --- a/packages/ipfs-http-client/src/lib/to-url-search-params.js +++ b/packages/ipfs-http-client/src/lib/to-url-search-params.js @@ -1,7 +1,7 @@ 'use strict' const modeToString = require('./mode-to-string') -const { parseMtime } = require('ipfs-unixfs') +const parseMtime = require('../lib/parse-mtime') /** * @param {*} params diff --git a/packages/ipfs-http-client/src/ls.js b/packages/ipfs-http-client/src/ls.js index cec925d12b..1e3fd98af4 100644 --- a/packages/ipfs-http-client/src/ls.js +++ b/packages/ipfs-http-client/src/ls.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('./lib/configure') const toUrlSearchParams = require('./lib/to-url-search-params') const stat = require('./files/stat') @@ -15,7 +15,7 @@ module.exports = configure((api, opts) => { * @type {RootAPI["ls"]} */ async function * ls (path, options = {}) { - const pathStr = `${path instanceof Uint8Array ? new CID(path) : path}` + const pathStr = `${path instanceof Uint8Array ? CID.decode(path) : path}` /** * @param {*} link @@ -29,6 +29,8 @@ module.exports = configure((api, opts) => { const stats = await stat(opts)(ipfsPath) hash = stats.cid + } else { + hash = CID.parse(hash) } /** @type {import('ipfs-core-types/src/root').IPFSEntry} */ @@ -36,7 +38,7 @@ module.exports = configure((api, opts) => { name: link.Name, path: pathStr + (link.Name ? 
`/${link.Name}` : ''), size: link.Size, - cid: new CID(hash), + cid: hash, type: typeOf(link), depth: link.Depth || 1 } diff --git a/packages/ipfs-http-client/src/object/data.js b/packages/ipfs-http-client/src/object/data.js index 805472c3e9..e651aa31bf 100644 --- a/packages/ipfs-http-client/src/object/data.js +++ b/packages/ipfs-http-client/src/object/data.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -18,7 +18,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: `${cid instanceof Uint8Array ? new CID(cid) : cid}`, + arg: `${cid instanceof Uint8Array ? CID.decode(cid) : cid}`, ...options }), headers: options.headers diff --git a/packages/ipfs-http-client/src/object/get.js b/packages/ipfs-http-client/src/object/get.js index 0d6a8953d1..562aaad264 100644 --- a/packages/ipfs-http-client/src/object/get.js +++ b/packages/ipfs-http-client/src/object/get.js @@ -1,7 +1,6 @@ 'use strict' -const CID = require('cids') -const { DAGNode, DAGLink } = require('ipld-dag-pb') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') const uint8ArrayFromString = require('uint8arrays/from-string') @@ -20,7 +19,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: `${cid instanceof Uint8Array ? new CID(cid) : cid}`, + arg: `${cid instanceof Uint8Array ? 
CID.decode(cid) : cid}`, dataEncoding: 'base64', ...options }), @@ -28,10 +27,14 @@ module.exports = configure(api => { }) const data = await res.json() - return new DAGNode( - uint8ArrayFromString(data.Data, 'base64pad'), - (data.Links || []).map((/** @type {any} */ l) => new DAGLink(l.Name, l.Size, l.Hash)) - ) + return { + Data: uint8ArrayFromString(data.Data, 'base64pad'), + Links: (data.Links || []).map((/** @type {any} */ link) => ({ + Name: link.Name, + Hash: CID.parse(link.Hash), + Tsize: link.Size + })) + } } return get }) diff --git a/packages/ipfs-http-client/src/object/index.js b/packages/ipfs-http-client/src/object/index.js index e0c83027e5..c8c4871ef5 100644 --- a/packages/ipfs-http-client/src/object/index.js +++ b/packages/ipfs-http-client/src/object/index.js @@ -1,14 +1,15 @@ 'use strict' /** + * @param {import('ipfs-core-utils/src/multicodecs')} codecs * @param {import('../types').Options} config */ -module.exports = config => ({ +module.exports = (codecs, config) => ({ data: require('./data')(config), get: require('./get')(config), links: require('./links')(config), new: require('./new')(config), patch: require('./patch')(config), - put: require('./put')(config), + put: require('./put')(codecs, config), stat: require('./stat')(config) }) diff --git a/packages/ipfs-http-client/src/object/links.js b/packages/ipfs-http-client/src/object/links.js index 2582c51c6e..02e230b9aa 100644 --- a/packages/ipfs-http-client/src/object/links.js +++ b/packages/ipfs-http-client/src/object/links.js @@ -1,7 +1,6 @@ 'use strict' -const CID = require('cids') -const { DAGLink } = require('ipld-dag-pb') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -19,14 +18,18 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: `${cid instanceof Uint8Array ? 
new CID(cid) : cid}`, + arg: `${cid instanceof Uint8Array ? CID.decode(cid) : cid}`, ...options }), headers: options.headers }) const data = await res.json() - return (data.Links || []).map((/** @type {any} */ l) => new DAGLink(l.Name, l.Size, l.Hash)) + return (data.Links || []).map((/** @type {any} */ l) => ({ + Name: l.Name, + Tsize: l.Size, + Hash: CID.parse(l.Hash) + })) } return links }) diff --git a/packages/ipfs-http-client/src/object/new.js b/packages/ipfs-http-client/src/object/new.js index eee675f39b..718688d748 100644 --- a/packages/ipfs-http-client/src/object/new.js +++ b/packages/ipfs-http-client/src/object/new.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -26,7 +26,7 @@ module.exports = configure(api => { const { Hash } = await res.json() - return new CID(Hash) + return CID.parse(Hash) } return newObject }) diff --git a/packages/ipfs-http-client/src/object/patch/add-link.js b/packages/ipfs-http-client/src/object/patch/add-link.js index ef45de5c20..b9d20626d8 100644 --- a/packages/ipfs-http-client/src/object/patch/add-link.js +++ b/packages/ipfs-http-client/src/object/patch/add-link.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../../lib/configure') const toUrlSearchParams = require('../../lib/to-url-search-params') @@ -19,7 +19,7 @@ module.exports = configure(api => { signal: options.signal, searchParams: toUrlSearchParams({ arg: [ - `${cid instanceof Uint8Array ? 
new CID(cid) : cid}`, + `${cid}`, // @ts-ignore loose types dLink.Name || dLink.name || '', // @ts-ignore loose types @@ -32,7 +32,8 @@ module.exports = configure(api => { const { Hash } = await res.json() - return new CID(Hash) + return CID.parse(Hash) } + return addLink }) diff --git a/packages/ipfs-http-client/src/object/patch/append-data.js b/packages/ipfs-http-client/src/object/patch/append-data.js index ee6deb8953..6eab020370 100644 --- a/packages/ipfs-http-client/src/object/patch/append-data.js +++ b/packages/ipfs-http-client/src/object/patch/append-data.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const multipartRequest = require('../../lib/multipart-request') const configure = require('../../lib/configure') const toUrlSearchParams = require('../../lib/to-url-search-params') @@ -26,7 +26,7 @@ module.exports = configure(api => { timeout: options.timeout, signal, searchParams: toUrlSearchParams({ - arg: `${cid instanceof Uint8Array ? new CID(cid) : cid}`, + arg: `${cid}`, ...options }), ...( @@ -36,7 +36,7 @@ module.exports = configure(api => { const { Hash } = await res.json() - return new CID(Hash) + return CID.parse(Hash) } return appendData }) diff --git a/packages/ipfs-http-client/src/object/patch/rm-link.js b/packages/ipfs-http-client/src/object/patch/rm-link.js index 8881e577ee..4c6faa26fc 100644 --- a/packages/ipfs-http-client/src/object/patch/rm-link.js +++ b/packages/ipfs-http-client/src/object/patch/rm-link.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../../lib/configure') const toUrlSearchParams = require('../../lib/to-url-search-params') @@ -19,7 +19,7 @@ module.exports = configure(api => { signal: options.signal, searchParams: toUrlSearchParams({ arg: [ - `${cid instanceof Uint8Array ? 
new CID(cid) : cid}`, + `${cid}`, // @ts-ignore loose types dLink.Name || dLink.name || null ], @@ -30,7 +30,7 @@ module.exports = configure(api => { const { Hash } = await res.json() - return new CID(Hash) + return CID.parse(Hash) } return rmLink }) diff --git a/packages/ipfs-http-client/src/object/patch/set-data.js b/packages/ipfs-http-client/src/object/patch/set-data.js index 06b4f21193..b9fa089274 100644 --- a/packages/ipfs-http-client/src/object/patch/set-data.js +++ b/packages/ipfs-http-client/src/object/patch/set-data.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const multipartRequest = require('../../lib/multipart-request') const configure = require('../../lib/configure') const toUrlSearchParams = require('../../lib/to-url-search-params') @@ -22,21 +22,23 @@ module.exports = configure(api => { const signal = abortSignal(controller.signal, options.signal) // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 - const { Hash } = await (await api.post('object/patch/set-data', { + const res = await api.post('object/patch/set-data', { timeout: options.timeout, signal, searchParams: toUrlSearchParams({ arg: [ - `${cid instanceof Uint8Array ? 
new CID(cid) : cid}` + `${cid}` ], ...options }), ...( await multipartRequest(data, controller, options.headers) ) - })).json() + }) - return new CID(Hash) + const { Hash } = await res.json() + + return CID.parse(Hash) } return setData }) diff --git a/packages/ipfs-http-client/src/object/put.js b/packages/ipfs-http-client/src/object/put.js index 81974122bf..8f3297927c 100644 --- a/packages/ipfs-http-client/src/object/put.js +++ b/packages/ipfs-http-client/src/object/put.js @@ -1,89 +1,33 @@ 'use strict' -const CID = require('cids') -const { DAGNode } = require('ipld-dag-pb') -const multipartRequest = require('../lib/multipart-request') const configure = require('../lib/configure') -const toUrlSearchParams = require('../lib/to-url-search-params') -const abortSignal = require('../lib/abort-signal') -const { AbortController } = require('native-abort-controller') -const uint8ArrayToString = require('uint8arrays/to-string') -const uint8ArrayFromString = require('uint8arrays/from-string') /** * @typedef {import('../types').HTTPClientExtraOptions} HTTPClientExtraOptions * @typedef {import('ipfs-core-types/src/object').API} ObjectAPI */ -module.exports = configure(api => { - /** - * @type {ObjectAPI["put"]} - */ - async function put (obj, options = {}) { - let tmpObj = { - /** @type {string | undefined} */ - Data: undefined, - /** @type {{ Name: string, Hash: string, Size: number }[]} */ - Links: [] - } - - if (obj instanceof Uint8Array) { - if (!options.enc) { - tmpObj = { - // FIXME: this will corrupt data for byte values over 127 - Data: uint8ArrayToString(obj), - Links: [] - } - } - } else if (obj instanceof DAGNode) { - tmpObj = { - // FIXME: this will corrupt data for byte values over 127 - Data: uint8ArrayToString(obj.Data), - Links: obj.Links.map(l => ({ - Name: l.Name, - Hash: l.Hash.toString(), - Size: l.Tsize - })) - } - } else if (typeof obj === 'object') { - // FIXME: this will corrupt data for for byte values over 127 - if (obj.Data) { - tmpObj.Data = 
uint8ArrayToString(obj.Data) - } - - if (obj.Links) { - // @ts-ignore Size is Tsize - tmpObj.Links = obj.Links - } - } else { - throw new Error('obj not recognized') - } - - let buf - if (obj instanceof Uint8Array && options.enc) { - buf = obj - } else { - options.enc = 'json' - buf = uint8ArrayFromString(JSON.stringify(tmpObj)) +/** + * @param {import('ipfs-core-utils/src/multicodecs')} codecs + * @param {import('../types').Options} options + */ +module.exports = (codecs, options) => { + const fn = configure((api) => { + const dagPut = require('../dag/put')(codecs, options) + + /** + * @type {ObjectAPI["put"]} + */ + async function put (obj, options = {}) { + return dagPut(obj, { + ...options, + format: 'dag-pb', + hashAlg: 'sha2-256', + version: 0 + }) } + return put + }) - // allow aborting requests on body errors - const controller = new AbortController() - const signal = abortSignal(controller.signal, options.signal) - - // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 - const res = await api.post('object/put', { - timeout: options.timeout, - signal, - searchParams: toUrlSearchParams(options), - ...( - await multipartRequest(buf, controller, options.headers) - ) - }) - - const { Hash } = await res.json() - - return new CID(Hash) - } - return put -}) + return fn(options) +} diff --git a/packages/ipfs-http-client/src/object/stat.js b/packages/ipfs-http-client/src/object/stat.js index 70307ba597..4d7077a1f7 100644 --- a/packages/ipfs-http-client/src/object/stat.js +++ b/packages/ipfs-http-client/src/object/stat.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -18,13 +18,18 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: `${cid instanceof Uint8Array ? 
new CID(cid) : cid}`, + arg: `${cid}`, ...options }), headers: options.headers }) - return res.json() + const output = await res.json() + + return { + ...output, + Hash: CID.parse(output.Hash) + } } return stat }) diff --git a/packages/ipfs-http-client/src/pin/add-all.js b/packages/ipfs-http-client/src/pin/add-all.js index fbfa2317b5..bf0a4e2901 100644 --- a/packages/ipfs-http-client/src/pin/add-all.js +++ b/packages/ipfs-http-client/src/pin/add-all.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -32,12 +32,12 @@ module.exports = configure(api => { for await (const pin of res.ndjson()) { if (pin.Pins) { // non-streaming response for (const cid of pin.Pins) { - yield new CID(cid) + yield CID.parse(cid) } continue } - yield new CID(pin) + yield CID.parse(pin) } } } diff --git a/packages/ipfs-http-client/src/pin/ls.js b/packages/ipfs-http-client/src/pin/ls.js index 07850063ce..859f1499dd 100644 --- a/packages/ipfs-http-client/src/pin/ls.js +++ b/packages/ipfs-http-client/src/pin/ls.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -18,7 +18,7 @@ function toPin (type, cid, metadata) { /** @type {import('ipfs-core-types/src/pin').LsResult} */ const pin = { type, - cid: new CID(cid) + cid: CID.parse(cid) } if (metadata) { diff --git a/packages/ipfs-http-client/src/pin/remote/index.js b/packages/ipfs-http-client/src/pin/remote/index.js index aa8ffd66d4..29db834840 100644 --- a/packages/ipfs-http-client/src/pin/remote/index.js +++ b/packages/ipfs-http-client/src/pin/remote/index.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = 
require('multiformats/cid') const Client = require('../../lib/core') const Service = require('./service') const toUrlSearchParams = require('../../lib/to-url-search-params') @@ -96,7 +96,7 @@ Remote.prototype.rmAll = async function ({ timeout, signal, headers, ...query }) */ const decodePin = ({ Name: name, Status: status, Cid: cid }) => { return { - cid: new CID(cid), + cid: CID.parse(cid), name, status } @@ -119,10 +119,10 @@ const encodeService = (service) => { * @returns {string} */ const encodeCID = (cid) => { - if (CID.isCID(cid)) { + if (cid instanceof CID) { return cid.toString() } else { - throw new TypeError(`CID instance expected instead of ${cid}`) + throw new TypeError(`CID instance expected instead of ${typeof cid}`) } } diff --git a/packages/ipfs-http-client/src/pin/remote/service.js b/packages/ipfs-http-client/src/pin/remote/service.js index f5cc658613..a6892955e9 100644 --- a/packages/ipfs-http-client/src/pin/remote/service.js +++ b/packages/ipfs-http-client/src/pin/remote/service.js @@ -11,6 +11,7 @@ const toUrlSearchParams = require('../../lib/to-url-search-params') * @typedef {import('ipfs-core-types/src/pin/remote/service').RemotePinServiceWithStat} RemotePinServiceWithStat * @typedef {import('../../types').HTTPClientExtraOptions} HTTPClientExtraOptions * @typedef {import('ipfs-core-types/src/pin/remote/service').API} RemotePiningServiceAPI + * @typedef {import('ipfs-core-types/src/pin/remote/service').Stat} Stat */ class Service { /** @@ -120,8 +121,7 @@ Service.prototype.ls = async function ls (options = {}) { /** @type {{RemoteServices: Object[]}} */ const { RemoteServices } = await response.json() - /** @type {Stat extends true ? 
RemotePinServiceWithStat[] : RemotePinService []} */ - return (RemoteServices.map(Service.decodeRemoteService)) + return RemoteServices.map(Service.decodeRemoteService) } module.exports = Service diff --git a/packages/ipfs-http-client/src/pin/rm-all.js b/packages/ipfs-http-client/src/pin/rm-all.js index d4d0ca65b7..98a61dc31c 100644 --- a/packages/ipfs-http-client/src/pin/rm-all.js +++ b/packages/ipfs-http-client/src/pin/rm-all.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -34,10 +34,10 @@ module.exports = configure(api => { for await (const pin of res.ndjson()) { if (pin.Pins) { // non-streaming response - yield * pin.Pins.map((/** @type {string} */ cid) => new CID(cid)) + yield * pin.Pins.map((/** @type {string} */ cid) => CID.parse(cid)) continue } - yield new CID(pin) + yield CID.parse(pin) } } } diff --git a/packages/ipfs-http-client/src/refs/index.js b/packages/ipfs-http-client/src/refs/index.js index cdab0085b2..6f610d7fd1 100644 --- a/packages/ipfs-http-client/src/refs/index.js +++ b/packages/ipfs-http-client/src/refs/index.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const toCamel = require('../lib/object-to-camel') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -15,15 +15,14 @@ module.exports = configure((api, opts) => { * @type {RefsAPI["refs"]} */ const refs = async function * (args, options = {}) { - if (!Array.isArray(args)) { - args = [args] - } + /** @type {import('ipfs-core-types/src/utils').IPFSPath[]} */ + const argsArr = Array.isArray(args) ? 
args : [args] const res = await api.post('refs', { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: args.map(arg => `${arg instanceof Uint8Array ? new CID(arg) : arg}`), + arg: argsArr.map(arg => `${arg instanceof Uint8Array ? CID.decode(arg) : arg}`), ...options }), headers: options.headers, diff --git a/packages/ipfs-http-client/src/repo/gc.js b/packages/ipfs-http-client/src/repo/gc.js index a1199f21f6..49f4805069 100644 --- a/packages/ipfs-http-client/src/repo/gc.js +++ b/packages/ipfs-http-client/src/repo/gc.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') @@ -22,7 +22,7 @@ module.exports = configure(api => { transform: (res) => { return { err: res.Error ? new Error(res.Error) : null, - cid: (res.Key || {})['/'] ? new CID(res.Key['/']) : null + cid: (res.Key || {})['/'] ? CID.parse(res.Key['/']) : null } } }) diff --git a/packages/ipfs-http-client/src/types.d.ts b/packages/ipfs-http-client/src/types.d.ts index 0579c49520..f2403a6f73 100644 --- a/packages/ipfs-http-client/src/types.d.ts +++ b/packages/ipfs-http-client/src/types.d.ts @@ -1,10 +1,7 @@ -import { Format as IPLDFormat } from 'interface-ipld-format' import { Agent as HttpAgent } from 'http' import { Agent as HttpsAgent } from 'https' import { Multiaddr } from 'multiaddr' -import { CodecName } from 'multicodec' - -export type LoadFormatFn = (name: CodecName) => Promise> +import type { BlockCodec } from 'multiformats/codecs/interface' export interface Options { host?: string @@ -18,9 +15,17 @@ export interface Options { agent?: HttpAgent | HttpsAgent } +export interface LoadBaseFn { (codeOrName: number | string): Promise> } +export interface LoadCodecFn { (codeOrName: number | string): Promise> } +export interface LoadHasherFn { (codeOrName: number | string): Promise } + export interface 
IPLDOptions { - formats?: IPLDFormat[] - loadFormat?: LoadFormatFn + loadBase: LoadBaseFn + loadCodec: LoadCodecFn + loadHasher: LoadHasherFn + bases: Array> + codecs: Array> + hashers: MultihashHasher[] } export interface HTTPClientExtraOptions { @@ -29,9 +34,9 @@ export interface HTTPClientExtraOptions { } export interface EndpointConfig { - host: string, - port: string, - protocol: string, + host: string + port: string + protocol: string pathname: string 'api-path': string } diff --git a/packages/ipfs-http-client/test/dag.spec.js b/packages/ipfs-http-client/test/dag.spec.js index 34cbaf4970..40ba0fceea 100644 --- a/packages/ipfs-http-client/test/dag.spec.js +++ b/packages/ipfs-http-client/test/dag.spec.js @@ -5,9 +5,11 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { expect } = require('aegir/utils/chai') -const ipldDagPb = require('ipld-dag-pb') -const { DAGNode } = ipldDagPb -const CID = require('cids') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const raw = require('multiformats/codecs/raw') +const { base58btc } = require('multiformats/bases/base58') +const { base32 } = require('multiformats/bases/base32') const f = require('./utils/factory')() const ipfsHttpClient = require('../src') @@ -23,13 +25,14 @@ describe('.dag', function () { it('should be able to put and get a DAG node with format dag-pb', async () => { const data = uint8ArrayFromString('some data') - const node = new DAGNode(data) + const node = { + Data: data, + Links: [] + } - let cid = await ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }) - cid = cid.toV0() - expect(cid.codec).to.equal('dag-pb') - cid = cid.toBaseEncodedString('base58btc') - expect(cid).to.equal('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') + const cid = await ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256', cidVersion: 0 }) + expect(cid.code).to.equal(dagPb.code) + 
expect(cid.toString(base58btc)).to.equal('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') const result = await ipfs.dag.get(cid) @@ -38,11 +41,10 @@ describe('.dag', function () { it('should be able to put and get a DAG node with format dag-cbor', async () => { const cbor = { foo: 'dag-cbor-bar' } - let cid = await ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) - expect(cid.codec).to.equal('dag-cbor') - cid = cid.toBaseEncodedString('base32') - expect(cid).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') + expect(cid.code).to.equal(dagCbor.code) + expect(cid.toString(base32)).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') const result = await ipfs.dag.get(cid) @@ -51,11 +53,10 @@ describe('.dag', function () { it('should be able to put and get a DAG node with format raw', async () => { const node = uint8ArrayFromString('some data') - let cid = await ipfs.dag.put(node, { format: 'raw', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(node, { format: 'raw', hashAlg: 'sha2-256' }) - expect(cid.codec).to.equal('raw') - cid = cid.toBaseEncodedString('base32') - expect(cid).to.equal('bafkreiata6mq425fzikf5m26temcvg7mizjrxrkn35swuybmpah2ajan5y') + expect(cid.code).to.equal(raw.code) + expect(cid.toString(base32)).to.equal('bafkreiata6mq425fzikf5m26temcvg7mizjrxrkn35swuybmpah2ajan5y') const result = await ipfs.dag.get(cid) @@ -63,17 +64,17 @@ describe('.dag', function () { }) it('should error when missing DAG resolver for multicodec from requested CID', async () => { - const block = await ipfs.block.put(Uint8Array.from([0, 1, 2, 3]), { - cid: new CID('z8mWaJ1dZ9fH5EetPuRsj8jj26pXsgpsr') + const cid = await ipfs.block.put(Uint8Array.from([0, 1, 2, 3]), { + format: 'git-raw' }) - await expect(ipfs.dag.get(block.cid)).to.eventually.be.rejectedWith('Missing IPLD format "git-raw"') + await 
expect(ipfs.dag.get(cid)).to.eventually.be.rejectedWith(/No codec found/) }) it('should error when putting node with esoteric format', () => { const node = uint8ArrayFromString('some data') - return expect(ipfs.dag.put(node, { format: 'git-raw', hashAlg: 'sha2-256' })).to.eventually.be.rejectedWith(/Missing IPLD format/) + return expect(ipfs.dag.put(node, { format: 'git-raw', hashAlg: 'sha2-256' })).to.eventually.be.rejectedWith(/No codec found/) }) it('should attempt to load an unsupported format', async () => { @@ -81,12 +82,10 @@ describe('.dag', function () { const ipfs2 = ipfsHttpClient.create({ url: `http://${ipfs.apiHost}:${ipfs.apiPort}`, ipld: { - loadFormat: (format) => { + loadCodec: (format) => { askedToLoadFormat = format === 'git-raw' return { - util: { - serialize: (buf) => buf - } + encode: (buf) => buf } } } @@ -104,9 +103,12 @@ describe('.dag', function () { const ipfs2 = ipfsHttpClient.create({ url: `http://${ipfs.apiHost}:${ipfs.apiPort}`, ipld: { - formats: [ - ipldDagPb - ] + codecs: [{ + name: 'custom-codec', + code: 1337, + encode: (thing) => thing, + decode: (thing) => thing + }] } }) @@ -118,7 +120,10 @@ describe('.dag', function () { hashAlg: 'sha2-256' }) - const dagPbNode = new DAGNode(new Uint8Array(0), [], 0) + const dagPbNode = { + Data: new Uint8Array(0), + Links: [] + } const cid2 = await ipfs2.dag.put(dagPbNode, { format: 'dag-pb', hashAlg: 'sha2-256' diff --git a/packages/ipfs-http-client/test/exports.spec.js b/packages/ipfs-http-client/test/exports.spec.js index b13008847e..104133d9ab 100644 --- a/packages/ipfs-http-client/test/exports.spec.js +++ b/packages/ipfs-http-client/test/exports.spec.js @@ -1,11 +1,8 @@ /* eslint-env mocha, browser */ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const { multiaddr } = require('multiaddr') -const multibase = require('multibase') -const multicodec = require('multicodec') -const multihash = require('multihashes') const { expect } = 
require('aegir/utils/chai') const IpfsHttpClient = require('../') @@ -14,8 +11,5 @@ describe('exports', () => { it('should export the expected types and utilities', () => { expect(IpfsHttpClient.CID).to.equal(CID) expect(IpfsHttpClient.multiaddr).to.equal(multiaddr) - expect(IpfsHttpClient.multibase).to.equal(multibase) - expect(IpfsHttpClient.multicodec).to.equal(multicodec) - expect(IpfsHttpClient.multihash).to.equal(multihash) }) }) diff --git a/packages/ipfs-http-client/test/files.spec.js b/packages/ipfs-http-client/test/files.spec.js index 10e2d1c844..2f4056f30d 100644 --- a/packages/ipfs-http-client/test/files.spec.js +++ b/packages/ipfs-http-client/test/files.spec.js @@ -5,6 +5,7 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { expect } = require('aegir/utils/chai') const f = require('./utils/factory')() +const dagPb = require('@ipld/dag-pb') describe('.add', function () { this.timeout(20 * 1000) @@ -32,7 +33,7 @@ describe('.add', function () { expect(result).to.have.property('cid') const { cid } = result - expect(cid).to.have.property('codec', 'dag-pb') + expect(cid).to.have.property('code', dagPb.code) expect(cid.toString()).to.equal('QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS') }) }) diff --git a/packages/ipfs-http-client/test/utils/factory.js b/packages/ipfs-http-client/test/utils/factory.js index 3a0bd3961b..a3d87237bc 100644 --- a/packages/ipfs-http-client/test/utils/factory.js +++ b/packages/ipfs-http-client/test/utils/factory.js @@ -2,7 +2,7 @@ // @ts-ignore no types const { createFactory } = require('ipfsd-ctl') -const merge = require('merge-options') +const merge = require('merge-options').bind({ ignoreUndefined: true }) const { isNode } = require('ipfs-utils/src/env') const commonOptions = { diff --git a/packages/ipfs-http-gateway/package.json b/packages/ipfs-http-gateway/package.json index 6b92348079..bcc294f06c 100644 --- a/packages/ipfs-http-gateway/package.json +++ b/packages/ipfs-http-gateway/package.json @@ 
-44,26 +44,24 @@ "@hapi/ammo": "^5.0.1", "@hapi/boom": "^9.1.0", "@hapi/hapi": "^20.0.0", - "cids": "^1.1.6", "debug": "^4.1.1", "hapi-pino": "^8.3.0", "ipfs-core-types": "^0.5.2", - "ipfs-core-utils": "^0.8.3", - "ipfs-http-response": "^0.6.0", - "is-ipfs": "^5.0.0", + "ipfs-http-response": "^0.7.0", + "is-ipfs": "^6.0.1", "it-last": "^1.0.4", "it-to-stream": "^1.0.0", "joi": "^17.2.1", - "multibase": "^4.0.2", - "uint8arrays": "^2.1.3", - "uri-to-multiaddr": "^5.0.0" + "multiformats": "^9.4.1", + "uint8arrays": "^2.1.6", + "uri-to-multiaddr": "^6.0.0" }, "devDependencies": { "@types/hapi-pino": "^8.0.1", "@types/hapi__hapi": "^20.0.5", - "aegir": "^33.0.0", + "aegir": "^34.0.2", "file-type": "^16.0.0", "rimraf": "^3.0.2", - "sinon": "^10.0.1" + "sinon": "^11.1.1" } } diff --git a/packages/ipfs-http-gateway/src/index.js b/packages/ipfs-http-gateway/src/index.js index e3a0229a12..00511b3f48 100644 --- a/packages/ipfs-http-gateway/src/index.js +++ b/packages/ipfs-http-gateway/src/index.js @@ -77,7 +77,6 @@ class HttpGateway { this._gatewayServers = await serverCreator(gatewayAddrs, this._createGatewayServer, ipfs) this._log('started') - return this } /** diff --git a/packages/ipfs-http-gateway/src/resources/gateway.js b/packages/ipfs-http-gateway/src/resources/gateway.js index e33a41bb4a..f56f32492f 100644 --- a/packages/ipfs-http-gateway/src/resources/gateway.js +++ b/packages/ipfs-http-gateway/src/resources/gateway.js @@ -2,11 +2,11 @@ const debug = require('debug') const uint8ArrayFromString = require('uint8arrays/from-string') -const uint8ArrayToString = require('uint8arrays/to-string') const Boom = require('@hapi/boom') const Ammo = require('@hapi/ammo') // HTTP Range processing utilities const last = require('it-last') -const multibase = require('multibase') +const { CID } = require('multiformats/cid') +const { base32 } = require('multiformats/bases/base32') // @ts-ignore no types const { resolver } = require('ipfs-http-response') // @ts-ignore no types @@ 
-15,7 +15,6 @@ const isIPFS = require('is-ipfs') // @ts-ignore no types const toStream = require('it-to-stream') const PathUtils = require('../utils/path') -const { cidToString } = require('ipfs-core-utils/src/cid') const log = Object.assign(debug('ipfs:http-gateway'), { error: debug('ipfs:http-gateway:error') @@ -206,15 +205,15 @@ module.exports = { response.header('Last-Modified', 'Thu, 01 Jan 1970 00:00:01 GMT') // Suborigin for /ipfs/: https://github.com/ipfs/in-web-browsers/issues/66 const rootCid = path.split('/')[2] - const ipfsOrigin = cidToString(rootCid, { base: 'base32' }) + const ipfsOrigin = CID.parse(rootCid).toV1().toString(base32) response.header('Suborigin', `ipfs000${ipfsOrigin}`) } else if (path.startsWith('/ipns/')) { // Suborigin for /ipns/: https://github.com/ipfs/in-web-browsers/issues/66 const root = path.split('/')[2] // encode CID/FQDN in base32 (Suborigin allows only a-z) const ipnsOrigin = isIPFS.cid(root) - ? cidToString(root, { base: 'base32' }) - : uint8ArrayToString(multibase.encode('base32', uint8ArrayFromString(root))) + ? 
CID.parse(root).toV1().toString(base32) + : base32.encode(uint8ArrayFromString(root)) response.header('Suborigin', `ipns000${ipnsOrigin}`) } } diff --git a/packages/ipfs-http-gateway/test/routes.spec.js b/packages/ipfs-http-gateway/test/routes.spec.js index 5eae5547e3..3b02002be4 100644 --- a/packages/ipfs-http-gateway/test/routes.spec.js +++ b/packages/ipfs-http-gateway/test/routes.spec.js @@ -5,7 +5,8 @@ const { expect } = require('aegir/utils/chai') const uint8ArrayFromString = require('uint8arrays/from-string') const FileType = require('file-type') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { base32 } = require('multiformats/bases/base32') const http = require('./utils/http') const sinon = require('sinon') const fs = require('fs') @@ -60,7 +61,7 @@ describe('HTTP Gateway', function () { }) it('returns 400 for service worker registration outside of an IPFS content root', async () => { - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, type: 'file' @@ -78,7 +79,7 @@ describe('HTTP Gateway', function () { }) it('valid CIDv0', async () => { - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = uint8ArrayFromString('hello world\n') ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, @@ -140,7 +141,7 @@ describe('HTTP Gateway', function () { */ it('return 304 Not Modified if client announces cached CID in If-None-Match', async () => { - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = uint8ArrayFromString('hello world\n') ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, @@ -178,7 +179,7 @@ describe('HTTP Gateway', function () { }) it('return 304 Not 
Modified if /ipfs/ was requested with any If-Modified-Since', async () => { - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = uint8ArrayFromString('hello world\n') ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, @@ -216,7 +217,7 @@ describe('HTTP Gateway', function () { }) it('return proper Content-Disposition if ?filename=foo is included in URL', async () => { - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = uint8ArrayFromString('hello world\n') ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, @@ -238,7 +239,7 @@ describe('HTTP Gateway', function () { }) it('load a big file (15MB)', async () => { - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = uint8ArrayFromString(new Array(15000000).fill('0').join('')) ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, @@ -268,7 +269,7 @@ describe('HTTP Gateway', function () { // use 12 byte text file to make it easier to debug ;-) const fileLength = 12 const range = { from: 1, length: 11 } - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = uint8ArrayFromString(new Array(fileLength).fill('0').join('')) ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, @@ -328,7 +329,7 @@ describe('HTTP Gateway', function () { // use 12 byte text file to make it easier to debug ;-) const fileLength = 12 const range = { from: 1, to: 3, length: 3 } - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = uint8ArrayFromString(new Array(fileLength).fill('0').join('')) 
ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, @@ -391,7 +392,7 @@ describe('HTTP Gateway', function () { // use 12 byte text file to make it easier to debug ;-) const fileLength = 12 const range = { tail: 7, from: 5, to: 11, length: 7 } - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = uint8ArrayFromString(new Array(fileLength).fill('0').join('')) ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, @@ -453,7 +454,7 @@ describe('HTTP Gateway', function () { // use 12 byte text file to make it easier to debug ;-) const fileLength = 12 - const cid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const cid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = uint8ArrayFromString(new Array(fileLength).fill('0').join('')) ipfs.files.stat.withArgs(`/ipfs/${cid}`).resolves({ cid, @@ -476,8 +477,8 @@ describe('HTTP Gateway', function () { }) it('load a jpg file', async () => { - const fileCid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const fileCid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = fs.readFileSync('test/fixtures/cat-folder/cat.jpg') ipfs.files.stat.withArgs(`/ipfs/${dirCid}/cat.jpg`).resolves({ cid: fileCid, @@ -506,7 +507,7 @@ describe('HTTP Gateway', function () { expect(res.headers['cache-control']).to.equal('public, max-age=29030400, immutable') expect(res.headers['last-modified']).to.equal('Thu, 01 Jan 1970 00:00:01 GMT') expect(res.headers.etag).to.equal(`"${fileCid}"`) - expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString('base32')}`) + expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString(base32)}`) const fileSignature = await 
FileType.fromBuffer(res.rawPayload) expect(fileSignature.mime).to.equal('image/jpeg') @@ -514,8 +515,8 @@ describe('HTTP Gateway', function () { }) it('load a svg file (unsniffable)', async () => { - const fileCid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const fileCid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = fs.readFileSync('test/fixtures/unsniffable-folder/hexagons.svg') ipfs.files.stat.withArgs(`/ipfs/${dirCid}/hexagons.svg`).resolves({ cid: fileCid, @@ -541,8 +542,8 @@ describe('HTTP Gateway', function () { }) it('load a svg file with xml leading declaration (unsniffable)', async () => { - const fileCid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const fileCid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') const content = fs.readFileSync('test/fixtures/unsniffable-folder/hexagons-xml.svg') ipfs.files.stat.withArgs(`/ipfs/${dirCid}/hexagons-xml.svg`).resolves({ cid: fileCid, @@ -568,7 +569,7 @@ describe('HTTP Gateway', function () { }) it('load a directory', async () => { - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') ipfs.files.stat.withArgs(`/ipfs/${dirCid}/`).resolves({ cid: dirCid, type: 'directory' @@ -597,7 +598,7 @@ describe('HTTP Gateway', function () { expect(res.headers['last-modified']).to.equal('Thu, 01 Jan 1970 00:00:01 GMT') expect(res.headers['content-length']).to.equal(res.rawPayload.length) expect(res.headers.etag).to.equal(undefined) - expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString('base32')}`) + 
expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString(base32)}`) // check if the cat picture is in the payload as a way to check // if this is an index of this directory @@ -606,8 +607,8 @@ describe('HTTP Gateway', function () { }) it('load a webpage index.html', async () => { - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') - const fileCid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const fileCid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') const content = fs.readFileSync('test/fixtures/index.html') ipfs.files.stat.withArgs(`/ipfs/${dirCid}/index.html`).resolves({ cid: fileCid, @@ -635,13 +636,13 @@ describe('HTTP Gateway', function () { expect(res.headers['last-modified']).to.equal('Thu, 01 Jan 1970 00:00:01 GMT') expect(res.headers['content-length']).to.equal(res.rawPayload.length.toString()) expect(res.headers.etag).to.equal(`"${fileCid}"`) - expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString('base32')}`) + expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString(base32)}`) expect(res.rawPayload).to.deep.equal(content) }) it('load a webpage {hash}/nested-folder/nested.html', async () => { - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') - const fileCid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const fileCid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') const content = fs.readFileSync('test/fixtures/index.html') ipfs.files.stat.withArgs(`/ipfs/${dirCid}/nested-folder/nested.html`).resolves({ cid: fileCid, @@ -669,12 +670,12 @@ describe('HTTP Gateway', function () { expect(res.headers['last-modified']).to.equal('Thu, 01 Jan 1970 00:00:01 GMT') expect(res.headers['content-length']).to.equal(res.rawPayload.length.toString()) 
expect(res.headers.etag).to.equal(`"${fileCid}"`) - expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString('base32')}`) + expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString(base32)}`) expect(res.rawPayload).to.deep.equal(content) }) it('redirects to generated index', async () => { - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') ipfs.files.stat.withArgs(`/ipfs/${dirCid}`).resolves({ cid: dirCid, type: 'directory' @@ -699,8 +700,8 @@ describe('HTTP Gateway', function () { }) it('redirect to a directory with index.html', async () => { - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') - const fileCid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const fileCid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') ipfs.files.stat.withArgs(`/ipfs/${dirCid}`).resolves({ cid: dirCid, type: 'directory' @@ -722,8 +723,8 @@ describe('HTTP Gateway', function () { }) it('load a directory with index.html', async () => { - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') - const fileCid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const fileCid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') const content = fs.readFileSync('test/fixtures/index.html') ipfs.files.stat.withArgs(`/ipfs/${dirCid}/`).resolves({ cid: dirCid, @@ -761,13 +762,13 @@ describe('HTTP Gateway', function () { expect(res.headers['last-modified']).to.equal('Thu, 01 Jan 1970 00:00:01 GMT') expect(res.headers['content-length']).to.equal(res.rawPayload.length.toString()) expect(res.headers.etag).to.equal(`"${fileCid}"`) - expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString('base32')}`) + 
expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString(base32)}`) expect(res.rawPayload).to.deep.equal(content) }) it('test(gateway): load from URI-encoded path', async () => { - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') - const fileCid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const fileCid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') // non-ascii characters will be URI-encoded by the browser const utf8path = `/ipfs/${dirCid}/cat-with-óąśśł-and-أعظم._.jpg` const escapedPath = encodeURI(utf8path) // this is what will be actually requested @@ -798,13 +799,13 @@ describe('HTTP Gateway', function () { expect(res.headers['last-modified']).to.equal('Thu, 01 Jan 1970 00:00:01 GMT') expect(res.headers['content-length']).to.equal(res.rawPayload.length.toString()) expect(res.headers.etag).to.equal(`"${fileCid}"`) - expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString('base32')}`) + expect(res.headers.suborigin).to.equal(`ipfs000${dirCid.toV1().toString(base32)}`) }) it('load a file from IPNS', async () => { const id = 'Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7A' const ipnsPath = `/ipns/${id}/cat.jpg` - const fileCid = new CID('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') + const fileCid = CID.parse('Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7u') const content = fs.readFileSync('test/fixtures/cat-folder/cat.jpg') ipfs.name.resolve.withArgs(ipnsPath).returns([`/ipfs/${fileCid}`]) @@ -835,7 +836,7 @@ describe('HTTP Gateway', function () { expect(res.headers['cache-control']).to.equal('no-cache') // TODO: should be record TTL expect(res.headers['last-modified']).to.equal(undefined) expect(res.headers.etag).to.equal(`"${fileCid}"`) - expect(res.headers.suborigin).to.equal(`ipns000${new CID(id).toV1().toBaseEncodedString('base32')}`) + 
expect(res.headers.suborigin).to.equal(`ipns000${CID.parse(id).toV1().toString()}`) const fileSignature = await FileType.fromBuffer(res.rawPayload) expect(fileSignature.mime).to.equal('image/jpeg') @@ -845,7 +846,7 @@ describe('HTTP Gateway', function () { it('load a directory from IPNS', async () => { const id = 'Qmd286K6pohQcTKYqnS1YhWrCiS4gz7Xi34sdwMe9USZ7A' const ipnsPath = `/ipns/${id}/` - const dirCid = new CID('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + const dirCid = CID.parse('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') ipfs.name.resolve.withArgs(ipnsPath).returns([`/ipfs/${dirCid}`]) ipfs.files.stat.withArgs(`/ipfs/${dirCid}`).resolves({ cid: dirCid, @@ -875,7 +876,7 @@ describe('HTTP Gateway', function () { expect(res.headers['last-modified']).to.equal(undefined) expect(res.headers['content-length']).to.equal(res.rawPayload.length) expect(res.headers.etag).to.equal(undefined) - expect(res.headers.suborigin).to.equal(`ipns000${new CID(id).toV1().toBaseEncodedString('base32')}`) + expect(res.headers.suborigin).to.equal(`ipns000${CID.parse(id).toV1().toString()}`) // check if the cat picture is in the payload as a way to check // if this is an index of this directory diff --git a/packages/ipfs-http-gateway/tsconfig.json b/packages/ipfs-http-gateway/tsconfig.json index 35715d452d..03031d1601 100644 --- a/packages/ipfs-http-gateway/tsconfig.json +++ b/packages/ipfs-http-gateway/tsconfig.json @@ -8,10 +8,7 @@ ], "references": [ { - "path": "../ipfs-core" - }, - { - "path": "../ipfs-core-utils" + "path": "../ipfs-core-types" } ] } diff --git a/packages/ipfs-http-server/package.json b/packages/ipfs-http-server/package.json index 82ee76ccbd..9459b93e46 100644 --- a/packages/ipfs-http-server/package.json +++ b/packages/ipfs-http-server/package.json @@ -33,8 +33,8 @@ "@hapi/boom": "^9.1.0", "@hapi/content": "^5.0.2", "@hapi/hapi": "^20.0.0", + "@ipld/dag-pb": "^2.1.3", "abort-controller": "^3.0.0", - "cids": "^1.1.6", "debug": "^4.1.1", "dlv": 
"^1.1.3", "err-code": "^3.0.1", @@ -42,9 +42,7 @@ "ipfs-core-types": "^0.5.2", "ipfs-core-utils": "^0.8.3", "ipfs-http-gateway": "^0.4.3", - "ipfs-unixfs": "^4.0.3", - "ipld-block": "^0.11.1", - "ipld-dag-pb": "^0.22.1", + "ipfs-unixfs": "^5.0.0", "it-all": "^1.0.4", "it-drain": "^1.0.3", "it-filter": "^1.0.2", @@ -59,27 +57,25 @@ "it-tar": "^3.0.0", "joi": "^17.2.1", "just-safe-set": "^2.2.1", - "multiaddr": "^9.0.1", - "multibase": "^4.0.2", - "multicodec": "^3.0.1", - "multihashing-async": "^2.1.2", + "multiaddr": "^10.0.0", + "multiformats": "^9.4.1", "native-abort-controller": "^1.0.3", "parse-duration": "^1.0.0", "stream-to-it": "^0.2.2", - "uint8arrays": "^2.1.3", - "uri-to-multiaddr": "^5.0.0" + "uint8arrays": "^2.1.6", + "uri-to-multiaddr": "^6.0.0" }, "devDependencies": { "@types/hapi-pino": "^8.0.1", "@types/hapi__hapi": "^20.0.5", - "aegir": "^33.0.0", + "aegir": "^34.0.2", "form-data": "^4.0.0", "ipfs-http-client": "^50.1.2", "iso-random-stream": "^2.0.0", "it-to-buffer": "^2.0.0", "qs": "^6.9.4", "rimraf": "^3.0.2", - "sinon": "^10.0.1", + "sinon": "^11.1.1", "stream-to-promise": "^3.0.0" }, "optionalDependencies": { diff --git a/packages/ipfs-http-server/src/api/resources/bitswap.js b/packages/ipfs-http-server/src/api/resources/bitswap.js index 74c078c569..82eaf1c1d9 100644 --- a/packages/ipfs-http-server/src/api/resources/bitswap.js +++ b/packages/ipfs-http-server/src/api/resources/bitswap.js @@ -1,7 +1,6 @@ 'use strict' const Joi = require('../../utils/joi') -const { cidToString } = require('ipfs-core-utils/src/cid') exports.wantlist = { options: { @@ -11,8 +10,8 @@ exports.wantlist = { stripUnknown: true }, query: Joi.object().keys({ - peer: Joi.cid(), - cidBase: Joi.cidBase(), + peer: Joi.string(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -57,9 +56,11 @@ exports.wantlist = { }) } + const base = await ipfs.bases.getBase(cidBase) + return h.response({ Keys: list.map(cid => ({ - '/': 
cidToString(cid, { base: cidBase, upgrade: false }) + '/': cid.toString(base.encoder) })) }) } @@ -73,7 +74,7 @@ exports.stat = { stripUnknown: true }, query: Joi.object().keys({ - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -108,11 +109,13 @@ exports.stat = { timeout }) + const base = await ipfs.bases.getBase(cidBase) + return h.response({ ProvideBufLen: stats.provideBufLen, BlocksReceived: stats.blocksReceived.toString(), Wantlist: stats.wantlist.map(cid => ({ - '/': cidToString(cid, { base: cidBase, upgrade: false }) + '/': cid.toString(base.encoder) })), Peers: stats.peers, DupBlksReceived: stats.dupBlksReceived.toString(), @@ -133,7 +136,7 @@ exports.unwant = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('arg', 'cid', { @@ -173,6 +176,8 @@ exports.unwant = { timeout }) - return h.response({ key: cidToString(cid, { base: cidBase, upgrade: false }) }) + const base = await ipfs.bases.getBase(cidBase) + + return h.response({ key: cid.toString(base.encoder) }) } } diff --git a/packages/ipfs-http-server/src/api/resources/block.js b/packages/ipfs-http-server/src/api/resources/block.js index 51704e1f04..10d57ef3b3 100644 --- a/packages/ipfs-http-server/src/api/resources/block.js +++ b/packages/ipfs-http-server/src/api/resources/block.js @@ -1,11 +1,8 @@ 'use strict' -const multihash = require('multihashing-async').multihash -const { nameToCode: codecs } = require('multicodec') const multipart = require('../../utils/multipart-request-parser') const Joi = require('../../utils/joi') const Boom = require('@hapi/boom') -const { cidToString } = require('ipfs-core-utils/src/cid') const all = require('it-all') const { pipe } = require('it-pipe') const map = require('it-map') @@ -67,7 +64,7 @@ exports.get = { throw Boom.notFound('Block was unwanted before it could be 
remotely retrieved') } - return h.response(Buffer.from(block.data.buffer, block.data.byteOffset, block.data.byteLength)).header('X-Stream-Output', '1') + return h.response(Buffer.from(block.buffer, block.byteOffset, block.byteLength)).header('X-Stream-Output', '1') } } exports.put = { @@ -110,12 +107,12 @@ exports.put = { stripUnknown: true }, query: Joi.object().keys({ - cidBase: Joi.cidBase(), - format: Joi.string().valid(...Object.keys(codecs)), - mhtype: Joi.string().valid(...Object.keys(multihash.names)), + cidBase: Joi.string().default('base32'), + format: Joi.string().default('dag-pb'), + mhtype: Joi.string().default('sha2-256'), mhlen: Joi.number(), pin: Joi.bool().default(false), - version: Joi.number(), + version: Joi.number().default(0), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -146,7 +143,6 @@ exports.put = { }, query: { mhtype, - mhlen, format, version, pin, @@ -155,13 +151,15 @@ exports.put = { } } = request - let block + const codec = format === 'v0' ? 'dag-pb' : format + const cidVersion = codec === 'dag-pb' && mhtype === 'sha2-256' ? version : 1 + let cid + try { - block = await ipfs.block.put(data, { + cid = await ipfs.block.put(data, { mhtype, - mhlen, - format, - version, + format: codec, + version: cidVersion, pin, signal, timeout @@ -170,9 +168,11 @@ exports.put = { throw Boom.boomify(err, { message: 'Failed to put block' }) } + const base = await ipfs.bases.getBase(cidVersion === 0 ? 
'base58btc' : cidBase) + return h.response({ - Key: cidToString(block.cid, { base: cidBase }), - Size: block.data.length + Key: cid.toString(base.encoder), + Size: data.length }) } } @@ -188,7 +188,7 @@ exports.rm = { cids: Joi.array().single().items(Joi.cid()).min(1).required(), force: Joi.boolean().default(false), quiet: Joi.boolean().default(false), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -233,7 +233,9 @@ exports.rm = { signal }), async function * (source) { - yield * map(source, ({ cid, error }) => ({ Hash: cidToString(cid, { base: cidBase }), Error: error ? error.message : undefined })) + const base = await ipfs.bases.getBase(cidBase) + + yield * map(source, ({ cid, error }) => ({ Hash: cid.toString(base.encoder), Error: error ? error.message : undefined })) } )) } @@ -248,7 +250,7 @@ exports.stat = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('arg', 'cid', { @@ -293,8 +295,10 @@ exports.stat = { throw Boom.boomify(err, { message: 'Failed to get block stats' }) } + const base = await ipfs.bases.getBase(cidBase) + return h.response({ - Key: cidToString(stats.cid, { base: cidBase }), + Key: stats.cid.toString(base.encoder), Size: stats.size }) } diff --git a/packages/ipfs-http-server/src/api/resources/dag.js b/packages/ipfs-http-server/src/api/resources/dag.js index 0fe44cddf7..fec23bf3c9 100644 --- a/packages/ipfs-http-server/src/api/resources/dag.js +++ b/packages/ipfs-http-server/src/api/resources/dag.js @@ -1,21 +1,14 @@ 'use strict' const multipart = require('../../utils/multipart-request-parser') -const mha = require('multihashing-async') -const mh = mha.multihash const Joi = require('../../utils/joi') const Boom = require('@hapi/boom') -const { - cidToString -} = require('ipfs-core-utils/src/cid') const all = require('it-all') const 
uint8ArrayToString = require('uint8arrays/to-string') -const Block = require('ipld-block') -const CID = require('cids') /** * @param {undefined | Uint8Array | Record} obj - * @param {import('multibase').BaseName | 'utf8' | 'utf-8' | 'ascii'} encoding + * @param {'base64pad' | 'base16' | 'utf8'} encoding */ const encodeBufferKeys = (obj, encoding) => { if (!obj) { @@ -174,10 +167,13 @@ exports.put = { // the node is an uncommon format which the client should have // serialized so add it to the block store and fetch it deserialized // before continuing - const hash = await mha(data, request.query.hash) - const cid = new CID(request.query.cidVersion, format, hash) + const cidVersion = format === 'dag-pb' && request.query.hashAlg === 'sha2-256' ? request.query.version : 1 - await request.server.app.ipfs.block.put(new Block(data, cid)) + const cid = await request.server.app.ipfs.block.put(data, { + version: cidVersion, + format, + mhtype: request.query.hash + }) const { value @@ -201,9 +197,9 @@ exports.put = { format: Joi.string().default('cbor'), inputEncoding: Joi.string().default('json'), pin: Joi.boolean().default(false), - hash: Joi.string().valid(...Object.keys(mh.names)).default('sha2-256'), - cidBase: Joi.cidBase(), - cidVersion: Joi.number().integer().valid(0, 1).default(1), + hash: Joi.string().default('sha2-256'), + cidBase: Joi.string().default('base32'), + version: Joi.number().integer().valid(0, 1).default(1), timeout: Joi.timeout() }) .rename('input-enc', 'inputEncoding', { @@ -214,10 +210,6 @@ exports.put = { override: true, ignoreUndefined: true }) - .rename('cid-version', 'cidVersion', { - override: true, - ignoreUndefined: true - }) } }, @@ -245,16 +237,19 @@ exports.put = { query: { pin, cidBase, - timeout + timeout, + version } } = request + const cidVersion = format === 'dag-pb' && hashAlg === 'sha2-256' ? 
version : 1 let cid try { cid = await ipfs.dag.put(node, { format, hashAlg, + version: cidVersion, pin, signal, timeout @@ -263,11 +258,11 @@ exports.put = { throw Boom.boomify(err, { message: 'Failed to put node' }) } + const base = await ipfs.bases.getBase(cidVersion === 0 ? 'base58btc' : cidBase) + return h.response({ Cid: { - '/': cidToString(cid, { - base: cidBase - }) + '/': cid.toString(base.encoder) } }) } @@ -282,7 +277,7 @@ exports.resolve = { }, query: Joi.object().keys({ arg: Joi.cidAndPath().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout(), path: Joi.string() }) @@ -327,11 +322,11 @@ exports.resolve = { timeout }) + const base = await ipfs.bases.getBase(cidBase) + return h.response({ Cid: { - '/': cidToString(result.cid, { - base: cidBase - }) + '/': result.cid.toString(base.encoder) }, RemPath: result.remainderPath }) diff --git a/packages/ipfs-http-server/src/api/resources/dht.js b/packages/ipfs-http-server/src/api/resources/dht.js index feb58fcb67..12594a7caf 100644 --- a/packages/ipfs-http-server/src/api/resources/dht.js +++ b/packages/ipfs-http-server/src/api/resources/dht.js @@ -14,7 +14,7 @@ exports.findPeer = { stripUnknown: true }, query: Joi.object().keys({ - peerId: Joi.cid().required(), + peerId: Joi.string().required(), timeout: Joi.timeout() }) .rename('arg', 'peerId', { @@ -287,7 +287,7 @@ exports.query = { stripUnknown: true }, query: Joi.object().keys({ - peerId: Joi.cid().required(), + peerId: Joi.string().required(), timeout: Joi.timeout() }) .rename('arg', 'peerId', { diff --git a/packages/ipfs-http-server/src/api/resources/files-regular.js b/packages/ipfs-http-server/src/api/resources/files-regular.js index ea39d7481c..60f19dedec 100644 --- a/packages/ipfs-http-server/src/api/resources/files-regular.js +++ b/packages/ipfs-http-server/src/api/resources/files-regular.js @@ -5,7 +5,6 @@ const multipart = require('../../utils/multipart-request-parser') const tar = 
require('it-tar') const Joi = require('../../utils/joi') const Boom = require('@hapi/boom') -const { cidToString } = require('ipfs-core-utils/src/cid') const { pipe } = require('it-pipe') const all = require('it-all') const streamResponse = require('../../utils/stream-response') @@ -165,7 +164,7 @@ exports.add = { .keys({ cidVersion: Joi.number().integer().min(0).max(1), hashAlg: Joi.string(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), rawLeaves: Joi.boolean(), onlyHash: Joi.boolean(), pin: Joi.boolean(), @@ -328,10 +327,12 @@ exports.add = { timeout }), async function * (source) { + const base = await ipfs.bases.getBase(cidBase) + yield * map(source, file => { return { Name: file.path, - Hash: cidToString(file.cid, { base: cidBase }), + Hash: file.cid.toString(base.encoder), Size: file.size, Mode: file.mode === undefined ? undefined : file.mode.toString(8).padStart(4, '0'), Mtime: file.mtime ? file.mtime.secs : undefined, @@ -359,7 +360,7 @@ exports.ls = { query: Joi.object() .keys({ path: Joi.ipfsPath().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), stream: Joi.boolean().default(false), recursive: Joi.boolean().default(false), timeout: Joi.timeout() @@ -398,6 +399,8 @@ exports.ls = { } } = request + const base = await ipfs.bases.getBase(cidBase) + /** * TODO: can be ipfs.files.stat result or ipfs.ls result * @@ -405,7 +408,7 @@ exports.ls = { */ const mapLink = link => { return { - Hash: cidToString(link.cid, { base: cidBase }), + Hash: link.cid.toString(base.encoder), Size: link.size, Type: toTypeCode(link.type), Depth: link.depth, diff --git a/packages/ipfs-http-server/src/api/resources/files/flush.js b/packages/ipfs-http-server/src/api/resources/files/flush.js index 7fc3398671..d48929d7ac 100644 --- a/packages/ipfs-http-server/src/api/resources/files/flush.js +++ b/packages/ipfs-http-server/src/api/resources/files/flush.js @@ -1,7 +1,6 @@ 'use strict' const Joi = require('../../../utils/joi') 
-const { cidToString } = require('ipfs-core-utils/src/cid') const mfsFlush = { options: { @@ -12,7 +11,7 @@ const mfsFlush = { }, query: Joi.object().keys({ path: Joi.string().default('/'), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('arg', 'path', { @@ -52,8 +51,10 @@ const mfsFlush = { timeout }) + const base = await ipfs.bases.getBase(cidBase) + return h.response({ - Cid: cidToString(cid, { base: cidBase, upgrade: false }) + Cid: cid.toString(base.encoder) }) } } diff --git a/packages/ipfs-http-server/src/api/resources/files/ls.js b/packages/ipfs-http-server/src/api/resources/files/ls.js index 44a4de10b6..2c053aff53 100644 --- a/packages/ipfs-http-server/src/api/resources/files/ls.js +++ b/packages/ipfs-http-server/src/api/resources/files/ls.js @@ -8,16 +8,17 @@ const streamResponse = require('../../../utils/stream-response') /** * @param {*} entry - * @param {{ cidBase?: string, long?: boolean }} options + * @param {import('multiformats/bases/interface').MultibaseCodec} base + * @param {boolean} [long] */ -const mapEntry = (entry, options = {}) => { +const mapEntry = (entry, base, long) => { const type = entry.type === 'file' ? 0 : 1 return { Name: entry.name, - Type: options.long ? type : 0, - Size: options.long ? entry.size || 0 : 0, - Hash: entry.cid.toString(options.cidBase), + Type: long ? type : 0, + Size: long ? entry.size || 0 : 0, + Hash: entry.cid.toString(base.encoder), Mtime: entry.mtime ? entry.mtime.secs : undefined, MtimeNsecs: entry.mtime ? entry.mtime.nsecs : undefined, Mode: entry.mode != null ? 
entry.mode.toString(8).padStart(4, '0') : undefined @@ -34,7 +35,7 @@ const mfsLs = { query: Joi.object().keys({ path: Joi.string().default('/'), long: Joi.boolean().default(false), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), stream: Joi.boolean().default(false), timeout: Joi.timeout() }) @@ -68,13 +69,15 @@ const mfsLs = { } } = request + const base = await ipfs.bases.getBase(cidBase) + if (stream) { return streamResponse(request, h, () => pipe( ipfs.files.ls(path, { signal, timeout }), - source => map(source, (entry) => mapEntry(entry, { cidBase, long })) + source => map(source, (entry) => mapEntry(entry, base, long)) )) } @@ -84,7 +87,7 @@ const mfsLs = { })) return h.response({ - Entries: files.map(entry => mapEntry(entry, { cidBase, long })) + Entries: files.map(entry => mapEntry(entry, base, long)) }) } } diff --git a/packages/ipfs-http-server/src/api/resources/files/stat.js b/packages/ipfs-http-server/src/api/resources/files/stat.js index 222a204ac6..0dc81d12a8 100644 --- a/packages/ipfs-http-server/src/api/resources/files/stat.js +++ b/packages/ipfs-http-server/src/api/resources/files/stat.js @@ -14,7 +14,7 @@ const mfsStat = { hash: Joi.boolean().default(false), size: Joi.boolean().default(false), withLocal: Joi.boolean().default(false), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) } @@ -45,11 +45,13 @@ const mfsStat = { timeout }) + const base = await ipfs.bases.getBase(cidBase) + const output = { Type: stats.type, Blocks: stats.blocks, Size: stats.size, - Hash: stats.cid.toString(cidBase), + Hash: stats.cid.toString(base.encoder), CumulativeSize: stats.cumulativeSize, WithLocality: stats.withLocality, Local: stats.local, diff --git a/packages/ipfs-http-server/src/api/resources/object.js b/packages/ipfs-http-server/src/api/resources/object.js index 53da8a6ff5..6b4bbf1ee0 100644 --- a/packages/ipfs-http-server/src/api/resources/object.js +++ 
b/packages/ipfs-http-server/src/api/resources/object.js @@ -2,17 +2,28 @@ const multipart = require('../../utils/multipart-request-parser') const all = require('it-all') -const dagPB = require('ipld-dag-pb') -const { DAGLink } = dagPB +const dagPB = require('@ipld/dag-pb') const Joi = require('../../utils/joi') -const multibase = require('multibase') const Boom = require('@hapi/boom') const uint8ArrayToString = require('uint8arrays/to-string') -const { cidToString } = require('ipfs-core-utils/src/cid') +const uint8ArrayFromString = require('uint8arrays/from-string') const debug = require('debug') const log = Object.assign(debug('ipfs:http-api:object'), { error: debug('ipfs:http-api:object:error') }) +const { base64pad } = require('multiformats/bases/base64') +const { base16 } = require('multiformats/bases/base16') +const { CID } = require('multiformats/cid') + +/** + * @type {Record Uint8Array>} + */ +const DECODINGS = { + ascii: (str) => uint8ArrayFromString(str), + utf8: (str) => uint8ArrayFromString(str), + base64pad: (str) => base64pad.decode(`M${str}`), + base16: (str) => base16.decode(`f${str}`) +} /** * @param {import('../../types').Request} request @@ -60,7 +71,7 @@ exports.new = { }, query: Joi.object().keys({ template: Joi.string().valid('unixfs-dir'), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -94,7 +105,7 @@ exports.new = { } } = request - let cid, node + let cid, block, node try { cid = await ipfs.object.new({ template, @@ -105,21 +116,22 @@ exports.new = { signal, timeout }) + block = dagPB.encode(node) } catch (err) { throw Boom.boomify(err, { message: 'Failed to create object' }) } - const nodeJSON = node.toJSON() + const base = await ipfs.bases.getBase(cidBase) const answer = { - Data: uint8ArrayToString(node.Data, 'base64pad'), - Hash: cidToString(cid, { base: cidBase, upgrade: false }), - Size: nodeJSON.size, - Links: nodeJSON.links.map((l) => { + Data: 
node.Data ? uint8ArrayToString(node.Data, 'base64pad') : '', + Hash: cid.toString(base.encoder), + Size: block.length, + Links: node.Links.map((l) => { return { - Name: l.name, - Size: l.size, - Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) + Name: l.Name, + Size: l.Tsize, + Hash: l.Hash.toString(base.encoder) } }) } @@ -137,7 +149,7 @@ exports.get = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), dataEncoding: Joi.string() .valid('ascii', 'base64pad', 'base16', 'utf8') .replace(/text/, 'ascii') @@ -182,25 +194,28 @@ exports.get = { } } = request - let node + let node, block try { node = await ipfs.object.get(cid, { signal, timeout }) + block = dagPB.encode(node) } catch (err) { throw Boom.boomify(err, { message: 'Failed to get object' }) } + const base = await ipfs.bases.getBase(cidBase) + return h.response({ - Data: uint8ArrayToString(node.Data, dataEncoding), - Hash: cidToString(cid, { base: cidBase, upgrade: false }), - Size: node.size, + Data: node.Data ? 
uint8ArrayToString(node.Data, dataEncoding) : '', + Hash: cid.toString(base.encoder), + Size: block.length, Links: node.Links.map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: cidToString(l.Hash, { base: cidBase, upgrade: false }) + Hash: l.Hash.toString(base.encoder) } }) }) @@ -223,14 +238,29 @@ exports.put = { stripUnknown: true }, query: Joi.object().keys({ - cidBase: Joi.string().valid(...Object.keys(multibase.names)), - enc: Joi.string().valid('json', 'protobuf'), + cidBase: Joi.string().default('base58btc'), + dataEncoding: Joi.string() + .valid('ascii', 'base64pad', 'base16', 'utf8') + .replace(/text/, 'ascii') + .replace(/base64/, 'base64pad') + .replace(/hex/, 'base16') + .default('base64pad'), + enc: Joi.string().valid('json', 'protobuf').default('json'), + pin: Joi.boolean().default(false), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { override: true, ignoreUndefined: true }) + .rename('datafieldenc', 'dataEncoding', { + override: true, + ignoreUndefined: true + }) + .rename('inputenc', 'enc', { + override: true, + ignoreUndefined: true + }) } }, /** @@ -253,38 +283,59 @@ exports.put = { } }, query: { - cidBase, enc, - timeout + cidBase, + dataEncoding, + timeout, + pin } } = request - let cid, node + /** @type {import('@ipld/dag-pb').PBNode} */ + let input + + if (enc === 'json') { + input = { + Data: data.Data ? 
DECODINGS[dataEncoding](data.Data) : undefined, + Links: (data.Links || []).map((/** @type {any} */ l) => { + return { + Name: l.Name || '', + Tsize: l.Size || l.Tsize || 0, + Hash: CID.parse(l.Hash) + } + }) + } + } else { + input = dagPB.decode(data) + } + + let cid, node, block try { - cid = await ipfs.object.put(data, { - enc, + cid = await ipfs.object.put(input, { signal, - timeout + timeout, + pin }) node = await ipfs.object.get(cid, { signal, timeout }) + block = dagPB.encode(node) } catch (err) { throw Boom.badRequest(err, { message: 'Failed to put node' }) } - const nodeJSON = node.toJSON() + const base = await ipfs.bases.getBase(cidBase) const answer = { - Data: nodeJSON.data, - Hash: cidToString(cid, { base: cidBase, upgrade: false }), - Size: nodeJSON.size, - Links: nodeJSON.links.map((l) => { + Data: node.Data ? uint8ArrayToString(node.Data, dataEncoding) : '', + Hash: cid.toString(base.encoder), + Size: block.length, + Links: node.Links.map((l) => { return { - Name: l.name, - Size: l.size, - Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) + Name: l.Name, + Size: l.Tsize, + Hash: l.Hash.toString(base.encoder) } }) } @@ -302,7 +353,7 @@ exports.stat = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -346,7 +397,9 @@ exports.stat = { throw Boom.boomify(err, { message: 'Failed to stat object' }) } - stats.Hash = cidToString(stats.Hash, { base: cidBase, upgrade: false }) + const base = await ipfs.bases.getBase(cidBase) + + stats.Hash = stats.Hash.toString(base.encoder) return h.response(stats) } @@ -361,7 +414,7 @@ exports.data = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -417,7 +470,7 @@ exports.links = { }, query: Joi.object().keys({ cid: 
Joi.cid().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -456,13 +509,15 @@ exports.links = { timeout }) + const base = await ipfs.bases.getBase(cidBase) + const response = { - Hash: cidToString(cid, { base: cidBase, upgrade: false }), + Hash: cid.toString(base.encoder), Links: (links || []).map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: cidToString(l.Hash, { base: cidBase, upgrade: false }) + Hash: l.Hash.toString(base.encoder) } }) } @@ -488,7 +543,13 @@ exports.patchAppendData = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), + dataEncoding: Joi.string() + .valid('ascii', 'base64pad', 'base16', 'utf8') + .replace(/text/, 'ascii') + .replace(/base64/, 'base64pad') + .replace(/hex/, 'base16') + .default('base64pad'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -527,11 +588,12 @@ exports.patchAppendData = { query: { cid, cidBase, + dataEncoding, timeout } } = request - let newCid, node + let newCid, node, block try { newCid = await ipfs.object.patch.appendData(cid, data, { signal, @@ -541,21 +603,22 @@ exports.patchAppendData = { signal, timeout }) + block = dagPB.encode(node) } catch (err) { throw Boom.boomify(err, { message: 'Failed to append data to object' }) } - const nodeJSON = node.toJSON() + const base = await ipfs.bases.getBase(cidBase) const answer = { - Data: nodeJSON.data, - Hash: cidToString(newCid, { base: cidBase, upgrade: false }), - Size: nodeJSON.size, - Links: nodeJSON.links.map((l) => { + Data: node.Data ? 
uint8ArrayToString(node.Data, dataEncoding) : '', + Hash: newCid.toString(base.encoder), + Size: block.length, + Links: node.Links.map((l) => { return { - Name: l.name, - Size: l.size, - Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) + Name: l.Name, + Size: l.Tsize, + Hash: l.Hash.toString(base.encoder) } }) } @@ -581,7 +644,7 @@ exports.patchSetData = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -633,15 +696,15 @@ exports.patchSetData = { throw Boom.boomify(err, { message: 'Failed to set data on object' }) } - const nodeJSON = node.toJSON() + const base = await ipfs.bases.getBase(cidBase) return h.response({ - Hash: cidToString(newCid, { base: cidBase, upgrade: false }), - Links: nodeJSON.links.map((l) => { + Hash: newCid.toString(base.encoder), + Links: node.Links.map((l) => { return { - Name: l.name, - Size: l.size, - Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) + Name: l.Name, + Size: l.Tsize, + Hash: l.Hash.toString(base.encoder) } }) }) @@ -661,7 +724,13 @@ exports.patchAddLink = { Joi.string().required(), Joi.cid().required() ).required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), + dataEncoding: Joi.string() + .valid('ascii', 'base64pad', 'base16', 'utf8') + .replace(/text/, 'ascii') + .replace(/base64/, 'base64pad') + .replace(/hex/, 'base16') + .default('base64pad'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -695,17 +764,19 @@ exports.patchAddLink = { ref ], cidBase, + dataEncoding, timeout } } = request - let node, cid + let node, cid, block try { node = await ipfs.object.get(ref, { signal, timeout }) - cid = await ipfs.object.patch.addLink(root, new DAGLink(name, node.size, ref), { + block = dagPB.encode(node) + cid = await ipfs.object.patch.addLink(root, { Name: name, Tsize: block.length, Hash: ref }, { signal, timeout }) @@ 
-713,21 +784,22 @@ exports.patchAddLink = { signal, timeout }) + block = dagPB.encode(node) } catch (err) { throw Boom.boomify(err, { message: 'Failed to add link to object' }) } - const nodeJSON = node.toJSON() + const base = await ipfs.bases.getBase(cidBase) const answer = { - Data: nodeJSON.data, - Hash: cidToString(cid, { base: cidBase, upgrade: false }), - Size: nodeJSON.size, - Links: nodeJSON.links.map((l) => { + Data: node.Data ? uint8ArrayToString(node.Data, dataEncoding) : '', + Hash: cid.toString(base.encoder), + Size: block.length, + Links: node.Links.map((l) => { return { - Name: l.name, - Size: l.size, - Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) + Name: l.Name, + Size: l.Tsize, + Hash: l.Hash.toString(base.encoder) } }) } @@ -748,7 +820,13 @@ exports.patchRmLink = { Joi.cid().required(), Joi.string().required() ).required(), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), + dataEncoding: Joi.string() + .valid('ascii', 'base64pad', 'base16', 'utf8') + .replace(/text/, 'ascii') + .replace(/base64/, 'base64pad') + .replace(/hex/, 'base16') + .default('base64pad'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -781,11 +859,12 @@ exports.patchRmLink = { link ], cidBase, + dataEncoding, timeout } } = request - let cid, node + let cid, node, block try { cid = await ipfs.object.patch.rmLink(root, link, { signal, @@ -795,21 +874,22 @@ exports.patchRmLink = { signal, timeout }) + block = dagPB.encode(node) } catch (err) { throw Boom.boomify(err, { message: 'Failed to remove link from object' }) } - const nodeJSON = node.toJSON() + const base = await ipfs.bases.getBase(cidBase) const answer = { - Data: nodeJSON.data, - Hash: cidToString(cid, { base: cidBase, upgrade: false }), - Size: nodeJSON.size, - Links: nodeJSON.links.map((l) => { + Data: node.Data ? 
uint8ArrayToString(node.Data, dataEncoding) : '', + Hash: cid.toString(base.encoder), + Size: block.length, + Links: node.Links.map((l) => { return { - Name: l.name, - Size: l.size, - Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) + Name: l.Name, + Size: l.Tsize, + Hash: l.Hash.toString(base.encoder) } }) } diff --git a/packages/ipfs-http-server/src/api/resources/pin.js b/packages/ipfs-http-server/src/api/resources/pin.js index 2bb1407ee8..f4a52ce76a 100644 --- a/packages/ipfs-http-server/src/api/resources/pin.js +++ b/packages/ipfs-http-server/src/api/resources/pin.js @@ -5,12 +5,11 @@ const Boom = require('@hapi/boom') const map = require('it-map') const reduce = require('it-reduce') const { pipe } = require('it-pipe') -const { cidToString } = require('ipfs-core-utils/src/cid') const streamResponse = require('../../utils/stream-response') const all = require('it-all') /** - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID */ /** @@ -45,7 +44,7 @@ exports.ls = { query: Joi.object().keys({ paths: Joi.array().single().items(Joi.ipfsPath()), recursive: Joi.boolean().default(true), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), type: Joi.string().valid('all', 'direct', 'indirect', 'recursive').default('all'), stream: Joi.boolean().default(false), timeout: Joi.timeout() @@ -90,6 +89,8 @@ exports.ls = { timeout }) + const base = await ipfs.bases.getBase(cidBase) + if (!stream) { const res = await pipe( source, @@ -98,7 +99,7 @@ exports.ls = { const init = { Keys: {} } return reduce(source, (res, { type, cid, metadata }) => { - res.Keys[cidToString(cid, { base: cidBase })] = toPin(type, undefined, metadata) + res.Keys[cid.toString(base.encoder)] = toPin(type, undefined, metadata) return res }, init) @@ -111,7 +112,7 @@ exports.ls = { return streamResponse(request, h, () => pipe( source, async function * transform (source) { - yield * map(source, ({ type, cid, metadata }) => toPin(type, 
cidToString(cid, { base: cidBase }), metadata)) + yield * map(source, ({ type, cid, metadata }) => toPin(type, cid.toString(base.encoder), metadata)) } )) } @@ -127,7 +128,7 @@ exports.add = { query: Joi.object().keys({ cids: Joi.array().single().items(Joi.cid()).min(1).required(), recursive: Joi.boolean().default(true), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout(), metadata: Joi.json() }) @@ -182,8 +183,10 @@ exports.add = { throw Boom.boomify(err, { message: 'Failed to add pin' }) } + const base = await ipfs.bases.getBase(cidBase) + return h.response({ - Pins: result.map(cid => cidToString(cid, { base: cidBase })) + Pins: result.map(cid => cid.toString(base.encoder)) }) } } @@ -198,7 +201,7 @@ exports.rm = { query: Joi.object().keys({ cids: Joi.array().single().items(Joi.cid()).min(1).required(), recursive: Joi.boolean().default(true), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -247,8 +250,10 @@ exports.rm = { throw Boom.boomify(err, { message: 'Failed to remove pin' }) } + const base = await ipfs.bases.getBase(cidBase) + return h.response({ - Pins: result.map(cid => cidToString(cid, { base: cidBase })) + Pins: result.map(cid => cid.toString(base.encoder)) }) } } diff --git a/packages/ipfs-http-server/src/api/resources/ping.js b/packages/ipfs-http-server/src/api/resources/ping.js index 23b863cd05..402eda8239 100644 --- a/packages/ipfs-http-server/src/api/resources/ping.js +++ b/packages/ipfs-http-server/src/api/resources/ping.js @@ -14,7 +14,7 @@ module.exports = { }, query: Joi.object().keys({ count: Joi.number().integer().greater(0).default(10), - peerId: Joi.cid().required(), + peerId: Joi.string().required(), timeout: Joi.timeout() }) .rename('arg', 'peerId', { diff --git a/packages/ipfs-http-server/src/api/resources/resolve.js b/packages/ipfs-http-server/src/api/resources/resolve.js index 68fed902fc..0d8c947044 100644 
--- a/packages/ipfs-http-server/src/api/resources/resolve.js +++ b/packages/ipfs-http-server/src/api/resources/resolve.js @@ -12,7 +12,7 @@ module.exports = { query: Joi.object().keys({ path: Joi.string().required(), recursive: Joi.boolean().default(true), - cidBase: Joi.cidBase(), + cidBase: Joi.string().default('base58btc'), timeout: Joi.timeout() }) .rename('arg', 'path', { diff --git a/packages/ipfs-http-server/src/api/resources/stats.js b/packages/ipfs-http-server/src/api/resources/stats.js index 49245582f3..923ce969ac 100644 --- a/packages/ipfs-http-server/src/api/resources/stats.js +++ b/packages/ipfs-http-server/src/api/resources/stats.js @@ -17,7 +17,7 @@ exports.bw = { stripUnknown: true }, query: Joi.object().keys({ - peer: Joi.cid(), + peer: Joi.string(), proto: Joi.string(), poll: Joi.boolean().default(false), interval: Joi.string().default('1s'), diff --git a/packages/ipfs-http-server/src/index.js b/packages/ipfs-http-server/src/index.js index 46648400e7..b44fbd65ec 100644 --- a/packages/ipfs-http-server/src/index.js +++ b/packages/ipfs-http-server/src/index.js @@ -15,7 +15,6 @@ const LOG_ERROR = 'ipfs:http-api:error' /** * @typedef {import('ipfs-core-types').IPFS} IPFS * @typedef {import('./types').Server} Server - * @typedef {import('ipld')} IPLD * @typedef {import('libp2p')} libp2p */ @@ -103,8 +102,6 @@ class HttpApi { /** * Starts the IPFS HTTP server - * - * @returns {Promise} */ async start () { this._log('starting') @@ -120,8 +117,11 @@ class HttpApi { credentials: Boolean(headers['Access-Control-Allow-Credentials']) }) + // for the CLI to know the whereabouts of the API + // @ts-ignore - ipfs.repo.setApiAddr is not part of the core api + await ipfs.repo.setApiAddr(this._apiServers[0].info.ma) + this._log('started') - return this } /** diff --git a/packages/ipfs-http-server/src/types.d.ts b/packages/ipfs-http-server/src/types.d.ts index fbe0d52f75..0841fe3c70 100644 --- a/packages/ipfs-http-server/src/types.d.ts +++ 
b/packages/ipfs-http-server/src/types.d.ts @@ -2,8 +2,6 @@ import { IPFS } from 'ipfs-core-types' import { Request, Server } from '@hapi/hapi' import Multiaddr from 'multiaddrs' import { Mtime } from 'ipfs-unixfs' -import IPLD from 'ipld' -import libp2p from 'libp2p' declare module '@hapi/hapi' { interface ServerApplicationState { diff --git a/packages/ipfs-http-server/src/utils/joi.js b/packages/ipfs-http-server/src/utils/joi.js index 9488068fb5..f8e5c2b5fd 100644 --- a/packages/ipfs-http-server/src/utils/joi.js +++ b/packages/ipfs-http-server/src/utils/joi.js @@ -1,10 +1,9 @@ 'use strict' const Joi = require('joi') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { default: parseDuration } = require('parse-duration') const { Multiaddr } = require('multiaddr') -const multibase = require('multibase') const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') /** @@ -27,7 +26,7 @@ const toIpfsPath = (value) => { const parts = value.split('/') // will throw if not valid - parts[0] = new CID(parts[0]) + parts[0] = CID.parse(parts[0]) // go-ipfs returns /ipfs/ prefix for ipfs paths when passed to the http api // and not when it isn't. E.g. 
@@ -40,7 +39,7 @@ const toIpfsPath = (value) => { * @param {*} value */ const toCID = (value) => { - return new CID(value.toString().replace('/ipfs/', '')) + return CID.parse(value.toString().replace('/ipfs/', '')) } /** @@ -55,7 +54,7 @@ const requireIfRequired = (value, helpers) => { module.exports = Joi .extend( - // @ts-ignore - according to typedfs coerce should always return + // @ts-ignore - according to typedefs coerce should always return // { errors?: ErrorReport[], value?: any } (joi) => { return { @@ -127,25 +126,6 @@ module.exports = Joi } } }, - (joi) => { - return { - type: 'cidBase', - base: joi.string(), - validate: requireIfRequired, - coerce (value, _helpers) { - if (!value) { - return - } - - // @ts-ignore value is not a BaseName - if (!multibase.names[value]) { - throw new Error('Invalid base name') - } - - return { value } - } - } - }, (joi) => { return { type: 'json', diff --git a/packages/ipfs-http-server/test/inject/bitswap.js b/packages/ipfs-http-server/test/inject/bitswap.js index 2fe534658d..b845a461ad 100644 --- a/packages/ipfs-http-server/test/inject/bitswap.js +++ b/packages/ipfs-http-server/test/inject/bitswap.js @@ -2,14 +2,16 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const CID = require('cids') +const { CID } = require('multiformats/cid') const sinon = require('sinon') const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const { AbortSignal } = require('native-abort-controller') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') describe('/bitswap', () => { - const cid = new CID('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR') + const cid = CID.parse('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR') let ipfs beforeEach(() => { @@ -19,6 +21,9 @@ describe('/bitswap', () => { wantlistForPeer: sinon.stub(), stat: sinon.stub(), unwant: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } }) @@ -34,6 
+39,7 @@ describe('/bitswap', () => { }) it('/wantlist', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.bitswap.wantlist.withArgs(defaultOptions).returns([ cid ]) @@ -48,6 +54,7 @@ describe('/bitswap', () => { }) it('/wantlist?timeout=1s', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.bitswap.wantlist.withArgs({ ...defaultOptions, timeout: 1000 @@ -66,8 +73,9 @@ describe('/bitswap', () => { // TODO: unskip after switch to v1 CIDs by default it.skip('/wantlist?cid-base=base64', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.bitswap.wantlist.withArgs(defaultOptions).returns([ - cid + cid.toV1() ]) const res = await http({ @@ -76,27 +84,14 @@ describe('/bitswap', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.nested.property('result.Keys').that.deep.includes({ '/': cid.toV1().toString('base64') }) - }) - - it('/wantlist?cid-base=invalid', async () => { - ipfs.bitswap.wantlist.withArgs(defaultOptions).returns([ - cid - ]) - - const res = await http({ - method: 'POST', - url: '/api/v0/bitswap/wantlist?cid-base=invalid' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res).to.have.nested.property('result.Keys').that.deep.includes({ '/': cid.toV1().toString(base64) }) }) it('/wantlist?peer=QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const peerId = 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D' - ipfs.bitswap.wantlistForPeer.withArgs(new CID(peerId), defaultOptions).returns([ + ipfs.bitswap.wantlistForPeer.withArgs(peerId, defaultOptions).returns([ cid ]) @@ -110,9 +105,10 @@ describe('/bitswap', () => { }) it('/wantlist?peer=QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D&timeout=1s', async () => { + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const peerId = 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D' - ipfs.bitswap.wantlistForPeer.withArgs(new CID(peerId), { + ipfs.bitswap.wantlistForPeer.withArgs(peerId, { ...defaultOptions, timeout: 1000 }).returns([ @@ -127,17 +123,6 @@ describe('/bitswap', () => { expect(res).to.have.property('statusCode', 200) expect(res).to.have.nested.property('result.Keys').that.deep.includes({ '/': cid.toString() }) }) - - it('/wantlist?peer=invalid', async () => { - const peerId = 'invalid' - - const res = await http({ - method: 'POST', - url: `/api/v0/bitswap/wantlist?peer=${peerId}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - }) }) describe('/stat', () => { @@ -151,6 +136,7 @@ describe('/bitswap', () => { }) it('/stat', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.bitswap.stat.withArgs(defaultOptions).returns({ provideBufLen: 'provideBufLen', blocksReceived: 'blocksReceived', @@ -183,6 +169,7 @@ describe('/bitswap', () => { }) it('/stat?timeout=1s', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.bitswap.stat.withArgs(defaultOptions).withArgs({ signal: sinon.match.any, timeout: 1000 @@ -209,6 +196,7 @@ describe('/bitswap', () => { }) it('/stat?cid-base=base64', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.bitswap.stat.withArgs(defaultOptions).returns({ provideBufLen: 'provideBufLen', blocksReceived: 'blocksReceived', @@ -229,10 +217,10 @@ describe('/bitswap', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.nested.property('result.Wantlist').that.deep.includes({ '/': cid.toV1().toString('base64') }) + expect(res).to.have.nested.property('result.Wantlist').that.deep.includes({ '/': cid.toV1().toString(base64) }) }) - it('/stat?cid-base=invalid', async () => { + it.skip('/stat?cid-base=invalid', async () => { const res = await http({ method: 'POST', 
url: '/api/v0/bitswap/stat?cid-base=invalid' @@ -243,6 +231,7 @@ describe('/bitswap', () => { }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.bitswap.stat.withArgs(defaultOptions).withArgs(sinon.match({ timeout: 1000 })).returns({ @@ -279,23 +268,25 @@ describe('/bitswap', () => { }) it('/unwant', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const res = await http({ method: 'POST', url: `/api/v0/bitswap/unwant?arg=${cid}` }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(ipfs.bitswap.unwant.calledWith(new CID(cid), defaultOptions)).to.be.true() + expect(ipfs.bitswap.unwant.calledWith(cid, defaultOptions)).to.be.true() }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const res = await http({ method: 'POST', url: `/api/v0/bitswap/unwant?arg=${cid}&timeout=1s` }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(ipfs.bitswap.unwant.calledWith(new CID(cid), { + expect(ipfs.bitswap.unwant.calledWith(cid, { ...defaultOptions, timeout: 1000 })).to.be.true() diff --git a/packages/ipfs-http-server/test/inject/block.js b/packages/ipfs-http-server/test/inject/block.js index 2d8629ca37..73d285dcde 100644 --- a/packages/ipfs-http-server/test/inject/block.js +++ b/packages/ipfs-http-server/test/inject/block.js @@ -5,12 +5,14 @@ const { expect } = require('aegir/utils/chai') const FormData = require('form-data') const streamToPromise = require('stream-to-promise') -const multibase = require('multibase') const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const sinon = require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { AbortSignal } = require('native-abort-controller') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') +const { base32 } = 
require('multiformats/bases/base32') const sendData = async (data) => { const form = new FormData() @@ -25,7 +27,7 @@ const sendData = async (data) => { } describe('/block', () => { - const cid = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') + const cid = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Kp') const data = Buffer.from('hello world\n') const expectedResult = { Key: cid.toString(), @@ -40,16 +42,18 @@ describe('/block', () => { get: sinon.stub(), stat: sinon.stub(), rm: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } }) describe('/put', () => { const defaultOptions = { - mhtype: undefined, - mhlen: undefined, - format: undefined, - version: undefined, + mhtype: 'sha2-256', + format: 'dag-pb', + version: 0, pin: false, signal: sinon.match.instanceOf(AbortSignal), timeout: undefined @@ -75,10 +79,8 @@ describe('/block', () => { }) it('updates value', async () => { - ipfs.block.put.withArgs(data, defaultOptions).returns({ - cid, - data - }) + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + ipfs.block.put.withArgs(data, defaultOptions).returns(cid) const res = await http({ method: 'POST', @@ -90,18 +92,13 @@ describe('/block', () => { expect(res).to.have.deep.property('result', expectedResult) }) - it('updates value and pins block', async () => { - ipfs.block.put.withArgs(data, { - ...defaultOptions, - pin: true - }).returns({ - cid, - data - }) + it('converts a v0 format to dag-pb', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + ipfs.block.put.withArgs(data, defaultOptions).returns(cid) const res = await http({ method: 'POST', - url: '/api/v0/block/put?pin=true', + url: '/api/v0/block/put?format=v0', ...await sendData(data) }, { ipfs }) @@ -109,18 +106,16 @@ describe('/block', () => { expect(res).to.have.deep.property('result', expectedResult) }) - it('updates value with a v1 CID', async () => { + it('updates value and pins block', async () => { + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.block.put.withArgs(data, { ...defaultOptions, - version: 1 - }).returns({ - cid, - data - }) + pin: true + }).returns(cid) const res = await http({ method: 'POST', - url: '/api/v0/block/put?version=1', + url: '/api/v0/block/put?pin=true', ...await sendData(data) }, { ipfs }) @@ -128,41 +123,46 @@ describe('/block', () => { expect(res).to.have.deep.property('result', expectedResult) }) - it('should put a value and return a base64 encoded CID', async () => { - ipfs.block.put.withArgs(data, defaultOptions).returns({ - cid, - data - }) + it('defaults to base32 encoding with a v1 CID', async () => { + ipfs.bases.getBase.withArgs('base32').returns(base32) + ipfs.block.put.withArgs(data, { + ...defaultOptions, + version: 1 + }).returns(cid.toV1()) const res = await http({ method: 'POST', - url: '/api/v0/block/put?cid-base=base64', + url: '/api/v0/block/put?version=1', ...await sendData(data) }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Key)).to.equal('base64') + expect(res.result.Key).to.equal(cid.toV1().toString()) }) - it('should not put a value for invalid cid-base option', async () => { + it('should put a value and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) + ipfs.block.put.withArgs(data, { + ...defaultOptions, + version: 1 + }).returns(cid.toV1()) + const res = await http({ method: 'POST', - url: '/api/v0/block/put?cid-base=invalid', + url: '/api/v0/block/put?version=1&cid-base=base64', ...await sendData(data) }, { ipfs }) - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res).to.have.property('statusCode', 200) + expect(res.result.Key).to.equal(cid.toV1().toString(base64)) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) 
ipfs.block.put.withArgs(data, { ...defaultOptions, timeout: 1000 - }).returns({ - cid, - data - }) + }).returns(cid) const res = await http({ method: 'POST', @@ -206,10 +206,7 @@ describe('/block', () => { }) it('returns value', async () => { - ipfs.block.get.withArgs(cid, defaultOptions).returns({ - cid, - data - }) + ipfs.block.get.withArgs(cid, defaultOptions).returns(data) const res = await http({ method: 'POST', @@ -224,10 +221,7 @@ describe('/block', () => { ipfs.block.get.withArgs(cid, { ...defaultOptions, timeout: 1000 - }).returns({ - cid, - data - }) + }).returns(data) const res = await http({ method: 'POST', @@ -270,6 +264,7 @@ describe('/block', () => { }) it('returns value', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.block.stat.withArgs(cid, defaultOptions).returns({ cid, size: data.byteLength @@ -287,8 +282,9 @@ describe('/block', () => { }) it('should stat a block and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.block.stat.withArgs(cid, defaultOptions).returns({ - cid, + cid: cid.toV1(), size: data.byteLength }) @@ -298,20 +294,11 @@ describe('/block', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Key)).to.deep.equal('base64') - }) - - it('should not stat a block for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/block/stat?cid-base=invalid' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res.result.Key).to.equal(cid.toV1().toString(base64)) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.block.stat.withArgs(cid, { ...defaultOptions, timeout: 1000 @@ -363,6 +350,7 @@ describe('/block', () => { }) it('returns 200', async () => { + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.block.rm.withArgs([cid], defaultOptions).returns([{ cid }]) const res = await http({ @@ -374,6 +362,7 @@ describe('/block', () => { }) it('returns 200 when forcing removal', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.block.rm.withArgs([cid], { ...defaultOptions, force: true @@ -388,6 +377,7 @@ describe('/block', () => { }) it('returns 200 when removing quietly', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.block.rm.withArgs([cid], { ...defaultOptions, quiet: true @@ -402,7 +392,8 @@ describe('/block', () => { }) it('returns 200 for multiple CIDs', async () => { - const cid2 = new CID('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Ka') + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const cid2 = CID.parse('QmZjTnYw2TFhn9Nn7tjmPSoTBoY7YRkwPzwSrSbabY24Ka') ipfs.block.rm.withArgs([cid, cid2], defaultOptions).returns([{ cid, cid2 }]) @@ -415,6 +406,7 @@ describe('/block', () => { }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.block.rm.withArgs([cid], { ...defaultOptions, timeout: 1000 diff --git a/packages/ipfs-http-server/test/inject/dag.js b/packages/ipfs-http-server/test/inject/dag.js index c25b2d94d2..5f847329e3 100644 --- a/packages/ipfs-http-server/test/inject/dag.js +++ b/packages/ipfs-http-server/test/inject/dag.js @@ -3,15 +3,16 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const DAGNode = require('ipld-dag-pb').DAGNode const Readable = require('stream').Readable const FormData = require('form-data') const streamToPromise = require('stream-to-promise') -const CID = require('cids') +const { CID } = require('multiformats/cid') const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const sinon = require('sinon') const { AbortSignal } = require('native-abort-controller') +const { base58btc } = 
require('multiformats/bases/base58') +const { base32 } = require('multiformats/bases/base32') const toHeadersAndPayload = async (thing) => { const stream = new Readable() @@ -28,7 +29,7 @@ const toHeadersAndPayload = async (thing) => { } describe('/dag', () => { - const cid = new CID('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR') + const cid = CID.parse('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR') let ipfs beforeEach(() => { @@ -40,6 +41,9 @@ describe('/dag', () => { }, block: { put: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } }) @@ -74,7 +78,10 @@ describe('/dag', () => { }) it('returns value', async () => { - const node = new DAGNode(Uint8Array.from([]), []) + const node = { + Data: Uint8Array.from([]), + Links: [] + } ipfs.dag.get.withArgs(cid, defaultOptions).returns({ value: node }) const res = await http({ @@ -83,27 +90,33 @@ describe('/dag', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.nested.property('result.links').that.is.empty() - expect(res).to.have.nested.property('result.data').that.is.empty() + expect(res).to.have.nested.property('result.Links').that.is.empty() + expect(res).to.have.nested.property('result.Data').that.is.empty() }) it('uses text encoding for data by default', async () => { - const node = new DAGNode(Uint8Array.from([0, 1, 2, 3]), []) + const node = { + Data: Uint8Array.from([0, 1, 2, 3]), + Links: [] + } ipfs.dag.get.withArgs(cid, defaultOptions).returns({ value: node }) const res = await http({ method: 'POST', - url: `/api/v0/dag/get?arg=${cid.toBaseEncodedString()}` + url: `/api/v0/dag/get?arg=${cid}` }, { ipfs }) expect(res).to.have.property('statusCode', 200) expect(res.result).to.be.ok() - expect(res).to.have.nested.property('result.links').that.is.empty() - expect(res).to.have.nested.property('result.data', '\u0000\u0001\u0002\u0003') + expect(res).to.have.nested.property('result.Links').that.is.empty() + expect(res).to.have.nested.property('result.Data', 
'\u0000\u0001\u0002\u0003') }) it('overrides data encoding', async () => { - const node = new DAGNode(Uint8Array.from([0, 1, 2, 3]), []) + const node = { + Data: Uint8Array.from([0, 1, 2, 3]), + Links: [] + } ipfs.dag.get.withArgs(cid, defaultOptions).returns({ value: node }) const res = await http({ @@ -112,8 +125,8 @@ describe('/dag', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.nested.property('result.links').that.is.empty() - expect(res).to.have.nested.property('result.data').that.equals('AAECAw==') + expect(res).to.have.nested.property('result.Links').that.is.empty() + expect(res).to.have.nested.property('result.Data').that.equals('AAECAw==') }) it('returns value with a path as part of the cid', async () => { @@ -132,7 +145,10 @@ describe('/dag', () => { }) it('returns value with a path as part of the cid for dag-pb nodes', async () => { - const node = new DAGNode(Uint8Array.from([0, 1, 2, 3]), []) + const node = { + Data: Uint8Array.from([0, 1, 2, 3]), + Links: [] + } ipfs.dag.get.withArgs(cid, { ...defaultOptions, path: '/Data' @@ -204,6 +220,7 @@ describe('/dag', () => { const defaultOptions = { format: 'dag-cbor', hashAlg: 'sha2-256', + version: 1, pin: false, signal: sinon.match.instanceOf(AbortSignal), timeout: undefined @@ -224,10 +241,11 @@ describe('/dag', () => { }) it('adds a dag-cbor node by default', async () => { + ipfs.bases.getBase.withArgs('base32').returns(base32) const node = { foo: 'bar' } - ipfs.dag.put.withArgs(node, defaultOptions).returns(cid) + ipfs.dag.put.withArgs(node, defaultOptions).returns(cid.toV1()) const res = await http({ method: 'POST', @@ -236,10 +254,11 @@ describe('/dag', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toString() }) + expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toV1().toString() }) }) it('adds a dag-pb node', async () => { + 
ipfs.bases.getBase.withArgs('base32').returns(base32) const node = { data: [], links: [] @@ -247,7 +266,7 @@ describe('/dag', () => { ipfs.dag.put.withArgs(node, { ...defaultOptions, format: 'dag-pb' - }).returns(cid) + }).returns(cid.toV1()) const res = await http({ method: 'POST', @@ -255,16 +274,39 @@ describe('/dag', () => { ...await toHeadersAndPayload(JSON.stringify(node)) }, { ipfs }) + expect(res).to.have.property('statusCode', 200) + expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toV1().toString() }) + }) + + it('defaults to base58btc when adding a v0 dag-pb node', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const node = { + data: [], + links: [] + } + ipfs.dag.put.withArgs(node, { + ...defaultOptions, + version: 0, + format: 'dag-pb' + }).returns(cid) + + const res = await http({ + method: 'POST', + url: '/api/v0/dag/put?format=dag-pb&version=0', + ...await toHeadersAndPayload(JSON.stringify(node)) + }, { ipfs }) + expect(res).to.have.property('statusCode', 200) expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toString() }) }) it('adds a raw node', async () => { + ipfs.bases.getBase.withArgs('base32').returns(base32) const node = Buffer.from([0, 1, 2, 3]) ipfs.dag.put.withArgs(node, { ...defaultOptions, format: 'raw' - }).returns(cid) + }).returns(cid.toV1()) const res = await http({ method: 'POST', @@ -273,17 +315,18 @@ describe('/dag', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toString() }) + expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toV1().toString() }) }) it('pins a node after adding', async () => { + ipfs.bases.getBase.withArgs('base32').returns(base32) const node = { foo: 'bar' } ipfs.dag.put.withArgs(node, { ...defaultOptions, pin: true - }).returns(cid) + }).returns(cid.toV1()) const res = await http({ method: 'POST', @@ -292,21 +335,23 @@ describe('/dag', () => 
{ }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toString() }) + expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toV1().toString() }) }) it('adds a node with an esoteric format', async () => { - const cid = new CID('baf4beiata6mq425fzikf5m26temcvg7mizjrxrkn35swuybmpah2ajan5y') + ipfs.bases.getBase.withArgs('base32').returns(base32) + const cid = CID.parse('baf4beiata6mq425fzikf5m26temcvg7mizjrxrkn35swuybmpah2ajan5y') const data = Buffer.from('some data') const codec = 'git-raw' + ipfs.block.put.withArgs(data).returns(cid) ipfs.dag.get.withArgs(cid).returns({ value: data }) ipfs.dag.put.withArgs(data, { ...defaultOptions, format: codec - }).returns(cid) + }).returns(cid.toV1()) const res = await http({ method: 'POST', @@ -316,17 +361,18 @@ describe('/dag', () => { expect(ipfs.block.put.called).to.be.true() expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toString() }) + expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toV1().toString() }) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base32').returns(base32) const node = { foo: 'bar' } ipfs.dag.put.withArgs(node, { ...defaultOptions, timeout: 1000 - }).returns(cid) + }).returns(cid.toV1()) const res = await http({ method: 'POST', @@ -335,7 +381,7 @@ describe('/dag', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toString() }) + expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toV1().toString() }) }) }) @@ -360,6 +406,7 @@ describe('/dag', () => { }) it('resolves a node', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.dag.resolve.withArgs(cid, defaultOptions).returns({ cid, remainderPath: '' @@ -376,6 +423,7 @@ describe('/dag', () => { }) it('resolves a node with a path 
arg', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.dag.resolve.withArgs(cid, { ...defaultOptions, path: '/foo' @@ -395,6 +443,7 @@ describe('/dag', () => { }) it('returns the remainder path from within the resolved node', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.dag.resolve.withArgs(cid, { ...defaultOptions, path: '/foo' @@ -429,7 +478,8 @@ describe('/dag', () => { }) it('resolves across multiple nodes, returning the CID of the last node traversed', async () => { - const cid2 = new CID('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNA') + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + const cid2 = CID.parse('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNA') ipfs.dag.resolve.withArgs(cid, { ...defaultOptions, @@ -450,6 +500,7 @@ describe('/dag', () => { }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.dag.resolve.withArgs(cid, { ...defaultOptions, timeout: 1000 diff --git a/packages/ipfs-http-server/test/inject/dht.js b/packages/ipfs-http-server/test/inject/dht.js index 93f7c22d06..2620d3ddbd 100644 --- a/packages/ipfs-http-server/test/inject/dht.js +++ b/packages/ipfs-http-server/test/inject/dht.js @@ -7,13 +7,13 @@ const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const sinon = require('sinon') const errCode = require('err-code') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { AbortSignal } = require('native-abort-controller') const allNdjson = require('../utils/all-ndjson') describe('/dht', () => { const peerId = 'QmQ2zigjQikYnyYUSXZydNXrDRhBut2mubwJBaLXobMt3A' - const cid = new CID('Qmc77hSNykXJ6Jxp1C6RpD8VENV7RK6JD7eAcWpc7nEZx2') + const cid = CID.parse('Qmc77hSNykXJ6Jxp1C6RpD8VENV7RK6JD7eAcWpc7nEZx2') let ipfs beforeEach(() => { @@ -49,8 +49,8 @@ describe('/dht', () => { expect(res).to.have.nested.property('result.Code', 1) }) - 
it('returns 404 if peerId is provided as there is no peers in the routing table', async () => { - ipfs.dht.findPeer.withArgs(new CID(peerId), defaultOptions).throws(errCode(new Error('Nope'), 'ERR_LOOKUP_FAILED')) + it('returns 404 if peerId is provided and there are no peers in the routing table', async () => { + ipfs.dht.findPeer.withArgs(peerId, defaultOptions).throws(errCode(new Error('Nope'), 'ERR_LOOKUP_FAILED')) const res = await http({ method: 'POST', @@ -59,11 +59,11 @@ describe('/dht', () => { expect(res).to.have.property('statusCode', 404) expect(ipfs.dht.findPeer.called).to.be.true() - expect(ipfs.dht.findPeer.getCall(0).args[0]).to.deep.equal(new CID(peerId)) + expect(ipfs.dht.findPeer.getCall(0).args[0]).to.equal(peerId) }) it('accepts a timeout', async () => { - ipfs.dht.findPeer.withArgs(new CID(peerId), { + ipfs.dht.findPeer.withArgs(peerId, { ...defaultOptions, timeout: 1000 }).returns({ @@ -392,7 +392,7 @@ describe('/dht', () => { }) it('returns 200 if key is provided', async function () { - ipfs.dht.query.withArgs(new CID(peerId), defaultOptions).returns([{ + ipfs.dht.query.withArgs(peerId, defaultOptions).returns([{ id: 'id' }]) @@ -406,7 +406,7 @@ describe('/dht', () => { }) it('accepts a timeout', async function () { - ipfs.dht.query.withArgs(new CID(peerId), { + ipfs.dht.query.withArgs(peerId, { ...defaultOptions, timeout: 1000 }).returns([{ diff --git a/packages/ipfs-http-server/test/inject/files.js b/packages/ipfs-http-server/test/inject/files.js index 47b2f4a3e0..5b13c74d8b 100644 --- a/packages/ipfs-http-server/test/inject/files.js +++ b/packages/ipfs-http-server/test/inject/files.js @@ -6,22 +6,23 @@ const { randomBytes } = require('iso-random-stream') const { expect } = require('aegir/utils/chai') const FormData = require('form-data') const streamToPromise = require('stream-to-promise') -const multibase = require('multibase') const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const sinon = 
require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') const first = require('it-first') const toBuffer = require('it-to-buffer') const { AbortSignal } = require('native-abort-controller') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') function matchIterable () { return sinon.match((thing) => Boolean(thing[Symbol.asyncIterator]) || Boolean(thing[Symbol.iterator])) } describe('/files', () => { - const cid = new CID('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR') - const cid2 = new CID('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNA') + const cid = CID.parse('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR') + const cid2 = CID.parse('QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNA') let ipfs beforeEach(() => { @@ -33,6 +34,9 @@ describe('/files', () => { refs: sinon.stub(), files: { stat: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } @@ -40,6 +44,7 @@ describe('/files', () => { }) async function assertAddArgs (url, fn) { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const content = Buffer.from('TEST\n') ipfs.addAll.callsFake(async function * (source, opts) { @@ -100,6 +105,7 @@ describe('/files', () => { }) it('should add buffer bigger than Hapi default max bytes (1024 * 1024)', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const payload = Buffer.from([ '', '------------287032381131322', @@ -134,11 +140,12 @@ describe('/files', () => { }) it('should add data and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) const content = Buffer.from('TEST' + Date.now()) ipfs.addAll.withArgs(matchIterable(), defaultOptions).returns([{ path: cid.toString(), - cid, + cid: cid.toV1(), size: content.byteLength, mode: 0o420, mtime: { @@ -160,10 +167,11 @@ describe('/files', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - 
expect(multibase.isEncoded(JSON.parse(res.result).Hash)).to.deep.equal('base64') + expect(JSON.parse(res.result).Hash).to.equal(cid.toV1().toString(base64)) }) it('should add data without pinning and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) const content = Buffer.from('TEST' + Date.now()) ipfs.addAll.callsFake(async function * (source, opts) { @@ -174,7 +182,7 @@ describe('/files', () => { yield { path: cid.toString(), - cid, + cid: cid.toV1(), size: content.byteLength, mode: 0o420, mtime: { @@ -197,7 +205,7 @@ describe('/files', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(JSON.parse(res.result).Hash)).to.deep.equal('base64') + expect(JSON.parse(res.result).Hash).to.equal(cid.toV1().toString(base64)) }) it('should specify the cid version', () => assertAddArgs('/api/v0/add?cid-version=1', (opts) => opts.cidVersion === 1)) @@ -352,6 +360,7 @@ describe('/files', () => { }) it('should list directory contents', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.files.stat.withArgs(`/ipfs/${cid}`).returns({ type: 'directory' }) @@ -386,6 +395,7 @@ describe('/files', () => { }) it('should list a file', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.files.stat.withArgs(`/ipfs/${cid}/derp`).returns({ cid, size: 10, @@ -415,6 +425,7 @@ describe('/files', () => { }) it('should list directory contents without unixfs v1.5 fields', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.files.stat.withArgs(`/ipfs/${cid}`).returns({ type: 'directory' }) @@ -448,6 +459,7 @@ describe('/files', () => { }) it('should list directory contents recursively', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.files.stat.withArgs(`/ipfs/${cid}`).returns({ type: 'directory' }) @@ -486,6 +498,7 @@ describe('/files', () => { // TODO: unskip after switch to v1 CIDs 
by default it.skip('should return base64 encoded CIDs', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.ls.withArgs(`${cid}`, defaultOptions).returns([]) const res = await http({ @@ -495,12 +508,13 @@ describe('/files', () => { expect(res).to.have.property('statusCode', 200) expect(res).to.have.deep.nested.property('result.Objects[0]', { - Hash: cid.toV1().toString('base64'), + Hash: cid.toV1().toString(base64), Links: [] }) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.files.stat.withArgs(`/ipfs/${cid}`).returns({ type: 'directory' }) @@ -525,6 +539,7 @@ describe('/files', () => { }) it('accepts a timeout when streaming', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.files.stat.withArgs(`/ipfs/${cid}`).returns({ type: 'directory' }) diff --git a/packages/ipfs-http-server/test/inject/mfs/flush.js b/packages/ipfs-http-server/test/inject/mfs/flush.js index 7fbd8c5411..2e2158bd38 100644 --- a/packages/ipfs-http-server/test/inject/mfs/flush.js +++ b/packages/ipfs-http-server/test/inject/mfs/flush.js @@ -4,10 +4,12 @@ const { expect } = require('aegir/utils/chai') const http = require('../../utils/http') const sinon = require('sinon') -const CID = require('cids') -const cid = new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') +const { CID } = require('multiformats/cid') +const cid = CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') const testHttpMethod = require('../../utils/test-http-method') const { AbortSignal } = require('native-abort-controller') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') const defaultOptions = { timeout: undefined, @@ -22,6 +24,9 @@ describe('/files/flush', () => { ipfs = { files: { flush: sinon.stub().resolves(cid) + }, + bases: { + getBase: sinon.stub() } } }) @@ -31,6 +36,7 @@ describe('/files/flush', () => { }) it('should flush a 
path', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const response = await http({ method: 'POST', url: `/api/v0/files/flush?arg=${path}` @@ -42,6 +48,7 @@ describe('/files/flush', () => { }) it('should flush without a path', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const response = await http({ method: 'POST', url: '/api/v0/files/flush' @@ -53,6 +60,7 @@ describe('/files/flush', () => { }) it('should flush with a different CID base', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.files.flush.resolves(cid.toV1()) const response = await http({ @@ -62,10 +70,11 @@ describe('/files/flush', () => { expect(ipfs.files.flush.callCount).to.equal(1) expect(ipfs.files.flush.calledWith('/', defaultOptions)).to.be.true() - expect(response).to.have.nested.property('result.Cid', cid.toV1().toString('base64')) + expect(response).to.have.nested.property('result.Cid', cid.toV1().toString(base64)) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const response = await http({ method: 'POST', url: '/api/v0/files/flush?timeout=1s' diff --git a/packages/ipfs-http-server/test/inject/mfs/ls.js b/packages/ipfs-http-server/test/inject/mfs/ls.js index 7a1e6410f3..12f7512c13 100644 --- a/packages/ipfs-http-server/test/inject/mfs/ls.js +++ b/packages/ipfs-http-server/test/inject/mfs/ls.js @@ -4,10 +4,11 @@ const { expect } = require('aegir/utils/chai') const http = require('../../utils/http') const sinon = require('sinon') -const CID = require('cids') -const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') +const { CID } = require('multiformats/cid') +const fileCid = CID.parse('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') const testHttpMethod = require('../../utils/test-http-method') const { AbortSignal } = require('native-abort-controller') +const { base58btc } = require('multiformats/bases/base58') 
const defaultOptions = { timeout: undefined, @@ -33,6 +34,9 @@ describe('/files/ls', () => { ipfs = { files: { ls: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } }) @@ -42,6 +46,7 @@ describe('/files/ls', () => { }) it('should list a path', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.files.ls.withArgs(path, defaultOptions).returns([file]) const response = await http({ @@ -55,7 +60,7 @@ describe('/files/ls', () => { expect(response).to.have.nested.property('result.Entries[0].Name', file.name) expect(response).to.have.nested.property('result.Entries[0].Type', 0) expect(response).to.have.nested.property('result.Entries[0].Size', 0) - expect(response).to.have.nested.property('result.Entries[0].Hash', file.cid.toString()) + expect(response).to.have.nested.property('result.Entries[0].Hash', file.cid.toString(base58btc)) }) it('should list without a path', async () => { @@ -71,6 +76,7 @@ describe('/files/ls', () => { }) it('should list a path with details', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.files.ls.withArgs(path, defaultOptions).returns([file]) const response = await http({ @@ -85,7 +91,7 @@ describe('/files/ls', () => { expect(response).to.have.nested.property('result.Entries[0].Name', file.name) expect(response).to.have.nested.property('result.Entries[0].Type', 1) expect(response).to.have.nested.property('result.Entries[0].Size', file.size) - expect(response).to.have.nested.property('result.Entries[0].Hash', file.cid.toString()) + expect(response).to.have.nested.property('result.Entries[0].Hash', file.cid.toString(base58btc)) expect(response).to.have.nested.property('result.Entries[0].Mode', file.mode) expect(response).to.have.nested.property('result.Entries[0].Mtime', file.mtime.secs) expect(response).to.have.nested.property('result.Entries[0].MtimeNsecs', file.mtime.nsecs) diff --git a/packages/ipfs-http-server/test/inject/mfs/stat.js 
b/packages/ipfs-http-server/test/inject/mfs/stat.js index adfe6c411a..9872f1eee1 100644 --- a/packages/ipfs-http-server/test/inject/mfs/stat.js +++ b/packages/ipfs-http-server/test/inject/mfs/stat.js @@ -4,10 +4,12 @@ const { expect } = require('aegir/utils/chai') const http = require('../../utils/http') const sinon = require('sinon') -const CID = require('cids') -const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') +const { CID } = require('multiformats/cid') +const fileCid = CID.parse('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') const testHttpMethod = require('../../utils/test-http-method') const { AbortSignal } = require('native-abort-controller') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') const defaultOptions = { withLocal: false, @@ -34,6 +36,9 @@ describe('/files/stat', () => { ipfs = { files: { stat: sinon.stub().resolves(stats) + }, + bases: { + getBase: sinon.stub() } } }) @@ -43,6 +48,7 @@ describe('/files/stat', () => { }) it('should stat a path', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const response = await http({ method: 'POST', url: `/api/v0/files/stat?arg=${path}` @@ -54,6 +60,7 @@ describe('/files/stat', () => { }) it('should stat a path with local', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) await http({ method: 'POST', url: `/api/v0/files/stat?arg=${path}&withLocal=true` @@ -67,6 +74,7 @@ describe('/files/stat', () => { }) it('should stat a path and only show hashes', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const response = await http({ method: 'POST', url: `/api/v0/files/stat?arg=${path}&hash=true` @@ -77,10 +85,11 @@ describe('/files/stat', () => { ...defaultOptions, hash: true })).to.be.true() - expect(response).to.have.nested.property('result.Hash', stats.cid.toString()) + 
expect(response).to.have.nested.property('result.Hash', stats.cid.toString(base58btc)) }) it('should stat a path and only show sizes', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const response = await http({ method: 'POST', url: `/api/v0/files/stat?arg=${path}&size=true` @@ -95,6 +104,7 @@ describe('/files/stat', () => { }) it('should stat a path and show hashes with a different base', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) const response = await http({ method: 'POST', url: `/api/v0/files/stat?arg=${path}&cidBase=base64` @@ -102,10 +112,11 @@ describe('/files/stat', () => { expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.calledWith(path, defaultOptions)).to.be.true() - expect(response).to.have.nested.property('result.Hash', stats.cid.toString('base64')) + expect(response).to.have.nested.property('result.Hash', stats.cid.toString(base64)) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const response = await http({ method: 'POST', url: `/api/v0/files/stat?arg=${path}&timeout=1s` diff --git a/packages/ipfs-http-server/test/inject/name.js b/packages/ipfs-http-server/test/inject/name.js index 7506942311..fc654c5b2e 100644 --- a/packages/ipfs-http-server/test/inject/name.js +++ b/packages/ipfs-http-server/test/inject/name.js @@ -2,7 +2,7 @@ /* eslint-env mocha */ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const { expect } = require('aegir/utils/chai') const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') @@ -10,7 +10,7 @@ const sinon = require('sinon') const { AbortSignal } = require('native-abort-controller') describe('/name', function () { - const cid = new CID('QmbndGRXYRyfU41TUvc52gMrwq87JJg18QsDPcCeaMcM61') + const cid = CID.parse('QmbndGRXYRyfU41TUvc52gMrwq87JJg18QsDPcCeaMcM61') let ipfs beforeEach(() => { diff --git 
a/packages/ipfs-http-server/test/inject/object.js b/packages/ipfs-http-server/test/inject/object.js index be5fb4b913..7a43f107ed 100644 --- a/packages/ipfs-http-server/test/inject/object.js +++ b/packages/ipfs-http-server/test/inject/object.js @@ -6,31 +6,37 @@ const { expect } = require('aegir/utils/chai') const fs = require('fs') const FormData = require('form-data') const streamToPromise = require('stream-to-promise') -const multibase = require('multibase') const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const sinon = require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { UnixFS } = require('ipfs-unixfs') const { AbortSignal } = require('native-abort-controller') -const { - DAGNode, - DAGLink -} = require('ipld-dag-pb') const uint8ArrayToString = require('uint8arrays/to-string') +const uint8ArrayFromString = require('uint8arrays/from-string') +const { base58btc } = require('multiformats/bases/base58') +const { base64, base64pad } = require('multiformats/bases/base64') describe('/object', () => { - const cid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - const cid2 = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1a') + const cid = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') + const cid2 = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1a') const unixfs = new UnixFS({ type: 'file' }) - const fileNode = new DAGNode(unixfs.marshal(), [ - new DAGLink('', 5, cid) - ]) - const emptyDirectoryNode = new DAGNode(new UnixFS({ - type: 'directory' - }).marshal()) + const fileNode = { + Data: unixfs.marshal(), + Links: [{ + Name: '', + Tsize: 5, + Hash: cid + }] + } + const emptyDirectoryNode = { + Data: new UnixFS({ + type: 'directory' + }).marshal(), + Links: [] + } let ipfs beforeEach(() => { @@ -48,6 +54,9 @@ describe('/object', () => { addLink: sinon.stub(), rmLink: sinon.stub() } + }, + bases: { + getBase: sinon.stub() } } }) @@ 
-63,6 +72,7 @@ describe('/object', () => { }) it('returns value', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.new.withArgs({ ...defaultOptions, template: undefined @@ -80,6 +90,7 @@ describe('/object', () => { }) it('should create an object with the passed template', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const template = 'unixfs-dir' ipfs.object.new.withArgs({ @@ -115,13 +126,13 @@ describe('/object', () => { expect(res).to.have.property('statusCode', 400) }) - // TODO: unskip after switch to v1 CIDs by default - it.skip('should create a new object and return a base64 encoded CID', async () => { + it('should create a new object and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.object.new.withArgs({ ...defaultOptions, template: undefined - }).returns(cid) - ipfs.object.get.withArgs(cid, defaultOptions).returns(emptyDirectoryNode) + }).returns(cid.toV1()) + ipfs.object.get.withArgs(cid.toV1(), defaultOptions).returns(emptyDirectoryNode) const res = await http({ method: 'POST', @@ -129,20 +140,11 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) - - it('should not create a new object for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/object/new?cid-base=invalid' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res.result.Hash).to.equal(cid.toV1().toString(base64)) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.new.withArgs({ ...defaultOptions, template: undefined, @@ -196,6 +198,7 @@ describe('/object', () => { }) it('returns value', async () => { + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.get.withArgs(cid, defaultOptions).returns(emptyDirectoryNode) const res = await http({ @@ -208,30 +211,21 @@ describe('/object', () => { expect(res).to.have.nested.property('result.Data', uint8ArrayToString(emptyDirectoryNode.Data, 'base64pad')) }) - // TODO: unskip after switch to v1 CIDs by default - it.skip('should get object and return a base64 encoded CID', async () => { - ipfs.object.get.withArgs(cid, defaultOptions).returns(emptyDirectoryNode) + it('should get object and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) + ipfs.object.get.withArgs(cid.toV1(), defaultOptions).returns(emptyDirectoryNode) const res = await http({ method: 'POST', - url: `/api/v0/object/get?cid-base=base64&arg=${cid}` + url: `/api/v0/object/get?cid-base=base64&arg=${cid.toV1()}` }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) - - it('should not get an object for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/object/get?cid-base=invalid&arg=${cid}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res.result.Hash).to.equal(cid.toV1().toString(base64)) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.get.withArgs(cid, { ...defaultOptions, timeout: 1000 @@ -250,7 +244,7 @@ describe('/object', () => { describe('/put', () => { const defaultOptions = { - enc: undefined, + pin: false, signal: sinon.match.instanceOf(AbortSignal), timeout: undefined } @@ -296,24 +290,32 @@ describe('/object', () => { expect(res).to.have.property('statusCode', 400) }) - it('updates value', async () => { - const expectedResult = { - Data: 
Buffer.from('another'), - Hash: cid.toString(), + it('puts value', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + + const pbNode = { + Data: uint8ArrayFromString('another'), Links: [{ Name: 'some link', - Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', - Size: 8 - }], - Size: 68 + Hash: CID.parse('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V'), + Tsize: 8 + } + ] } - ipfs.object.put.withArgs(sinon.match.instanceOf(Buffer), defaultOptions).returns(cid) - ipfs.object.get.withArgs(cid).resolves(new DAGNode(expectedResult.Data, expectedResult.Links, expectedResult.Size - 8)) + ipfs.object.put.withArgs(pbNode, defaultOptions).returns(cid) + ipfs.object.get.withArgs(cid).resolves(pbNode) const form = new FormData() - const filePath = 'test/fixtures/test-data/node.json' - form.append('data', fs.createReadStream(filePath)) + form.append('data', Buffer.from(JSON.stringify({ + Data: Buffer.from('another').toString('base64'), + Links: [{ + Name: 'some link', + Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', + Size: 8 + } + ] + }))) const headers = form.getHeaders() const payload = await streamToPromise(form) @@ -325,68 +327,99 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.property('result', expectedResult) + expect(res).to.have.deep.property('result', { + Data: Buffer.from('another').toString('base64'), + Hash: cid.toString(), + Links: [{ + Name: 'some link', + Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', + Size: 8 + }], + Size: 60 + }) }) - // TODO: unskip after switch to v1 CIDs by default - it.skip('should put data and return a base64 encoded CID', async () => { - const form = new FormData() - form.append('file', JSON.stringify({ Data: 'TEST' + Date.now(), Links: [] }), { filename: 'node.json' }) - const headers = form.getHeaders() + it('should put data and return a base64 encoded CID', async () => { + 
ipfs.bases.getBase.withArgs('base64').returns(base64) - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: '/api/v0/object/put?cid-base=base64', - headers, - payload - }, { ipfs }) + const pbNode = { + Data: uint8ArrayFromString('another'), + Links: [{ + Name: 'some link', + Hash: CID.parse('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V').toV1(), + Tsize: 8 + } + ] + } - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) + ipfs.object.put.withArgs(pbNode, defaultOptions).returns(cid.toV1()) + ipfs.object.get.withArgs(cid.toV1()).resolves(pbNode) - it('should not put data for invalid cid-base option', async () => { const form = new FormData() - form.append('file', JSON.stringify({ Data: 'TEST' + Date.now(), Links: [] }), { filename: 'node.json' }) + form.append('data', Buffer.from(JSON.stringify({ + Data: Buffer.from('another').toString('base64'), + Links: [{ + Name: 'some link', + Hash: CID.parse('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V').toV1().toString(), + Size: 8 + } + ] + }))) const headers = form.getHeaders() const payload = await streamToPromise(form) const res = await http({ method: 'POST', - url: '/api/v0/object/put?cid-base=invalid', + url: '/api/v0/object/put?cid-base=base64', headers, payload }, { ipfs }) - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res).to.have.property('statusCode', 200) + expect(res).to.have.deep.property('result', { + Data: Buffer.from('another').toString('base64'), + Hash: cid.toV1().toString(base64), + Links: [{ + Name: 'some link', + Hash: CID.parse('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V').toV1().toString(base64), + Size: 8 + }], + Size: 62 + }) }) it('accepts a timeout', async () => { - const expectedResult = { - Data: Buffer.from('another'), - Hash: cid.toString(), + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) + + const pbNode = { + Data: uint8ArrayFromString('another'), Links: [{ Name: 'some link', - Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', - Size: 8 - }], - Size: 68 + Hash: CID.parse('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V'), + Tsize: 8 + } + ] } - ipfs.object.put.withArgs(sinon.match.instanceOf(Buffer), { + ipfs.object.put.withArgs(pbNode, { ...defaultOptions, timeout: 1000 }).returns(cid) ipfs.object.get.withArgs(cid, { signal: sinon.match.instanceOf(AbortSignal), timeout: 1000 - }).resolves(new DAGNode(expectedResult.Data, expectedResult.Links, expectedResult.Size - 8)) + }).resolves(pbNode) const form = new FormData() - const filePath = 'test/fixtures/test-data/node.json' - form.append('data', fs.createReadStream(filePath)) + form.append('data', Buffer.from(JSON.stringify({ + Data: Buffer.from('another').toString('base64'), + Links: [{ + Name: 'some link', + Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', + Size: 8 + } + ] + }))) const headers = form.getHeaders() const payload = await streamToPromise(form) @@ -398,7 +431,16 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.property('result', expectedResult) + expect(res).to.have.deep.property('result', { + Data: Buffer.from('another').toString('base64'), + Hash: cid.toString(), + Links: [{ + Name: 'some link', + Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', + Size: 8 + }], + Size: 60 + }) }) }) @@ -434,8 +476,9 @@ describe('/object', () => { }) it('returns value', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.stat.withArgs(cid, defaultOptions).returns({ - Hash: cid.toString(), + Hash: cid, NumLinks: 'NumLinks', BlockSize: 'BlockSize', LinksSize: 'LinksSize', @@ -457,40 +500,33 @@ describe('/object', () => { expect(res).to.have.nested.property('result.CumulativeSize', 'CumulativeSize') }) - // TODO: unskip 
after switch to v1 CIDs by default - it.skip('should stat object and return a base64 encoded CID', async () => { - let res = await http({ - method: 'POST', - url: '/api/v0/object/new' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - - res = await http({ - method: 'POST', - url: '/api/v0/object/stat?cid-base=base64&arg=' + res.result.Hash - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) + it('should stat object and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) + ipfs.object.stat.withArgs(cid, defaultOptions).returns({ + Hash: cid.toV1(), + NumLinks: 'NumLinks', + BlockSize: 'BlockSize', + LinksSize: 'LinksSize', + DataSize: 'DataSize', + CumulativeSize: 'CumulativeSize' + }) - it('should not stat object for invalid cid-base option', async () => { const res = await http({ method: 'POST', - url: `/api/v0/object/stat?cid-base=invalid&arg=${cid}` + url: `/api/v0/object/stat?cid-base=base64&arg=${cid}` }, { ipfs }) - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res).to.have.property('statusCode', 200) + expect(res).to.have.nested.property('result.Hash', cid.toV1().toString(base64)) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.stat.withArgs(cid, { ...defaultOptions, timeout: 1000 }).returns({ - Hash: cid.toString(), + Hash: cid, NumLinks: 'NumLinks', BlockSize: 'BlockSize', LinksSize: 'LinksSize', @@ -604,50 +640,51 @@ describe('/object', () => { }) it('returns value', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.links.withArgs(cid, defaultOptions).returns(fileNode.Links) - const expectedResult = { - Hash: cid.toString(), - Links: [{ - Name: '', - Hash: cid.toString(), - Size: 5 - }] - 
} - const res = await http({ method: 'POST', url: `/api/v0/object/links?arg=${cid}` }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.property('result', expectedResult) + expect(res).to.have.deep.property('result', { + Hash: cid.toString(), + Links: [{ + Name: '', + Hash: cid.toString(), + Size: 5 + }] + }) }) - // TODO: unskip after switch to v1 CIDs by default - it.skip('should list object links and return a base64 encoded CID', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/object/links?cid-base=base64&arg=${cid}` - }, { ipfs }) + it('should list object links and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) + ipfs.object.links.withArgs(cid.toV1(), defaultOptions) + .returns(fileNode.Links.map(l => ({ + ...l, + Hash: l.Hash.toV1() + }))) - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - expect(res).to.have.nested.property('result.Links').that.is.empty() - expect(multibase.isEncoded(res.result.Links[0].Hash)).to.deep.equal('base64') - }) - - it('should not list object links for invalid cid-base option', async () => { const res = await http({ method: 'POST', - url: `/api/v0/object/links?cid-base=invalid&arg=${cid}` + url: `/api/v0/object/links?arg=${cid.toV1()}&cid-base=base64` }, { ipfs }) - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res).to.have.property('statusCode', 200) + expect(res).to.have.deep.property('result', { + Hash: cid.toV1().toString(base64), + Links: [{ + Name: '', + Hash: cid.toV1().toString(base64), + Size: 5 + }] + }) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.object.links.withArgs(cid, { ...defaultOptions, timeout: 1000 @@ -725,6 +762,7 @@ describe('/object', () => { }) 
it('updates value', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const data = Buffer.from('TEST' + Date.now()) ipfs.object.patch.appendData.withArgs(cid, data, defaultOptions).returns(cid) @@ -733,12 +771,6 @@ describe('/object', () => { const form = new FormData() form.append('data', data) const headers = form.getHeaders() - const expectedResult = { - Data: emptyDirectoryNode.Data, - Hash: cid.toString(), - Links: [], - Size: 4 - } const payload = await streamToPromise(form) const res = await http({ @@ -749,45 +781,44 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res.result).to.deep.equal(expectedResult) + expect(res.result).to.deep.equal({ + Data: base64pad.encode(emptyDirectoryNode.Data).substring(1), + Hash: cid.toString(), + Links: [], + Size: 4 + }) }) - // TODO: unskip after switch to v1 CIDs by default - it.skip('should append data to object and return a base64 encoded CID', async () => { - const form = new FormData() - form.append('data', Buffer.from('TEST' + Date.now())) - const headers = form.getHeaders() + it('should append data to object and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) + const data = Buffer.from('TEST' + Date.now()) - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/append-data?cid-base=base64&arg=${cid}`, - headers, - payload - }, { ipfs }) + ipfs.object.patch.appendData.withArgs(cid.toV1(), data, defaultOptions).returns(cid.toV1()) + ipfs.object.get.withArgs(cid.toV1()).returns(emptyDirectoryNode) - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) - - it('should not append data to object for invalid cid-base option', async () => { const form = new FormData() - form.append('data', Buffer.from('TEST' + Date.now())) + form.append('data', data) const headers 
= form.getHeaders() const payload = await streamToPromise(form) const res = await http({ method: 'POST', - url: `/api/v0/object/patch/append-data?cid-base=invalid&arg=${cid}`, + url: `/api/v0/object/patch/append-data?arg=${cid.toV1()}&cid-base=base64`, headers, payload }, { ipfs }) - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res).to.have.property('statusCode', 200) + expect(res.result).to.deep.equal({ + Data: base64pad.encode(emptyDirectoryNode.Data).substring(1), + Hash: cid.toV1().toString(base64), + Links: [], + Size: 4 + }) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const data = Buffer.from('TEST' + Date.now()) ipfs.object.patch.appendData.withArgs(cid, data, { @@ -799,12 +830,6 @@ describe('/object', () => { const form = new FormData() form.append('data', data) const headers = form.getHeaders() - const expectedResult = { - Data: emptyDirectoryNode.Data, - Hash: cid.toString(), - Links: [], - Size: 4 - } const payload = await streamToPromise(form) const res = await http({ @@ -815,7 +840,12 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res.result).to.deep.equal(expectedResult) + expect(res.result).to.deep.equal({ + Data: base64pad.encode(emptyDirectoryNode.Data).substring(1), + Hash: cid.toString(), + Links: [], + Size: 4 + }) }) }) @@ -872,6 +902,7 @@ describe('/object', () => { }) it('updates value', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const data = Buffer.from('TEST' + Date.now()) ipfs.object.patch.setData.withArgs(cid, data, defaultOptions).returns(cid) @@ -880,10 +911,6 @@ describe('/object', () => { const form = new FormData() form.append('data', data) const headers = form.getHeaders() - const expectedResult = { - Hash: cid.toString(), - Links: [] - } const payload = await 
streamToPromise(form) const res = await http({ @@ -894,45 +921,40 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res.result).to.deep.equal(expectedResult) + expect(res.result).to.deep.equal({ + Hash: cid.toString(), + Links: [] + }) }) - // TODO: unskip after switch to v1 CIDs by default - it.skip('should set data for object and return a base64 encoded CID', async () => { - const form = new FormData() - form.append('data', Buffer.from('TEST' + Date.now())) - const headers = form.getHeaders() - - const payload = await streamToPromise(form) - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/set-data?cid-base=base64&arg=${cid}`, - headers, - payload - }, { ipfs }) + it('should set data for object and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) + const data = Buffer.from('TEST' + Date.now()) - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) + ipfs.object.patch.setData.withArgs(cid.toV1(), data, defaultOptions).returns(cid.toV1()) + ipfs.object.get.withArgs(cid.toV1()).returns(emptyDirectoryNode) - it('should not set data for object for invalid cid-base option', async () => { const form = new FormData() - form.append('data', Buffer.from('TEST' + Date.now())) + form.append('data', data) const headers = form.getHeaders() const payload = await streamToPromise(form) const res = await http({ method: 'POST', - url: `/api/v0/object/patch/set-data?cid-base=invalid&arg=${cid}`, + url: `/api/v0/object/patch/set-data?arg=${cid.toV1()}&cid-base=base64`, headers, payload }, { ipfs }) - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res).to.have.property('statusCode', 200) + expect(res.result).to.deep.equal({ + Hash: cid.toV1().toString(base64), + Links: [] + }) }) 
it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const data = Buffer.from('TEST' + Date.now()) ipfs.object.patch.setData.withArgs(cid, data, { @@ -944,10 +966,6 @@ describe('/object', () => { const form = new FormData() form.append('data', data) const headers = form.getHeaders() - const expectedResult = { - Hash: cid.toString(), - Links: [] - } const payload = await streamToPromise(form) const res = await http({ @@ -958,7 +976,10 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res.result).to.deep.equal(expectedResult) + expect(res.result).to.deep.equal({ + Hash: cid.toString(), + Links: [] + }) }) }) @@ -1015,6 +1036,7 @@ describe('/object', () => { }) it('returns value', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const name = 'name' ipfs.object.patch.addLink.withArgs(cid, sinon.match({ @@ -1038,28 +1060,45 @@ describe('/object', () => { }) }) - // TODO: unskip after switch to v1 CIDs by default - it.skip('should add a link to an object and return a base64 encoded CID', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/add-link?cid-base=base64&arg=${cid}&arg=test&arg=${cid2}` - }, { ipfs }) + it('should add a link to an object and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) + const name = 'name' - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) + ipfs.object.patch.addLink.withArgs(cid.toV1(), sinon.match({ + Name: name, + Hash: cid2.toV1() + }), defaultOptions).returns(cid.toV1()) + ipfs.object.get.withArgs(cid.toV1()).returns({ + ...fileNode, + Links: fileNode.Links.map(l => ({ + ...l, + Hash: l.Hash.toV1() + })) + }) + ipfs.object.get.withArgs(cid2.toV1()).returns({ + ...fileNode, + Links: fileNode.Links.map(l => ({ + ...l, + Hash: l.Hash.toV1() + })) + }) - 
it('should not add a link to an object for invalid cid-base option', async () => { const res = await http({ method: 'POST', - url: `/api/v0/object/patch/add-link?cid-base=invalid&arg=${cid}&arg=test&arg=${cid2}` + url: `/api/v0/object/patch/add-link?arg=${cid.toV1()}&arg=${name}&arg=${cid2.toV1()}&cid-base=base64` }, { ipfs }) - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res).to.have.property('statusCode', 200) + expect(res).to.have.nested.property('result.Hash', cid.toV1().toString(base64)) + expect(res).to.have.deep.nested.property('result.Links[0]', { + Name: '', + Hash: cid.toV1().toString(base64), + Size: 5 + }) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const name = 'name' ipfs.object.patch.addLink.withArgs(cid, sinon.match({ @@ -1140,6 +1179,7 @@ describe('/object', () => { }) it('returns value', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const name = 'name' ipfs.object.patch.rmLink.withArgs(cid, name, { @@ -1156,30 +1196,26 @@ describe('/object', () => { expect(res).to.have.nested.property('result.Hash', cid2.toString()) }) - // TODO: unskip after switch to v1 CIDs by default - it.skip('should remove a link from an object and return a base64 encoded CID', async () => { + it('should remove a link from an object and return a base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) const name = 'name' - const res = await http({ - method: 'POST', - url: `/api/v0/object/patch/rm-link?cid-base=base64&arg=${cid}&arg=${name}` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(multibase.isEncoded(res.result.Hash)).to.deep.equal('base64') - }) + ipfs.object.patch.rmLink.withArgs(cid.toV1(), name, { + ...defaultOptions + }).returns(cid2.toV1()) + 
ipfs.object.get.withArgs(cid2.toV1()).returns(emptyDirectoryNode) - it('should not remove a link from an object for invalid cid-base option', async () => { const res = await http({ method: 'POST', - url: `/api/v0/object/patch/rm-link?cid-base=invalid&arg=${cid}&arg=derp` + url: `/api/v0/object/patch/rm-link?arg=${cid.toV1()}&arg=${name}&cid-base=base64` }, { ipfs }) - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') + expect(res).to.have.property('statusCode', 200) + expect(res).to.have.nested.property('result.Hash', cid2.toV1().toString(base64)) }) it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const name = 'name' ipfs.object.patch.rmLink.withArgs(cid, name, { diff --git a/packages/ipfs-http-server/test/inject/pin.js b/packages/ipfs-http-server/test/inject/pin.js index 6cf7188006..70768aab4d 100644 --- a/packages/ipfs-http-server/test/inject/pin.js +++ b/packages/ipfs-http-server/test/inject/pin.js @@ -3,17 +3,18 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const multibase = require('multibase') const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const sinon = require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') const allNdjson = require('../utils/all-ndjson') const { AbortSignal } = require('native-abort-controller') +const { base58btc } = require('multiformats/bases/base58') +const { base64 } = require('multiformats/bases/base64') describe('/pin', () => { - const cid = new CID('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr') - const cid2 = new CID('QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V') + const cid = CID.parse('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr') + const cid2 = CID.parse('QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V') let ipfs beforeEach(() => { @@ -23,6 +24,9 @@ describe('/pin', () => 
{ addAll: sinon.stub(), rmAll: sinon.stub(), query: sinon.stub() + }, + bases: { + getBase: sinon.stub() } } }) @@ -47,6 +51,7 @@ describe('/pin', () => { }) it('unpins recursive pins', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.rmAll.withArgs([{ cid, recursive: true }], defaultOptions).returns([ cid ]) @@ -61,6 +66,7 @@ describe('/pin', () => { }) it('unpins direct pins', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.rmAll.withArgs([{ cid, recursive: false @@ -80,32 +86,24 @@ describe('/pin', () => { }) it('should remove pin and return base64 encoded CID', async () => { - ipfs.pin.rmAll.withArgs([{ cid, recursive: true }], defaultOptions).returns([ - cid + ipfs.bases.getBase.withArgs('base64').returns(base64) + ipfs.pin.rmAll.withArgs([{ cid: cid.toV1(), recursive: true }], defaultOptions).returns([ + cid.toV1() ]) const res = await http({ method: 'POST', - url: `/api/v0/pin/rm?arg=${cid}&cid-base=base64` + url: `/api/v0/pin/rm?arg=${cid.toV1()}&cid-base=base64` }, { ipfs }) expect(res).to.have.property('statusCode', 200) - res.result.Pins.forEach(cid => { - expect(multibase.isEncoded(cid)).to.deep.equal('base64') + res.result.Pins.forEach(c => { + expect(c).to.equal(cid.toV1().toString(base64)) }) }) - it('should not remove pin for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/pin/rm?arg=${cid}&cid-base=invalid` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') - }) - it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.rmAll.withArgs([{ cid, recursive: true @@ -146,6 +144,7 @@ describe('/pin', () => { }) it('recursively', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ cid, recursive: true, @@ -164,6 
+163,7 @@ describe('/pin', () => { }) it('directly', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ cid, recursive: false, @@ -182,36 +182,28 @@ describe('/pin', () => { }) it('should add pin and return base64 encoded CID', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.pin.addAll.withArgs([{ - cid, + cid: cid.toV1(), recursive: true, metadata: undefined }], defaultOptions).returns([ - cid + cid.toV1() ]) const res = await http({ method: 'POST', - url: `/api/v0/pin/add?arg=${cid}&cid-base=base64` + url: `/api/v0/pin/add?arg=${cid.toV1()}&cid-base=base64` }, { ipfs }) expect(res).to.have.property('statusCode', 200) - res.result.Pins.forEach(cid => { - expect(multibase.isEncoded(cid)).to.deep.equal('base64') + res.result.Pins.forEach(c => { + expect(c).to.equal(cid.toV1().toString(base64)) }) }) - it('should not add pin for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/pin/add?arg=${cid}&cid-base=invalid` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') - }) - it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.addAll.withArgs([{ cid, recursive: true, @@ -255,6 +247,7 @@ describe('/pin', () => { }) it('finds all pinned objects', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs(defaultOptions).returns([{ cid, type: 'recursive' @@ -270,6 +263,7 @@ describe('/pin', () => { }) it('finds all pinned objects streaming', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs(defaultOptions).returns([{ cid: cid, type: 'recursive' @@ -291,6 +285,7 @@ describe('/pin', () => { }) it('finds specific pinned objects', async () => { + 
ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs({ ...defaultOptions, paths: [`${cid}`] @@ -313,6 +308,7 @@ describe('/pin', () => { }) it('finds pins of type', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs({ ...defaultOptions, type: 'direct' @@ -335,8 +331,9 @@ describe('/pin', () => { }) it('should list pins and return base64 encoded CIDs', async () => { + ipfs.bases.getBase.withArgs('base64').returns(base64) ipfs.pin.ls.withArgs(defaultOptions).returns([{ - cid, + cid: cid.toV1(), type: 'direct' }]) @@ -346,24 +343,13 @@ describe('/pin', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.nested.property('result.Keys').that.satisfies((keys) => { - return Object.keys(keys).reduce((acc, curr) => { - return acc && multibase.isEncoded(curr) === 'base64' - }, true) + expect(res).to.have.nested.deep.property(`result.Keys.${cid.toV1().toString(base64)}`, { + Type: 'direct' }) }) - it('should not list pins for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/pin/ls?cid-base=invalid' - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') - }) - it('accepts a timeout', async () => { + ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) ipfs.pin.ls.withArgs({ ...defaultOptions, timeout: 1000 diff --git a/packages/ipfs-http-server/test/inject/ping.js b/packages/ipfs-http-server/test/inject/ping.js index e5171747c3..2bda3c2150 100644 --- a/packages/ipfs-http-server/test/inject/ping.js +++ b/packages/ipfs-http-server/test/inject/ping.js @@ -29,15 +29,6 @@ describe('/ping', function () { return testHttpMethod('/api/v0/ping') }) - it('returns 400 if both n and count are provided', async () => { - const res = await http({ - method: 'POST', - url: '/api/v0/ping?arg=peerid&n=1&count=1' - }, 
{ ipfs }) - - expect(res).to.have.property('statusCode', 400) - }) - it('returns 400 if arg is not provided', async () => { const res = await http({ method: 'POST', diff --git a/packages/ipfs-http-server/test/inject/repo.js b/packages/ipfs-http-server/test/inject/repo.js index 9f0e018986..a42e1d7f4a 100644 --- a/packages/ipfs-http-server/test/inject/repo.js +++ b/packages/ipfs-http-server/test/inject/repo.js @@ -5,13 +5,13 @@ const { expect } = require('aegir/utils/chai') const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const sinon = require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { AbortSignal } = require('native-abort-controller') const allNdjson = require('../utils/all-ndjson') describe('/repo', () => { - const cid = new CID('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr') - const cid2 = new CID('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgda') + const cid = CID.parse('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr') + const cid2 = CID.parse('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgda') let ipfs beforeEach(() => { diff --git a/packages/ipfs-http-server/test/inject/resolve.js b/packages/ipfs-http-server/test/inject/resolve.js index 8a2619bf47..b153658c57 100644 --- a/packages/ipfs-http-server/test/inject/resolve.js +++ b/packages/ipfs-http-server/test/inject/resolve.js @@ -5,18 +5,18 @@ const { expect } = require('aegir/utils/chai') const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const sinon = require('sinon') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { AbortSignal } = require('native-abort-controller') const defaultOptions = { recursive: true, - cidBase: undefined, + cidBase: 'base58btc', signal: sinon.match.instanceOf(AbortSignal), timeout: undefined } describe('/resolve', () => { - const cid = new CID('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr') + const cid = 
CID.parse('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr') let ipfs beforeEach(() => { @@ -29,16 +29,6 @@ describe('/resolve', () => { return testHttpMethod('/api/v0/resolve') }) - it('should not resolve a path for invalid cid-base option', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/resolve?arg=${cid}&cid-base=invalid` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - expect(res).to.have.nested.property('result.Message').that.includes('Invalid request query input') - }) - it('resolves a name', async () => { const result = 'result' ipfs.resolve.withArgs(cid.toString(), defaultOptions).returns(result) @@ -97,15 +87,6 @@ describe('/resolve', () => { expect(res).to.have.nested.property('result.Path', result) }) - it('does not accept an incalid cid-base', async () => { - const res = await http({ - method: 'POST', - url: `/api/v0/resolve?arg=${cid}&cid-base=invalid` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 400) - }) - it('accepts a timeout', async () => { const result = 'result' ipfs.resolve.withArgs(cid.toString(), { diff --git a/packages/ipfs-http-server/test/inject/stats.js b/packages/ipfs-http-server/test/inject/stats.js index f744f8bc54..4670107426 100644 --- a/packages/ipfs-http-server/test/inject/stats.js +++ b/packages/ipfs-http-server/test/inject/stats.js @@ -7,7 +7,6 @@ const http = require('../utils/http') const sinon = require('sinon') const allNdjson = require('../utils/all-ndjson') const { AbortSignal } = require('native-abort-controller') -const CID = require('cids') describe('/stats', () => { let ipfs @@ -71,7 +70,7 @@ describe('/stats', () => { ipfs.stats.bw.withArgs({ ...defaultOptions, - peer: new CID(peer) + peer: peer }).returns([{ totalIn: 'totalIn1', totalOut: 'totalOut1', diff --git a/packages/ipfs-http-server/tsconfig.json b/packages/ipfs-http-server/tsconfig.json index 089355b43e..7838cdf9f1 100644 --- a/packages/ipfs-http-server/tsconfig.json +++ 
b/packages/ipfs-http-server/tsconfig.json @@ -9,10 +9,10 @@ ], "references": [ { - "path": "../ipfs-core-utils" + "path": "../ipfs-core-types" }, { - "path": "../ipfs-core" + "path": "../ipfs-core-utils" }, { "path": "../ipfs-http-gateway" diff --git a/packages/ipfs-message-port-client/package.json b/packages/ipfs-message-port-client/package.json index 183b3bdcdd..325518d686 100644 --- a/packages/ipfs-message-port-client/package.json +++ b/packages/ipfs-message-port-client/package.json @@ -34,13 +34,13 @@ }, "dependencies": { "browser-readablestream-to-it": "^1.0.1", - "cids": "^1.1.6", "ipfs-core-types": "^0.5.2", "ipfs-message-port-protocol": "^0.7.3", - "ipfs-unixfs": "^4.0.3" + "ipfs-unixfs": "^5.0.0", + "multiformats": "^9.4.1" }, "devDependencies": { - "aegir": "^33.0.0", + "aegir": "^34.0.2", "interface-ipfs-core": "^0.147.0", "ipfs-core": "^0.8.0", "ipfs-message-port-server": "^0.7.3", diff --git a/packages/ipfs-message-port-client/src/block.js b/packages/ipfs-message-port-client/src/block.js index 4f982860da..90364f4483 100644 --- a/packages/ipfs-message-port-client/src/block.js +++ b/packages/ipfs-message-port-client/src/block.js @@ -4,10 +4,8 @@ const Client = require('./client') const { encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid') const { decodeError } = require('ipfs-message-port-protocol/src/error') const { - encodeBlock, - decodeBlock + encodeBlock } = require('ipfs-message-port-protocol/src/block') -const CID = require('cids') /** * @typedef {import('./client').MessageTransport} MessageTransport @@ -36,9 +34,9 @@ BlockClient.prototype.get = async function get (cid, options = {}) { const { transfer } = options const { block } = await this.remote.get({ ...options, - cid: encodeCID(new CID(cid), transfer) + cid: encodeCID(cid, transfer) }) - return decodeBlock(block) + return block } /** @@ -52,10 +50,9 @@ BlockClient.prototype.put = async function put (block, options = {}) { const result = await this.remote.put({ ...options, 
// @ts-ignore PutOptions requires CID, we send EncodedCID - cid: options.cid == null ? undefined : encodeCID(new CID(options.cid), transfer), block: block instanceof Uint8Array ? block : encodeBlock(block, transfer) }) - return decodeBlock(result.block) + return decodeCID(result.cid) } /** @@ -66,8 +63,8 @@ BlockClient.prototype.rm = async function * rm (cids, options = {}) { const entries = await this.remote.rm({ ...options, cids: Array.isArray(cids) - ? cids.map(cid => encodeCID(new CID(cid), transfer)) - : [encodeCID(new CID(cids), transfer)] + ? cids.map(cid => encodeCID(cid, transfer)) + : [encodeCID(cids, transfer)] }) yield * entries.map(decodeRmEntry) @@ -80,7 +77,7 @@ BlockClient.prototype.stat = async function stat (cid, options = {}) { const { transfer } = options const result = await this.remote.stat({ ...options, - cid: encodeCID(new CID(cid), transfer) + cid: encodeCID(cid, transfer) }) return { ...result, cid: decodeCID(result.cid) } diff --git a/packages/ipfs-message-port-client/src/client/query.js b/packages/ipfs-message-port-client/src/client/query.js index 3246eeac92..c25cb8128e 100644 --- a/packages/ipfs-message-port-client/src/client/query.js +++ b/packages/ipfs-message-port-client/src/client/query.js @@ -22,7 +22,7 @@ module.exports = class Query { this.namespace = namespace this.method = method this.timeout = input.timeout == null ? 
Infinity : input.timeout - /** @type {number|null} */ + /** @type {ReturnType | null} */ this.timerID = null }) } diff --git a/packages/ipfs-message-port-client/src/core.js b/packages/ipfs-message-port-client/src/core.js index f308d9996a..061146c3b0 100644 --- a/packages/ipfs-message-port-client/src/core.js +++ b/packages/ipfs-message-port-client/src/core.js @@ -3,7 +3,7 @@ /* eslint-env browser */ const Client = require('./client') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid') const { decodeIterable, @@ -122,7 +122,7 @@ CoreClient.prototype.add = async function add (input, options = {}) { * @type {RootAPI["cat"]} */ CoreClient.prototype.cat = async function * cat (inputPath, options = {}) { - const input = CID.isCID(inputPath) ? encodeCID(inputPath) : inputPath + const input = inputPath instanceof CID ? encodeCID(inputPath) : inputPath const result = await this.remote.cat({ ...options, path: input }) yield * decodeIterable(result.data, identity) } @@ -133,7 +133,7 @@ CoreClient.prototype.cat = async function * cat (inputPath, options = {}) { * @type {RootAPI["ls"]} */ CoreClient.prototype.ls = async function * ls (inputPath, options = {}) { - const input = CID.isCID(inputPath) ? encodeCID(inputPath) : inputPath + const input = inputPath instanceof CID ? 
encodeCID(inputPath) : inputPath const result = await this.remote.ls({ ...options, path: input }) yield * decodeIterable(result.data, decodeLsEntry) diff --git a/packages/ipfs-message-port-client/src/dag.js b/packages/ipfs-message-port-client/src/dag.js index 542db13f62..c025010c6f 100644 --- a/packages/ipfs-message-port-client/src/dag.js +++ b/packages/ipfs-message-port-client/src/dag.js @@ -5,7 +5,7 @@ const { encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid') const { encodeNode, decodeNode } = require('ipfs-message-port-protocol/src/dag') /** - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID * @typedef {import('ipfs-message-port-protocol/src/cid').EncodedCID} EncodedCID * @typedef {import('ipfs-message-port-server').DAGService} DagService * @typedef {import('./client').MessageTransport} MessageTransport @@ -22,7 +22,7 @@ class DAGClient extends Client { * @param {MessageTransport} transport */ constructor (transport) { - super('dag', ['put', 'get', 'resolve', 'tree'], transport) + super('dag', ['put', 'get', 'resolve'], transport) } } @@ -30,11 +30,8 @@ class DAGClient extends Client { * @type {DAGAPI["put"]} */ DAGClient.prototype.put = async function put (dagNode, options = {}) { - const { cid } = options - const encodedCID = await this.remote.put({ ...options, - encodedCid: cid != null ? 
encodeCID(cid) : undefined, dagNode: encodeNode(dagNode, options.transfer) }) @@ -65,18 +62,6 @@ DAGClient.prototype.resolve = async function resolve (cid, options = {}) { return { cid: decodeCID(encodedCID), remainderPath } } -/** - * @type {DAGAPI["tree"]} - */ -DAGClient.prototype.tree = async function * tree (cid, options = {}) { - const paths = await this.remote.tree({ - ...options, - cid: encodeCID(cid, options.transfer) - }) - - yield * paths -} - /** * @param {string|CID} input * @param {Transferable[]} [transfer] diff --git a/packages/ipfs-message-port-client/src/files.js b/packages/ipfs-message-port-client/src/files.js index f12a305758..c7b3ac8abb 100644 --- a/packages/ipfs-message-port-client/src/files.js +++ b/packages/ipfs-message-port-client/src/files.js @@ -3,7 +3,7 @@ /* eslint-env browser */ const Client = require('./client') const { decodeCID } = require('ipfs-message-port-protocol/src/cid') -const CID = require('cids') +const { CID } = require('multiformats/cid') /** * @typedef {import('ipfs-message-port-server').FilesService} FilesService @@ -50,7 +50,7 @@ module.exports = FilesClient * @param {string|CID} pathOrCID */ const encodeLocation = pathOrCID => - CID.isCID(pathOrCID) ? `/ipfs/${pathOrCID.toString()}` : pathOrCID + pathOrCID instanceof CID ? `/ipfs/${pathOrCID.toString()}` : pathOrCID /** * @param {EncodedStat} data diff --git a/packages/ipfs-message-port-protocol/README.md b/packages/ipfs-message-port-protocol/README.md index 7ef771235a..9b2646c674 100644 --- a/packages/ipfs-message-port-protocol/README.md +++ b/packages/ipfs-message-port-protocol/README.md @@ -20,7 +20,6 @@ - [Usage](#usage) - [Wire protocol codecs](#wire-protocol-codecs) - [`CID`](#cid) - - [Block](#block) - [DAGNode](#dagnode) - [AsyncIterable](#asynciterable) - [Callback](#callback) @@ -48,7 +47,7 @@ Codecs for [CID][] implementation in JavaScript. 
```js const { CID, encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid') -const cid = new CID('bafybeig6xv5nwphfmvcnektpnojts33jqcuam7bmye2pb54adnrtccjlsu') +const cid = CID.parse('bafybeig6xv5nwphfmvcnektpnojts33jqcuam7bmye2pb54adnrtccjlsu') const { port1, port2 } = new MessageChannel() @@ -66,34 +65,6 @@ port2.onmessage = ({data}) => { } ``` -### Block - -Codecs for [IPLD Block][] implementation in JavaScript. - -```js -const { Block, encodeBlock, decodeBlock } = require('ipfs-message-port-protocol/src/block') - -const data = new TextEncoder().encode('hello') -const cid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') -const block = new Block(data, cid) - -const { port1, port2 } = new MessageChannel() - -// Will copy underlying memory -port1.postMessage(encodeBlock(block)) - -// Will transfer underlying memory (block & cid will be corrupt on this thread) -const transfer = [] -port1.postMessage(encodeBlock(block, transfer), transfer) - - -// On the receiver thread -port2.onmessage = ({data}) => { - const block = decodeBlock(data) - block instanceof Block // true -} -``` - ### DAGNode Codec for DAGNodes accepted by `ipfs.dag.put` API. 
@@ -203,7 +174,6 @@ port2.onmessage = ({data}) => { [MessagePort]:https://developer.mozilla.org/en-US/docs/Web/API/MessagePort [Transferable]:https://developer.mozilla.org/en-US/docs/Web/API/Transferable -[IPLD Block]:https://github.com/ipld/js-ipld-block [CID]:https://github.com/multiformats/js-cid [async iterables]:https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of diff --git a/packages/ipfs-message-port-protocol/package.json b/packages/ipfs-message-port-protocol/package.json index 6f364551cc..86cc3fc9c9 100644 --- a/packages/ipfs-message-port-protocol/package.json +++ b/packages/ipfs-message-port-protocol/package.json @@ -46,14 +46,13 @@ "dep-check": "aegir dep-check -i rimraf -i ipfs-core-types" }, "dependencies": { - "cids": "^1.1.6", "ipfs-core-types": "^0.5.2", - "ipld-block": "^0.11.0" + "multiformats": "^9.4.1" }, "devDependencies": { - "aegir": "^33.0.0", + "aegir": "^34.0.2", "rimraf": "^3.0.2", - "uint8arrays": "^2.1.3" + "uint8arrays": "^2.1.6" }, "engines": { "node": ">=14.0.0", diff --git a/packages/ipfs-message-port-protocol/src/block.js b/packages/ipfs-message-port-protocol/src/block.js index 01608f1017..2654c8e8b1 100644 --- a/packages/ipfs-message-port-protocol/src/block.js +++ b/packages/ipfs-message-port-protocol/src/block.js @@ -1,8 +1,5 @@ 'use strict' -const { encodeCID, decodeCID } = require('./cid') -const Block = require('ipld-block') - /** * @typedef {import('./error').EncodedError} EncodedError * @typedef {import('./cid').EncodedCID} EncodedCID @@ -13,38 +10,19 @@ const Block = require('ipld-block') */ /** - * @typedef {Object} EncodedBlock - * @property {Uint8Array} data - * @property {EncodedCID} cid - */ - -/** - * Encodes Block for over the message channel transfer. + * Encodes Uint8Array for transfer over the message channel. * * If `transfer` array is provided all the encountered `ArrayBuffer`s within * this block will be added to the transfer so they are moved across without * copy. 
* - * @param {Block} block + * @param {Uint8Array} data * @param {Transferable[]} [transfer] - * @returns {EncodedBlock} */ -const encodeBlock = ({ cid, data }, transfer) => { +const encodeBlock = (data, transfer) => { if (transfer) { transfer.push(data.buffer) } - return { cid: encodeCID(cid, transfer), data } + return data } exports.encodeBlock = encodeBlock - -/** - * @param {EncodedBlock} encodedBlock - * @returns {Block} - */ -const decodeBlock = ({ cid, data }) => { - return new Block(data, decodeCID(cid)) -} - -exports.decodeBlock = decodeBlock - -exports.Block = Block diff --git a/packages/ipfs-message-port-protocol/src/cid.js b/packages/ipfs-message-port-protocol/src/cid.js index 8b2fd2cf79..cf04851eb7 100644 --- a/packages/ipfs-message-port-protocol/src/cid.js +++ b/packages/ipfs-message-port-protocol/src/cid.js @@ -1,11 +1,12 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') /** * @typedef {Object} EncodedCID - * @property {string} codec - * @property {Uint8Array} multihash + * @property {number} code + * @property {object} multihash + * @property {Uint8Array} multihash.digest * @property {number} version */ @@ -20,7 +21,7 @@ const CID = require('cids') */ const encodeCID = (cid, transfer) => { if (transfer) { - transfer.push(cid.multihash.buffer) + transfer.push(cid.multihash.bytes.buffer) } return cid } @@ -36,7 +37,24 @@ exports.encodeCID = encodeCID const decodeCID = encodedCID => { /** @type {CID} */ const cid = (encodedCID) - Object.setPrototypeOf(cid.multihash, Uint8Array.prototype) + + // @ts-ignore non-enumerable field that doesn't always get transferred + if (!cid._baseCache) { + Object.defineProperty(cid, '_baseCache', { + value: new Map() + }) + } + + // @ts-ignore non-enumerable field that doesn't always get transferred + if (!cid.asCID) { + Object.defineProperty(cid, 'asCID', { + get: () => cid + }) + } + + Object.setPrototypeOf(cid.multihash.digest, Uint8Array.prototype) + 
Object.setPrototypeOf(cid.multihash.bytes, Uint8Array.prototype) + Object.setPrototypeOf(cid.bytes, Uint8Array.prototype) Object.setPrototypeOf(cid, CID.prototype) // TODO: Figure out a way to avoid `Symbol.for` here as it can get out of // sync with cids implementation. diff --git a/packages/ipfs-message-port-protocol/src/dag.js b/packages/ipfs-message-port-protocol/src/dag.js index 01bfdb2ad3..f58cb80cde 100644 --- a/packages/ipfs-message-port-protocol/src/dag.js +++ b/packages/ipfs-message-port-protocol/src/dag.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const { encodeCID, decodeCID } = require('./cid') /** @@ -68,7 +68,7 @@ exports.encodeNode = encodeNode */ const collectNode = (value, cids, transfer) => { if (value != null && typeof value === 'object') { - if (CID.isCID(value)) { + if (value instanceof CID) { cids.push(value) encodeCID(value, transfer) } else if (value instanceof ArrayBuffer) { diff --git a/packages/ipfs-message-port-protocol/test/block.browser.js b/packages/ipfs-message-port-protocol/test/block.browser.js index ed7acc58a2..fed473bf22 100644 --- a/packages/ipfs-message-port-protocol/test/block.browser.js +++ b/packages/ipfs-message-port-protocol/test/block.browser.js @@ -2,12 +2,10 @@ /* eslint-env mocha */ -const CID = require('cids') -const { encodeBlock, decodeBlock } = require('../src/block') +const { encodeBlock } = require('../src/block') const { ipc } = require('./util') const { expect } = require('aegir/utils/chai') const uint8ArrayFromString = require('uint8arrays/from-string') -const Block = require('ipld-block') describe('block (browser)', function () { this.timeout(10 * 1000) @@ -15,37 +13,21 @@ describe('block (browser)', function () { describe('encodeBlock / decodeBlock', () => { it('should decode Block over message channel', async () => { - const blockIn = new Block( - uint8ArrayFromString('hello'), - new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - ) + 
const blockIn = uint8ArrayFromString('hello') - const blockOut = decodeBlock(await move(encodeBlock(blockIn))) + const blockOut = await move(encodeBlock(blockIn)) expect(blockOut).to.be.deep.equal(blockIn) }) it('should decode Block over message channel & transfer bytes', async () => { - const cid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - const data = uint8ArrayFromString('hello') - const blockIn = new Block(data, cid) + const blockIn = uint8ArrayFromString('hello') const transfer = [] - const blockOut = decodeBlock( - await move(encodeBlock(blockIn, transfer), transfer) - ) + const blockOut = await move(encodeBlock(blockIn, transfer), transfer) - expect(blockOut).to.be.instanceOf(Block) - expect(blockOut).to.be.deep.equal( - new Block( - uint8ArrayFromString('hello'), - new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - ) - ) - - expect(data).to.have.property('byteLength', 0, 'data was cleared') - expect(cid.multihash).to.have.property('byteLength', 0, 'cid was cleared') + expect(blockOut).to.equalBytes(uint8ArrayFromString('hello')) }) }) }) diff --git a/packages/ipfs-message-port-protocol/test/cid.browser.js b/packages/ipfs-message-port-protocol/test/cid.browser.js index 3b4761127f..e2fabcaff9 100644 --- a/packages/ipfs-message-port-protocol/test/cid.browser.js +++ b/packages/ipfs-message-port-protocol/test/cid.browser.js @@ -2,7 +2,7 @@ /* eslint-env mocha */ -const CID = require('cids') +const { CID } = require('multiformats/cid') const { encodeCID, decodeCID } = require('../src/cid') const { ipc } = require('./util') const { expect } = require('aegir/utils/chai') @@ -13,30 +13,28 @@ describe('cid (browser)', function () { describe('encodeCID / decodeCID', () => { it('should decode to CID over message channel', async () => { - const cidIn = new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') + const cidIn = CID.parse('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') const cidDataIn = encodeCID(cidIn) const cidDataOut = await 
move(cidDataIn) const cidOut = decodeCID(cidDataOut) expect(cidOut).to.be.an.instanceof(CID) - expect(CID.isCID(cidOut)).to.be.true() expect(cidOut.equals(cidIn)).to.be.true() - expect(cidIn.multihash) + expect(cidIn.bytes) .property('byteLength') .not.be.equal(0) }) it('should decode CID and transfer bytes', async () => { - const cidIn = new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') + const cidIn = CID.parse('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') const transfer = [] const cidDataIn = encodeCID(cidIn, transfer) const cidDataOut = await move(cidDataIn, transfer) const cidOut = decodeCID(cidDataOut) expect(cidOut).to.be.an.instanceof(CID) - expect(CID.isCID(cidOut)).to.be.true() - expect(cidIn.multihash).property('byteLength', 0) - expect(cidOut.multihash) + expect(cidIn.bytes).property('byteLength', 0) + expect(cidOut.bytes) .property('byteLength') .to.not.be.equal(0) expect(cidOut.toString()).to.be.equal( diff --git a/packages/ipfs-message-port-protocol/test/cid.spec.js b/packages/ipfs-message-port-protocol/test/cid.spec.js index 14bee372f8..6fbfce7762 100644 --- a/packages/ipfs-message-port-protocol/test/cid.spec.js +++ b/packages/ipfs-message-port-protocol/test/cid.spec.js @@ -2,7 +2,7 @@ /* eslint-env mocha */ -const CID = require('cids') +const { CID } = require('multiformats/cid') const { encodeCID, decodeCID } = require('../src/cid') const { expect } = require('aegir/utils/chai') @@ -11,22 +11,22 @@ describe('cid', function () { describe('encodeCID / decodeCID', () => { it('should encode CID', () => { - const { multihash, codec, version } = encodeCID( - new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') + const { multihash: { digest }, code, version } = encodeCID( + CID.parse('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') ) - expect(multihash).to.be.an.instanceof(Uint8Array) + expect(digest).to.be.an.instanceof(Uint8Array) expect(version).to.be.a('number') - expect(codec).to.be.a('string') + expect(code).to.be.a('number') }) 
it('should decode CID', () => { - const { multihash, codec, version } = encodeCID( - new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') + const encoded = encodeCID( + CID.parse('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') ) - const cid = new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') - const decodecCID = decodeCID({ multihash, codec, version }) + const cid = CID.parse('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') + const decodedCID = decodeCID(encoded) - expect(cid.equals(decodecCID)).to.be.true() + expect(cid.equals(decodedCID)).to.be.true() }) }) }) diff --git a/packages/ipfs-message-port-protocol/test/dag.browser.js b/packages/ipfs-message-port-protocol/test/dag.browser.js index d70805284b..39af286b66 100644 --- a/packages/ipfs-message-port-protocol/test/dag.browser.js +++ b/packages/ipfs-message-port-protocol/test/dag.browser.js @@ -2,7 +2,7 @@ /* eslint-env mocha */ -const CID = require('cids') +const { CID } = require('multiformats/cid') const { encodeNode, decodeNode } = require('../src/dag') const { ipc } = require('./util') const { expect } = require('aegir/utils/chai') @@ -14,10 +14,10 @@ describe('dag (browser)', function () { describe('encodeNode / decodeNode', () => { it('should decode dagNode over message channel', async () => { - const cid1 = new CID( + const cid1 = CID.parse( 'bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce' ) - const cid2 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ') + const cid2 = CID.parse('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ') const hi = uint8ArrayFromString('hello world') const nodeIn = { @@ -50,13 +50,13 @@ describe('dag (browser)', function () { structure: { with: { links: [ - new CID(cid1) + CID.parse(cid1) ] } } }, other: { - link: new CID(cid2) + link: CID.parse(cid2) } } const transfer = [] @@ -71,19 +71,19 @@ describe('dag (browser)', function () { structure: { with: { links: [ - new CID(cid1) + CID.parse(cid1) ] } } }, other: { - link: new CID(cid2) + link: 
CID.parse(cid2) } }) expect(transfer).to.containSubset( [{ byteLength: 0 }, { byteLength: 0 }, { byteLength: 0 }], - 'tarnsferred buffers were cleared' + 'transferred buffers were cleared' ) }) }) diff --git a/packages/ipfs-message-port-protocol/test/dag.spec.js b/packages/ipfs-message-port-protocol/test/dag.spec.js index 4a2ae57b7d..0cbc368d0a 100644 --- a/packages/ipfs-message-port-protocol/test/dag.spec.js +++ b/packages/ipfs-message-port-protocol/test/dag.spec.js @@ -2,7 +2,7 @@ /* eslint-env mocha */ -const CID = require('cids') +const { CID } = require('multiformats/cid') const { encodeNode } = require('../src/dag') const { expect } = require('aegir/utils/chai') const uint8ArrayFromString = require('uint8arrays/from-string') @@ -12,10 +12,10 @@ describe('dag', function () { describe('encodeNode / decodeNode', () => { it('shoud encode node', () => { - const cid1 = new CID( + const cid1 = CID.parse( 'bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce' ) - const cid2 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ') + const cid2 = CID.parse('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ') const dagNode = { hi: 'hello', link: cid1, @@ -36,10 +36,10 @@ describe('dag', function () { }) it('shoud encode and add buffers to transfer list', () => { - const cid1 = new CID( + const cid1 = CID.parse( 'bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce' ) - const cid2 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ') + const cid2 = CID.parse('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ') const hi = uint8ArrayFromString('hello world') const dagNode = { @@ -67,16 +67,16 @@ describe('dag', function () { expect(transfer).to.be.an.instanceOf(Array) expect(transfer).to.have.property('length', 3) - expect(transfer).to.include(cid1.multihash.buffer) - expect(transfer).to.include(cid2.multihash.buffer) + expect(transfer).to.include(cid1.multihash.bytes.buffer) + expect(transfer).to.include(cid2.multihash.bytes.buffer) 
expect(transfer).to.include(hi.buffer) }) it('shoud decode node', () => { - const cid1 = new CID( + const cid1 = CID.parse( 'bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce' ) - const cid2 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ') + const cid2 = CID.parse('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ') const hi = uint8ArrayFromString('hello world') const dagNode = { @@ -104,8 +104,8 @@ describe('dag', function () { expect(transfer).to.be.an.instanceOf(Array) expect(transfer).to.have.property('length', 3) - expect(transfer).to.include(cid1.multihash.buffer) - expect(transfer).to.include(cid2.multihash.buffer) + expect(transfer).to.include(cid1.multihash.bytes.buffer) + expect(transfer).to.include(cid2.multihash.bytes.buffer) expect(transfer).to.include(hi.buffer) }) }) diff --git a/packages/ipfs-message-port-server/.aegir.js b/packages/ipfs-message-port-server/.aegir.js index 7309e2a5d4..8f801e3b0e 100644 --- a/packages/ipfs-message-port-server/.aegir.js +++ b/packages/ipfs-message-port-server/.aegir.js @@ -3,6 +3,6 @@ /** @type {import('aegir').PartialOptions} */ module.exports = { build: { - bundlesizeMax: '13KB' + bundlesizeMax: '8KB' } } diff --git a/packages/ipfs-message-port-server/package.json b/packages/ipfs-message-port-server/package.json index c9398b8681..f1bbe1bdb8 100644 --- a/packages/ipfs-message-port-server/package.json +++ b/packages/ipfs-message-port-server/package.json @@ -40,11 +40,11 @@ "dependencies": { "ipfs-core-types": "^0.5.2", "ipfs-message-port-protocol": "^0.7.3", - "it-all": "^1.0.4" + "it-all": "^1.0.4", + "multiformats": "^9.4.1" }, "devDependencies": { - "aegir": "^33.0.0", - "cids": "^1.1.6", + "aegir": "^34.0.2", "rimraf": "^3.0.2" }, "engines": { diff --git a/packages/ipfs-message-port-server/src/block.js b/packages/ipfs-message-port-server/src/block.js index 1a19efb596..187003896f 100644 --- a/packages/ipfs-message-port-server/src/block.js +++ b/packages/ipfs-message-port-server/src/block.js @@ 
-4,17 +4,14 @@ const collect = require('it-all') const { encodeError } = require('ipfs-message-port-protocol/src/error') const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid') const { - decodeBlock, encodeBlock } = require('ipfs-message-port-protocol/src/block') /** * @typedef {import('ipfs-core-types').IPFS} IPFS - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID * @typedef {import('ipfs-message-port-protocol/src/error').EncodedError} EncodedError - * @typedef {import('ipfs-message-port-protocol/src/block').Block} Block * @typedef {import('ipfs-message-port-protocol/src/cid').EncodedCID} EncodedCID - * @typedef {import('ipfs-message-port-protocol/src/block').EncodedBlock} EncodedBlock * @typedef {import('ipfs-message-port-protocol/src/block').EncodedRmResult} EncodedRmResult * @typedef {import('ipfs-core-types/src/block').PutOptions} PutOptions */ @@ -29,7 +26,7 @@ exports.BlockService = class BlockService { /** * @typedef {Object} GetResult - * @property {EncodedBlock} block + * @property {Uint8Array} block * @property {Transferable[]} transfer * * @typedef {Object} GetQuery @@ -50,11 +47,11 @@ exports.BlockService = class BlockService { /** * @typedef {Object} PutResult - * @property {EncodedBlock} block + * @property {EncodedCID} cid * @property {Transferable[]} transfer * * @typedef {Object} PutQuery - * @property {EncodedBlock|Uint8Array} block + * @property {Uint8Array} block * @property {EncodedCID|undefined} [cid] * * Stores input as an IPFS block. @@ -64,24 +61,12 @@ exports.BlockService = class BlockService { */ async put (query) { const input = query.block - let result - /** @type {Uint8Array|Block} */ - if (input instanceof Uint8Array) { - result = await this.ipfs.block.put(input, { - ...query, - cid: query.cid ? 
decodeCID(query.cid) : query.cid - }) - } else { - const block = decodeBlock(input) - result = await this.ipfs.block.put(block, { - ...query, - cid: undefined - }) - } + const result = await this.ipfs.block.put(input, query) /** @type {Transferable[]} */ const transfer = [] - return { transfer, block: encodeBlock(result, transfer) } + + return { transfer, cid: encodeCID(result, transfer) } } /** diff --git a/packages/ipfs-message-port-server/src/core.js b/packages/ipfs-message-port-server/src/core.js index f0961e83d3..4cb3f5505a 100644 --- a/packages/ipfs-message-port-server/src/core.js +++ b/packages/ipfs-message-port-server/src/core.js @@ -10,7 +10,7 @@ const { const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid') /** - * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion * @typedef {import('ipfs-core-types').IPFS} IPFS * @typedef {import('ipfs-core-types/src/root').AddOptions} AddOptions * @typedef {import('ipfs-core-types/src/root').AddAllOptions} AddAllOptions diff --git a/packages/ipfs-message-port-server/src/dag.js b/packages/ipfs-message-port-server/src/dag.js index 98aadb1f25..379a3ed3e2 100644 --- a/packages/ipfs-message-port-server/src/dag.js +++ b/packages/ipfs-message-port-server/src/dag.js @@ -2,11 +2,10 @@ const { encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid') const { decodeNode, encodeNode } = require('ipfs-message-port-protocol/src/dag') -const collect = require('it-all') /** * @typedef {import('ipfs-core-types').IPFS} IPFS - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID * @typedef {import('ipfs-message-port-protocol/src/cid').EncodedCID} EncodedCID * @typedef {import('ipfs-message-port-protocol/src/dag').EncodedDAGNode} EncodedDAGNode * @typedef {import('ipfs-core-types/src/dag').PutOptions} PutOptions @@ -30,11 +29,8 @@ exports.DAGService = class DAGService { */ async put (query) { const dagNode = 
decodeNode(query.dagNode) + const cid = await this.ipfs.dag.put(dagNode, query) - const cid = await this.ipfs.dag.put(dagNode, { - ...query, - cid: query.encodedCid ? decodeCID(query.encodedCid) : undefined - }) return encodeCID(cid) } @@ -94,30 +90,6 @@ exports.DAGService = class DAGService { remainderPath } } - - /** - * @typedef {Object} EnumerateDAG - * @property {EncodedCID} cid - * @property {string} [path] - * @property {boolean} [recursive] - * @property {number} [timeout] - * @property {AbortSignal} [signal] - * - * @param {EnumerateDAG} query - * @returns {Promise} - */ - async tree (query) { - const { cid, path, recursive, timeout, signal } = query - const result = await this.ipfs.dag.tree(decodeCID(cid), { - path, - recursive, - timeout, - signal - }) - const entries = await collect(result) - - return entries - } } /** diff --git a/packages/ipfs-message-port-server/test/basic.spec.js b/packages/ipfs-message-port-server/test/basic.spec.js index 4c94264945..690473bee1 100644 --- a/packages/ipfs-message-port-server/test/basic.spec.js +++ b/packages/ipfs-message-port-server/test/basic.spec.js @@ -19,9 +19,6 @@ describe('dag', function () { expect(service) .to.have.nested.property('dag.get') .be.a('function') - expect(service) - .to.have.nested.property('dag.tree') - .be.a('function') }) it('Server', () => { expect(Server).to.be.a('function') diff --git a/packages/ipfs-message-port-server/test/transfer.spec.js b/packages/ipfs-message-port-server/test/transfer.spec.js index bf6092960b..9ecde115a1 100644 --- a/packages/ipfs-message-port-server/test/transfer.spec.js +++ b/packages/ipfs-message-port-server/test/transfer.spec.js @@ -3,7 +3,7 @@ /* eslint-env mocha */ const { encodeCID } = require('ipfs-message-port-protocol/src/cid') -const CID = require('cids') +const { CID } = require('multiformats/cid') const { Server } = require('../src/server') const { IPFSService } = require('../src/index') @@ -11,7 +11,7 @@ describe('Server', function () { this.timeout(10 
* 1000) it('should be able to transfer multiple of the same CID instances', () => { - const cid = new CID('QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D') + const cid = CID.parse('QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D') return new Promise((resolve, reject) => { const channel = process.browser @@ -20,6 +20,9 @@ describe('Server', function () { channel.port1.onmessageerror = reject channel.port1.onmessage = event => { + channel.port1.close() + channel.port2.close() + const result = event.data.result result.ok ? resolve(result.value) : reject(new Error(result.error.message)) } diff --git a/packages/ipfs-message-port-server/tsconfig.json b/packages/ipfs-message-port-server/tsconfig.json index 58366df84c..39c5ece837 100644 --- a/packages/ipfs-message-port-server/tsconfig.json +++ b/packages/ipfs-message-port-server/tsconfig.json @@ -8,10 +8,10 @@ ], "references": [ { - "path": "../ipfs-message-port-protocol" + "path": "../ipfs-core-types" }, { - "path": "../ipfs-core-types" + "path": "../ipfs-message-port-protocol" } ] } diff --git a/packages/ipfs/.aegir.js b/packages/ipfs/.aegir.js index 3ae7aa2256..73412c5c84 100644 --- a/packages/ipfs/.aegir.js +++ b/packages/ipfs/.aegir.js @@ -118,7 +118,7 @@ module.exports = { } }, build: { - bundlesizeMax: '500KB', + bundlesizeMax: '477KB', config: esbuild }, dependencyCheck: { diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json index 7903752052..0fc3be689f 100644 --- a/packages/ipfs/package.json +++ b/packages/ipfs/package.json @@ -47,7 +47,7 @@ "devDependencies": { "@types/semver": "^7.3.4", "@types/update-notifier": "^5.0.0", - "aegir": "^33.0.0", + "aegir": "^34.0.2", "assert": "^2.0.0", "cross-env": "^7.0.0", "electron-webrtc": "^0.3.0", @@ -56,11 +56,11 @@ "ipfs-client": "^0.4.3", "ipfs-core-types": "^0.5.2", "ipfs-http-client": "^50.1.2", - "ipfs-interop": "^5.0.2", - "ipfs-utils": "^8.1.2", - "ipfsd-ctl": "^8.0.1", + "ipfs-interop": "^6.0.0", + "ipfs-utils": "^8.1.4", + "ipfsd-ctl": "^9.0.0", 
"iso-url": "^1.0.0", - "libp2p-webrtc-star": "^0.22.2", + "libp2p-webrtc-star": "^0.23.0", "merge-options": "^3.0.4", "mock-ipfs-pinning-service": "^0.1.2", "rimraf": "^3.0.2", diff --git a/packages/ipfs/test/interface-http-go.js b/packages/ipfs/test/interface-http-go.js index 8f434a4711..1e3a4501b7 100644 --- a/packages/ipfs/test/interface-http-go.js +++ b/packages/ipfs/test/interface-http-go.js @@ -94,12 +94,7 @@ describe('interface-ipfs-core over ipfs-http-client tests against go-ipfs', () = ] }) - tests.block(commonFactory, { - skip: [{ - name: 'should get a block added as CIDv1 with a CIDv0', - reason: 'go-ipfs does not support the `version` param' - }] - }) + tests.block(commonFactory) tests.bootstrap(commonFactory) @@ -123,11 +118,6 @@ describe('interface-ipfs-core over ipfs-http-client tests against go-ipfs', () = tests.dag(commonFactory, { skip: [ - // dag.tree - { - name: 'tree', - reason: 'TODO vmx 2018-02-22: Currently the tree API is not exposed in go-ipfs' - }, // dag.get: { name: 'should get a dag-pb node local value', @@ -524,14 +514,7 @@ describe('interface-ipfs-core over ipfs-http-client tests against go-ipfs', () = ipfsOptions: { offline: true } - }), { - skip: [ - { - name: 'should resolve a record from peerid as cidv1 in base32', - reason: 'TODO not implemented in go-ipfs yet: https://github.com/ipfs/go-ipfs/issues/5287' - } - ] - }) + })) tests.namePubsub(factory({ type: 'go', diff --git a/packages/ipfs/test/interface-http-js.js b/packages/ipfs/test/interface-http-js.js index 5749d77c86..ce00df00ee 100644 --- a/packages/ipfs/test/interface-http-js.js +++ b/packages/ipfs/test/interface-http-js.js @@ -63,9 +63,6 @@ describe('interface-ipfs-core over ipfs-http-client tests against js-ipfs', func skip: [{ name: 'should get only a CID, due to resolving locally only', reason: 'Local resolve option is not implemented yet' - }, { - name: 'tree', - reason: 'dag.tree is not implemented yet' }] }) diff --git a/packages/ipfs/test/utils/factory.js 
b/packages/ipfs/test/utils/factory.js index 7268659079..d82a6b09f0 100644 --- a/packages/ipfs/test/utils/factory.js +++ b/packages/ipfs/test/utils/factory.js @@ -1,6 +1,6 @@ 'use strict' const { createFactory } = require('ipfsd-ctl') -const merge = require('merge-options') +const merge = require('merge-options').bind({ ignoreUndefined: true }) const { isNode, isBrowser } = require('ipfs-utils/src/env') const commonOptions = {