diff --git a/examples/browser-sharing-node-across-tabs/README.md b/examples/browser-sharing-node-across-tabs/README.md
new file mode 100644
index 0000000000..039bc7b99a
--- /dev/null
+++ b/examples/browser-sharing-node-across-tabs/README.md
@@ -0,0 +1,38 @@
+# Sharing js-ipfs node across browsing contexts (tabs) using [SharedWorker][]
+
+> In this example, you will find a boilerplate you can use to set up a js-ipfs
+> node in a [SharedWorker] and use it from multiple tabs.
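+
+At a high level there are two pieces: `src/worker.js` starts a js-ipfs node
+inside the [SharedWorker] and exposes it over message ports with
+`ipfs-message-port-server`, while `src/main.js` runs in every tab and talks to
+that shared node through `ipfs-message-port-client`. The tab side boils down to
+the following sketch (condensed from `src/main.js`):
+
+```js
+import IPFSClient from 'ipfs-message-port-client'
+
+const main = async () => {
+  // Every tab connects to the same SharedWorker, so they all share one IPFS node.
+  const worker = new SharedWorker('./worker.js', { type: 'module' })
+  const ipfs = IPFSClient.from(worker.port)
+
+  // From here on `ipfs` exposes a subset of the regular js-ipfs API.
+  const { cid } = await ipfs.add(new Blob(['hello world!'], { type: 'text/plain' }))
+  console.log(`/ipfs/${cid}`)
+}
+
+main()
+```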
+
+## Before you start
+
+First clone the js-ipfs repo and install the dependencies of this example:
+
+```bash
+git clone https://github.com/ipfs/js-ipfs.git
+cd js-ipfs/examples/browser-sharing-node-across-tabs
+npm install
+```
+
+## Running the example
+
+Run the following command within this folder:
+
+```bash
+npm start
+```
+
+Now open your browser at `http://localhost:3000`
+
+You should see the following:
+
+![Screen Shot](./Screen%20Shot.png)
+
+
+### Run tests
+
+```bash
+npm test
+```
+
+
+[SharedWorker]:https://developer.mozilla.org/en-US/docs/Web/API/SharedWorker
\ No newline at end of file
diff --git a/examples/browser-sharing-node-across-tabs/Screen Shot.png b/examples/browser-sharing-node-across-tabs/Screen Shot.png
new file mode 100644
index 0000000000..38cf03585f
Binary files /dev/null and b/examples/browser-sharing-node-across-tabs/Screen Shot.png differ
diff --git a/examples/browser-sharing-node-across-tabs/index.html b/examples/browser-sharing-node-across-tabs/index.html
new file mode 100644
index 0000000000..8a19ba7c21
--- /dev/null
+++ b/examples/browser-sharing-node-across-tabs/index.html
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <title>Sample App</title>
+  </head>
+  <body>
+    <!-- The page content is generated by src/main.js -->
+    <script src="/static/bundle.js"></script>
+  </body>
+</html>
\ No newline at end of file
diff --git a/examples/browser-sharing-node-across-tabs/package.json b/examples/browser-sharing-node-across-tabs/package.json
new file mode 100644
index 0000000000..36d4b7c6c3
--- /dev/null
+++ b/examples/browser-sharing-node-across-tabs/package.json
@@ -0,0 +1,36 @@
+{
+ "name": "expample-browser-sharing-node-across-tabs",
+ "description": "Sharing IPFS node across browsing contexts",
+ "version": "1.0.0",
+ "private": true,
+ "scripts": {
+ "clean": "rm -rf ./dist",
+ "build": "webpack",
+ "start": "node server.js",
+ "test": "test-ipfs-example"
+ },
+ "license": "MIT",
+ "keywords": [],
+ "devDependencies": {
+ "@babel/core": "^7.2.2",
+ "@babel/preset-env": "^7.3.1",
+ "babel-loader": "^8.0.5",
+ "copy-webpack-plugin": "^5.0.4",
+ "test-ipfs-example": "^2.0.3",
+ "webpack": "^4.43.0",
+ "webpack-cli": "^3.3.11",
+ "webpack-dev-server": "^3.11.0",
+ "worker-plugin": "4.0.3"
+ },
+ "dependencies": {
+ "ipfs": "^0.47.0",
+ "ipfs-message-port-client": "^0.0.1",
+ "ipfs-message-port-server": "^0.0.1"
+ },
+ "browserslist": [
+ ">1%",
+ "not dead",
+ "not ie <= 11",
+ "not op_mini all"
+ ]
+}
\ No newline at end of file
diff --git a/examples/browser-sharing-node-across-tabs/server.js b/examples/browser-sharing-node-across-tabs/server.js
new file mode 100644
index 0000000000..4a1a8ebdfb
--- /dev/null
+++ b/examples/browser-sharing-node-across-tabs/server.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const webpack = require('webpack')
+const WebpackDevServer = require('webpack-dev-server')
+const config = require('./webpack.config')
+
+const wds = new WebpackDevServer(webpack(config), {
+ hot: true,
+ historyApiFallback: true
+})
+
+wds.listen(3000, 'localhost', (err) => {
+ if (err) {
+ throw err
+ }
+
+ console.log('Listening at localhost:3000')
+})
diff --git a/examples/browser-sharing-node-across-tabs/src/main.js b/examples/browser-sharing-node-across-tabs/src/main.js
new file mode 100644
index 0000000000..44d9f5ecf3
--- /dev/null
+++ b/examples/browser-sharing-node-across-tabs/src/main.js
@@ -0,0 +1,45 @@
+'use strict'
+
+import IPFSClient from "ipfs-message-port-client"
+
+
+const main = async () => {
+ // connect / spawn shared ipfs worker & create a client.
+ const worker = new SharedWorker('./worker.js', { type: 'module' })
+ const ipfs = IPFSClient.from(worker.port)
+
+ const path = location.hash.slice(1)
+ if (path.startsWith('/ipfs/')) {
+ await viewer(ipfs, path)
+ } else {
+ await uploader(ipfs)
+ }
+}
+
+const uploader = async (ipfs) => {
+ document.body.outerHTML += '<div class="ipfs-add">Adding "hello world!" to shared IPFS node</div>'
+ const entry = await ipfs.add(new Blob(['hello world!'], { type: 'text/plain' }))
+ const path = `/ipfs/${entry.cid}/`
+ document.body.outerHTML += `<div class="ipfs-add">Added: <a href="#${path}" target="_blank">${path}</a></div>`
+}
+
+const viewer = async (ipfs, path) => {
+ document.body.outerHTML += `<div class="loading">Loading ${path}</div>`
+ try {
+ const chunks = []
+ for await (const chunk of await ipfs.cat(path)) {
+ chunks.push(chunk)
+ }
+ const blob = new Blob(chunks)
+ const url = URL.createObjectURL(blob)
+ document.body.outerHTML += `<iframe id="content" src="${url}" style="width:100%;height:100%"></iframe>`
+
+ } catch(error) {
+ document.body.outerHTML += `<div class="error">${error}</div>`
+ }
+}
+
+onload = main
\ No newline at end of file
diff --git a/examples/browser-sharing-node-across-tabs/src/worker.js b/examples/browser-sharing-node-across-tabs/src/worker.js
new file mode 100644
index 0000000000..e39c5d9b89
--- /dev/null
+++ b/examples/browser-sharing-node-across-tabs/src/worker.js
@@ -0,0 +1,65 @@
+'use strict'
+
+import IPFS from 'ipfs'
+import { Server, IPFSService } from 'ipfs-message-port-server'
+
+const main = async () => {
+ // Start listening to all the incoming connections (browsing contexts
+ // that run `new SharedWorker('./worker.js', ...)`).
+ // Note: It is important to start listening before we do any await to ensure
+ // that connections aren't missed while awaiting.
+ const connections = listen(self, 'connect')
+
+ // Start an IPFS node & create a server that will expose its API to all clients
+ // over message channel.
+ const ipfs = await IPFS.create()
+ const service = new IPFSService(ipfs)
+ const server = new Server(service)
+
+ // connect every queued and future connection to the server.
+ for await (const event of connections) {
+ const port = event.ports[0]
+ if (port) {
+ server.connect(port)
+ }
+ }
+}
+
+/**
+ * Creates an AsyncIterable for all the events on the given `target` for
+ * the given event `type`. It is like `target.addEventListener(type, listener, options)`
+ * except that instead of passing a listener you get back an `AsyncIterable` of events.
+ * @param {EventTarget} target
+ * @param {string} type
+ * @param {AddEventListenerOptions} options
+ */
+const listen = function (target, type, options) {
+ const events = []
+ let resume
+ let ready = new Promise(resolve => (resume = resolve))
+
+ const write = event => {
+ events.push(event)
+ resume()
+ }
+ const read = async () => {
+ await ready
+ ready = new Promise(resolve => (resume = resolve))
+ return events.splice(0)
+ }
+
+ const reader = async function * () {
+ try {
+ while (true) {
+ yield * await read()
+ }
+ } finally {
+ target.removeEventListener(type, write, options)
+ }
+ }
+
+ target.addEventListener(type, write, options)
+ return reader()
+}
+
+main()
\ No newline at end of file
diff --git a/examples/browser-sharing-node-across-tabs/test.js b/examples/browser-sharing-node-across-tabs/test.js
new file mode 100644
index 0000000000..0f73bb20de
--- /dev/null
+++ b/examples/browser-sharing-node-across-tabs/test.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const pkg = require('./package.json')
+
+module.exports = {
+ [pkg.name]: (browser) => {
+ browser
+ .url(process.env.IPFS_EXAMPLE_TEST_URL)
+ .waitForElementVisible('.ipfs-add')
+
+ browser.expect.element('.ipfs-add a').text.to.contain('/ipfs/')
+ browser.click('.ipfs-add a')
+
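+ // Clicking the added link opens the viewer in a second tab (backed by the
+ // same SharedWorker); switch the test browser to that new tab.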
+ browser.windowHandle(({ value }) => {
+ browser.windowHandles(({ value: handles }) => {
+ const [handle] = handles.filter(handle => handle != value)
+ browser.switchWindow(handle)
+ })
+ })
+
+ browser.waitForElementVisible('.loading')
+ browser.expect.element('.loading').text.to.contain('Loading /ipfs/')
+
+ browser.waitForElementVisible('#content').pause(5000)
+ browser.element('css selector', '#content', frame => {
+ browser.frame({ ELEMENT: frame.value.ELEMENT }, () => {
+ browser.waitForElementPresent('body')
+ browser.expect.element('body').text.to.contain('hello world!')
+ browser.end()
+ })
+ })
+ }
+}
diff --git a/examples/browser-sharing-node-across-tabs/webpack.config.js b/examples/browser-sharing-node-across-tabs/webpack.config.js
new file mode 100644
index 0000000000..a9b412db3a
--- /dev/null
+++ b/examples/browser-sharing-node-across-tabs/webpack.config.js
@@ -0,0 +1,44 @@
+'use strict'
+
+var path = require('path')
+var webpack = require('webpack')
+const WorkerPlugin = require('worker-plugin')
+
+module.exports = {
+ devtool: 'source-map',
+ entry: [
+ 'webpack-dev-server/client?http://localhost:3000',
+ 'webpack/hot/only-dev-server',
+ './src/main'
+ ],
+ output: {
+ path: path.join(__dirname, 'dist'),
+ filename: 'static/bundle.js'
+ },
+ plugins: [
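+ // worker-plugin rewrites `new SharedWorker('./worker.js', ...)` so the worker
+ // source is bundled as its own entry (`sharedWorker: true` covers SharedWorker,
+ // not just Worker).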
+ new WorkerPlugin({
+ sharedWorker: true,
+ globalObject: 'self'
+ }),
+ new webpack.HotModuleReplacementPlugin()
+ ],
+ module: {
+ rules: [
+ {
+ test: /\.js$/,
+ exclude: /node_modules/,
+ use: {
+ loader: 'babel-loader',
+ options: {
+ presets: ['@babel/preset-env']
+ }
+ }
+ }
+ ]
+ },
+ node: {
+ fs: 'empty',
+ net: 'empty',
+ tls: 'empty'
+ }
+}
diff --git a/examples/traverse-ipld-graphs/package.json b/examples/traverse-ipld-graphs/package.json
index af27e76a6c..a8d8aeb85f 100644
--- a/examples/traverse-ipld-graphs/package.json
+++ b/examples/traverse-ipld-graphs/package.json
@@ -15,7 +15,7 @@
"dependencies": {
"cids": "^0.8.3",
"ipfs": "^0.48.0",
- "ipld-block": "^0.9.1",
+ "ipld-block": "^0.9.2",
"ipld-dag-pb": "^0.19.0",
"multihashing-async": "^1.0.0"
}
diff --git a/packages/interface-ipfs-core/package.json b/packages/interface-ipfs-core/package.json
index 36dc42332b..fd249612a0 100644
--- a/packages/interface-ipfs-core/package.json
+++ b/packages/interface-ipfs-core/package.json
@@ -42,8 +42,8 @@
"ipfs-unixfs": "^1.0.3",
"ipfs-unixfs-importer": "^2.0.2",
"ipfs-utils": "^2.2.2",
- "ipld-block": "^0.9.1",
- "ipld-dag-cbor": "^0.15.2",
+ "ipld-block": "^0.9.2",
+ "ipld-dag-cbor": "^0.15.3",
"ipld-dag-pb": "^0.19.0",
"is-ipfs": "^1.0.3",
"iso-random-stream": "^1.1.1",
diff --git a/packages/interface-ipfs-core/src/object/links.js b/packages/interface-ipfs-core/src/object/links.js
index 29f1d5a591..30f52274a8 100644
--- a/packages/interface-ipfs-core/src/object/links.js
+++ b/packages/interface-ipfs-core/src/object/links.js
@@ -59,8 +59,9 @@ module.exports = (common, options) => {
const node1bCid = await ipfs.object.put(node1b)
const links = await ipfs.object.links(node1bCid)
- expect(links).to.be.an('array').that.has.property('length', 1)
- expect(node1b.Links).to.be.deep.equal(links)
+
+ expect(links).to.have.lengthOf(1)
+ expect(node1b.Links).to.deep.equal(links)
})
it('should get links by base58 encoded multihash', async () => {
diff --git a/packages/ipfs-http-client/package.json b/packages/ipfs-http-client/package.json
index deeb97a79d..866c14c333 100644
--- a/packages/ipfs-http-client/package.json
+++ b/packages/ipfs-http-client/package.json
@@ -49,8 +49,8 @@
"form-data": "^3.0.0",
"ipfs-core-utils": "^0.3.0",
"ipfs-utils": "^2.2.2",
- "ipld-block": "^0.9.1",
- "ipld-dag-cbor": "^0.15.2",
+ "ipld-block": "^0.9.2",
+ "ipld-dag-cbor": "^0.15.3",
"ipld-dag-pb": "^0.19.0",
"ipld-raw": "^5.0.0",
"iso-url": "^0.4.7",
@@ -67,7 +67,7 @@
"nanoid": "^3.0.2",
"node-fetch": "^2.6.0",
"parse-duration": "^0.4.4",
- "stream-to-it": "^0.2.0"
+ "stream-to-it": "^0.2.1"
},
"devDependencies": {
"aegir": "^23.0.0",
diff --git a/packages/ipfs-message-port-client/.aegir.js b/packages/ipfs-message-port-client/.aegir.js
new file mode 100644
index 0000000000..b24f1478c8
--- /dev/null
+++ b/packages/ipfs-message-port-client/.aegir.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const EchoServer = require('aegir/utils/echo-server')
+const echoServer = new EchoServer()
+
+module.exports = {
+ bundlesize: { maxSize: '80kB' },
+ karma: {
+ files: [
+ {
+ pattern: 'node_modules/interface-ipfs-core/test/fixtures/**/*',
+ watched: false,
+ served: true,
+ included: false
+ },
+ {
+ pattern: 'dist/**/*',
+ watched: true,
+ served: true,
+ included: false
+ }
+ ],
+ browserNoActivityTimeout: 210 * 1000,
+ singleRun: true,
+ captureConsole: true,
+ logLevel: 'LOG_DEBUG',
+ mocha: {
+ bail: true
+ }
+ },
+ hooks: {
+ browser: {
+ pre: async () => {
+ await echoServer.start()
+
+ return {
+ env: {
+ IPFS_WORKER_URL: `/base/dist/worker.bundle.js`,
+ ECHO_SERVER: `http://${echoServer.host}:${echoServer.port}`
+ }
+ }
+ },
+ post: async () => {
+ await echoServer.stop()
+ }
+ }
+ }
+}
diff --git a/packages/ipfs-message-port-client/README.md b/packages/ipfs-message-port-client/README.md
new file mode 100644
index 0000000000..f8e15ec7fb
--- /dev/null
+++ b/packages/ipfs-message-port-client/README.md
@@ -0,0 +1,147 @@
+# ipfs-message-port-client
+
+[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://protocol.ai)
+[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/)
+[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs)
+[![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs)](https://travis-ci.com/ipfs/js-ipfs)
+[![Codecov branch](https://img.shields.io/codecov/c/github/ipfs/js-ipfs/master.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs)
+[![Dependency Status](https://david-dm.org/ipfs/js-ipfs/status.svg?path=packages/ipfs-message-port-client)](https://david-dm.org/ipfs/js-ipfs?path=packages/ipfs-message-port-client)
+[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard)
+
+> A client library for the IPFS API over a [message channel][]. This client library provides a subset of the [IPFS API](https://github.com/ipfs/js-ipfs/tree/master/docs/core-api), enabling applications to work with js-ipfs running in a different JS context, e.g. a [SharedWorker][].
+
+
+## Lead Maintainer
+
+[Alex Potsides](https://github.com/achingbrain)
+
+## Table of Contents
+
+- [Install](#install)
+- [Usage](#usage)
+- [Notes on Performance](#notes-on-performance)
+- [Contribute](#contribute)
+- [License](#license)
+
+## Install
+
+```bash
+$ npm install --save ipfs-message-port-client
+```
+
+## Usage
+
+This client library works with an IPFS node over a [message channel][] and assumes
+that the node is exposed via `ipfs-message-port-server` on the other end.
+
+It provides the following API subset:
+
+- [`ipfs.dag`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/DAG.md)
+- [`ipfs.block`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/BLOCK.md)
+- [`ipfs.add`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsadddata-options)
+- [`ipfs.addAll`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsaddallsource-options)
+- [`ipfs.cat`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfscatipfspath-options)
+- [`ipfs.files.stat`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsfilesstatpath-options)
+
+A client can be instantiated from a [`MessagePort`][] instance. The primary
+goal of this library is to allow sharing a node across browsing contexts (tabs,
+iframes), so most commonly `ipfs-message-port-server` will be part of a separate
+JS bundle loaded in a [SharedWorker][].
+
+
+```js
+const IPFSClient = require('ipfs-message-port-client')
+// URL to the script containing ipfs-message-port-server.
+const IPFS_SERVER_URL = '/bundle/ipfs-worker.js'
+
+const main = async () => {
+ const worker = new SharedWorker(IPFS_SERVER_URL)
+ const ipfs = IPFSClient.from(worker.port)
+ const data = ipfs.cat('/ipfs/QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+
+ for await (const chunk of data) {
+ console.log(chunk)
+ }
+}
+```
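+
+The script at `IPFS_SERVER_URL` is expected to run `ipfs-message-port-server` on
+the other end of the port. A minimal sketch of such a worker script (assuming a
+bundled js-ipfs node; a more careful version would queue connections that arrive
+while the node is still starting):
+
+```js
+// ipfs-worker.js (the SharedWorker script) - a sketch
+const IPFS = require('ipfs')
+const { Server, IPFSService } = require('ipfs-message-port-server')
+
+const main = async () => {
+  const ipfs = await IPFS.create()
+  const server = new Server(new IPFSService(ipfs))
+  // Every tab that opens this SharedWorker gets connected to the same node.
+  self.onconnect = event => server.connect(event.ports[0])
+}
+
+main()
+```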
+
+It is also possible to instantiate a detached client, which can be attached to
+the server later on. This is useful when the server port is received via a
+message from another JS context (e.g. an iframe).
+
+> Note: A detached client will queue all API calls and only execute them once it is
+> attached (unless they time out or are aborted in the meantime).
+
+```js
+const IPFSClient = require('ipfs-message-port-client')
+
+
+const ipfs = IPFSClient.detached()
+
+const main = async () => {
+ const data = ipfs.cat('/ipfs/QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+
+ for await (const chunk of data) {
+ console.log(chunk)
+ }
+}
+
+window.onload = main
+window.onmessage = ({ports}) => {
+ IPFSClient.attach(ipfs, ports[0])
+}
+```
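+
+The message carrying the port typically comes from the context that owns the
+worker, e.g. an embedding page handing the shared node over to an iframe. A
+sketch of that sending side (hypothetical embedder code, not part of this
+package):
+
+```js
+// Embedding page: forward the shared node's port into an iframe.
+const worker = new SharedWorker('/bundle/ipfs-worker.js')
+const frame = document.querySelector('iframe')
+
+frame.addEventListener('load', () => {
+  // MessagePort is Transferable, so it is moved (not copied) into the iframe.
+  frame.contentWindow.postMessage('ipfs-port', '*', [worker.port])
+})
+```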
+
+### Notes on Performance
+
+Since the client talks to the IPFS node over a [message channel][], all the data passed
+is copied via the [structured cloning algorithm][], which may lead to suboptimal
+results (especially with large binary data). In order to avoid unnecessary
+copying, all API options have been extended with an optional `transfer` property
+that can be supplied a list of [Transferable][]s, which will be used to move the
+corresponding values instead of copying them.
+
+> **Note:** Transferring data will empty it on the sender side, which can lead to
+> errors if that data is used again later. That is why transferring is opt-in via the
+> `transfer` option, so the user can explicitly give up a reference when it is safe to do so.
+
+```js
+/**
+ * @param {Uint8Array} data - Large data chunk
+ */
+const example = async (data) => {
+ // Passing `data.buffer` will cause underlying `ArrayBuffer` to be
+ // transferred emptying `data` in JS context.
+ ipfs.add(data, { transfer: [data.buffer] })
+}
+```
+
+It is, however, recommended to prefer web-native [Blob][] / [File][] instances, as
+most web APIs provide them and they can be sent across contexts without copying the
+underlying memory.
+
+```js
+const example = async (url) => {
+ const request = await fetch(url)
+ const blob = await request.blob()
+ ipfs.add(blob)
+}
+```
+
+[message channel]:https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel
+[SharedWorker]:https://developer.mozilla.org/en-US/docs/Web/API/SharedWorker
+[`MessagePort`]:https://developer.mozilla.org/en-US/docs/Web/API/MessagePort
+[structured cloning algorithm]:https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
+[Transferable]:https://developer.mozilla.org/en-US/docs/Web/API/Transferable
+[Blob]:https://developer.mozilla.org/en-US/docs/Web/API/Blob/Blob
+[File]:https://developer.mozilla.org/en-US/docs/Web/API/File
+
+
+## Contribute
+
+Contributions welcome. Please check out [the issues](https://github.com/ipfs/js-ipfs/issues).
+
+Check out our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general. Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md).
+
+## License
+
+[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fipfs%2Fjs-ipfs.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fipfs%2Fjs-ipfs?ref=badge_large)
diff --git a/packages/ipfs-message-port-client/package.json b/packages/ipfs-message-port-client/package.json
new file mode 100644
index 0000000000..350a97ffec
--- /dev/null
+++ b/packages/ipfs-message-port-client/package.json
@@ -0,0 +1,56 @@
+{
+ "name": "ipfs-message-port-client",
+ "version": "0.0.1",
+ "description": "IPFS client library for accessing IPFS node over message port",
+ "keywords": [
+ "ipfs",
+ "message-port",
+ "worker"
+ ],
+ "homepage": "https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs-message-port-client#readme",
+ "bugs": "https://github.com/ipfs/js-ipfs/issues",
+ "license": "(Apache-2.0 OR MIT)",
+ "leadMaintainer": "Alex Potsides ",
+ "files": [
+ "src",
+ "dist"
+ ],
+ "main": "src/index.js",
+ "browser": {},
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/ipfs/js-ipfs.git"
+ },
+ "scripts": {
+ "test": "npm run test:browser",
+ "test:browser": "npm run build:test-worker && aegir test -t browser",
+ "test:chrome": "aegir test -t browser -- --browsers ChromeHeadless",
+ "test:firefox": "aegir test -t browser -- --browsers FirefoxHeadless",
+ "lint": "aegir lint",
+ "build:test-worker": "aegir build -- --config ./test/util/webpack.config.js",
+ "build": "aegir build",
+ "coverage": "npx nyc -r html npm run test:node -- --bail",
+ "clean": "rm -rf ./dist",
+ "dep-check": "aegir dep-check"
+ },
+ "dependencies": {
+ "cids": "^0.8.3",
+ "browser-readablestream-to-it": "0.0.1"
+ },
+ "devDependencies": {
+ "ipfs-message-port-protocol": "~0.0.1",
+ "ipfs-message-port-server": "~0.0.1",
+ "ipld-dag-pb": "^0.19.0",
+ "ipfs": "^0.48.0",
+ "aegir": "^23.0.0",
+ "cross-env": "^7.0.0",
+ "interface-ipfs-core": "^0.138.0"
+ },
+ "engines": {
+ "node": ">=10.3.0",
+ "npm": ">=3.0.0"
+ },
+ "contributors": [
+ "Irakli Gozalishvili "
+ ]
+}
\ No newline at end of file
diff --git a/packages/ipfs-message-port-client/src/block.js b/packages/ipfs-message-port-client/src/block.js
new file mode 100644
index 0000000000..ef8fba8636
--- /dev/null
+++ b/packages/ipfs-message-port-client/src/block.js
@@ -0,0 +1,154 @@
+'use strict'
+
+const { Client } = require('./client')
+const { encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid')
+const { decodeError } = require('ipfs-message-port-protocol/src/error')
+const {
+ encodeBlock,
+ decodeBlock
+} = require('ipfs-message-port-protocol/src/block')
+
+/**
+ * @typedef {import('cids')} CID
+ * @typedef {import('ipfs-message-port-server/src/block').Block} Block
+ * @typedef {import('ipfs-message-port-server/src/block').EncodedBlock} EncodedBlock
+ * @typedef {import('ipfs-message-port-server/src/block').Rm} EncodedRmEntry
+ * @typedef {import('ipfs-message-port-server/src/block').BlockService} BlockService
+ * @typedef {import('./client').ClientTransport} Transport
+ */
+
+/**
+ * @class
+ * @extends {Client<BlockService>}
+ */
+class BlockClient extends Client {
+ /**
+ * @param {Transport} transport
+ */
+ constructor (transport) {
+ super('block', ['put', 'get', 'rm', 'stat'], transport)
+ }
+
+ /**
+ * Get a raw IPFS block.
+ * @param {CID} cid - A CID that corresponds to the desired block
+ * @param {Object} [options]
+ * @param {number} [options.timeout] - A timeout in ms
+ * @param {AbortSignal} [options.signal] - Can be used to cancel any long
+ * running requests started as a result of this call
+ * @param {Transferable[]} [options.transfer] - References to transfer to the
+ * worker if passed.
+ * @returns {Promise<Block>}
+ */
+ async get (cid, options = {}) {
+ const { transfer } = options
+ const { block } = await this.remote.get({
+ ...options,
+ cid: encodeCID(cid, transfer)
+ })
+ return decodeBlock(block)
+ }
+
+ /**
+ * Stores input as an IPFS block.
+ * @param {Block|Uint8Array} block - A Block or Uint8Array of block data
+ * @param {Object} [options]
+ * @param {CID} [options.cid] - A CID to store the block under (if block is
+ * `Uint8Array`)
+ * @param {string} [options.format='dag-pb'] - The codec to use to create the
+ * CID (if block is `Uint8Array`)
+ * @param {string} [options.mhtype='sha2-256'] - The hashing algorithm to use
+ * to create the CID (if block is `Uint8Array`)
+ * @param {0|1} [options.version=0] - The version to use to create the CID
+ * (if block is `Uint8Array`)
+ * @param {number} [options.mhlen]
+ * @param {boolean} [options.pin=false] - If true, pin added blocks recursively
+ * @param {number} [options.timeout] - A timeout in ms
+ * @param {AbortSignal} [options.signal] - Can be used to cancel any long
+ * running requests started as a result of this call
+ * @param {Transferable[]} [options.transfer] - References to transfer to the
+ * worker if passed.
+ * @returns {Promise<Block>}
+ */
+ async put (block, options = {}) {
+ const { transfer } = options
+ // @ts-ignore - ipfs-unixfs-importer passes `progress` which causes errors
+ // because functions can't be transferred.
+ delete options.progress
+ const result = await this.remote.put({
+ ...options,
+ cid: options.cid == null ? undefined : encodeCID(options.cid, transfer),
+ block: block instanceof Uint8Array ? block : encodeBlock(block, transfer)
+ })
+ return decodeBlock(result.block)
+ }
+
+ /**
+ * Remove one or more IPFS block(s).
+ * @param {CID|CID[]} cids - Block(s) to be removed
+ * @param {Object} [options]
+ * @param {boolean} [options.force=false] - Ignores nonexistent blocks
+ * @param {boolean} [options.quiet=false] - Write minimal output
+ * @param {number} [options.timeout] - A timeout in ms
+ * @param {AbortSignal} [options.signal] - Can be used to cancel any long
+ * running requests started as a result of this call
+ * @param {Transferable[]} [options.transfer] - References to transfer to the
+ * worker if passed.
+ * @returns {AsyncIterable<RmEntry>}
+ *
+ * @typedef {Object} RmEntry
+ * @property {CID} cid
+ * @property {Error|void} [error]
+ */
+ async * rm (cids, options = {}) {
+ const { transfer } = options
+ const entries = await this.remote.rm({
+ ...options,
+ cids: Array.isArray(cids)
+ ? cids.map(cid => encodeCID(cid, transfer))
+ : [encodeCID(cids, transfer)]
+ })
+
+ yield * entries.map(decodeRmEntry)
+ }
+
+ /**
+ * Returns information about a raw IPFS block.
+ * @param {CID} cid - Block to get information about.
+ * @param {Object} [options]
+ * @param {number} [options.timeout] - A timeout in ms
+ * @param {AbortSignal} [options.signal] - Can be used to cancel any long
+ * running requests started as a result of this call
+ * @param {Transferable[]} [options.transfer] - References to transfer to the
+ * worker if passed.
+ * @returns {Promise<Stat>}
+ *
+ * @typedef {Object} Stat
+ * @property {CID} cid
+ * @property {number} size
+ */
+ async stat (cid, options = {}) {
+ const { transfer } = options
+ const result = await this.remote.stat({
+ ...options,
+ cid: encodeCID(cid, transfer)
+ })
+
+ return { ...result, cid: decodeCID(result.cid) }
+ }
+}
+
+/**
+ * @param {EncodedRmEntry} entry
+ * @returns {RmEntry}
+ */
+const decodeRmEntry = entry => {
+ const cid = decodeCID(entry.cid)
+ if (entry.error) {
+ return { cid, error: decodeError(entry.error) }
+ } else {
+ return { cid }
+ }
+}
+
+module.exports = BlockClient
diff --git a/packages/ipfs-message-port-client/src/client.js b/packages/ipfs-message-port-client/src/client.js
new file mode 100644
index 0000000000..a15ae0241c
--- /dev/null
+++ b/packages/ipfs-message-port-client/src/client.js
@@ -0,0 +1,359 @@
+'use strict'
+
+/* eslint-env browser */
+const { decodeError } = require('ipfs-message-port-protocol/src/error')
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').Remote} Remote
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').ProcedureNames} ProcedureNames
+ */
+
+/**
+ * @typedef {Object} QueryOptions
+ * @property {AbortSignal} [signal]
+ * @property {number} [timeout]
+ * @property {Transferable[]} [transfer]
+ */
+
+/**
+ * @template I
+ * @typedef {I & QueryOptions} QueryInput
+ */
+
+/**
+ * Represents server query, encapsulating inputs to the server endpoint and
+ * promise of it's result.
+ *
+ * @template I,O
+ * @class
+ */
+class Query {
+ /**
+ * @param {string} namespace - component namespace on the server.
+ * @param {string} method - remote method this is a query of.
+ * @param {QueryInput} input - query input.
+ */
+ constructor (namespace, method, input) {
+ /** @type {Promise<O>} */
+ this.result = new Promise((resolve, reject) => {
+ this.succeed = resolve
+ this.fail = reject
+ this.signal = input.signal
+ this.input = input
+ this.namespace = namespace
+ this.method = method
+ this.timeout = input.timeout == null ? Infinity : input.timeout
+ /** @type {number|null} */
+ this.timerID = null
+ })
+ }
+
+ /**
+ * Data that will be structure cloned over message channel.
+ * @returns {Object}
+ */
+ toJSON () {
+ return this.input
+ }
+
+ /**
+ * Data that will be transferred over message channel.
+ * @returns {Transferable[]}
+ */
+ transfer () {
+ return this.input.transfer
+ }
+}
+
+/** @typedef {Transport} ClientTransport */
+
+/**
+ * RPC Transport over `MessagePort` that can execute queries. It takes care of
+ * executing queries by issuing a message with a unique ID and fulfilling a
+ * query when the corresponding response message is received. It also makes sure
+ * that aborted / timed out queries are cancelled as needed.
+ *
+ * It is expected that there will be at most one transport for a message port
+ * instance.
+ * @class
+ */
+class Transport {
+ /**
+ * Create transport for the underlying message port.
+ * @param {MessagePort} [port]
+ */
+ constructor (port) {
+ this.port = null
+ // Assigning a random enough identifier to the transport, to ensure that
+ // query.id will be unique when multiple tabs are communicating with a
+ // server in the SharedWorker.
+ this.id = Math.random()
+ .toString(32)
+ .slice(2)
+
+ // Local unique id on the transport which is incremented for each query.
+ this.nextID = 0
+
+ // Dictionary of pending requests
+ /** @type {Record>} */
+ this.queries = Object.create(null)
+
+ // If port is provided connect this transport to it. If not transport can
+ // queue queries and execute those once it's connected.
+ if (port) {
+ this.connect(port)
+ }
+ }
+
+ /**
+ * Executes the given query with this transport and returns a promise for its
+ * result. Promise fails with an error if query fails.
+ *
+ * @template I, O
+ * @param {Query<I, O>} query
+ * @returns {Promise<O>}
+ */
+ execute (query) {
+ const id = `${this.id}@${this.nextID++}`
+ this.queries[id] = query
+
+ // If query has a timeout set a timer.
+ if (query.timeout > 0 && query.timeout < Infinity) {
+ query.timerID = setTimeout(Transport.timeout, query.timeout, this, id)
+ }
+
+ if (query.signal) {
+ query.signal.addEventListener('abort', () => this.abort(id), {
+ once: true
+ })
+ }
+
+ // If transport is connected (it has port) post a query, otherwise it
+ // will remain in the pending queries queue.
+ if (this.port) {
+ Transport.postQuery(this.port, id, query)
+ }
+
+ return query.result
+ }
+
+ /**
+ * Connects this transport to the given message port. Throws `Error` if
+ * transport is already connected. All the pending queries will be executed
+ * as connection occurs.
+ *
+ * @param {MessagePort} port
+ */
+ connect (port) {
+ if (this.port) {
+ throw new Error('Transport is already open')
+ } else {
+ this.port = port
+ this.port.addEventListener('message', this)
+ this.port.start()
+
+ // Go over pending queries (that were submitted before transport was
+ // connected) and post them. This loop is safe because messages will not
+ // arrive while this loop is running so no mutation can occur.
+ for (const [id, query] of Object.entries(this.queries)) {
+ Transport.postQuery(port, id, query)
+ }
+ }
+ }
+
+ /**
+ * Disconnects this transport. This will cause all the pending queries
+ * to be aborted and the underlying message port to be closed.
+ *
+ * Once disconnected transport can not be reconnected back.
+ */
+ disconnect () {
+ const error = new DisconnectError()
+ for (const [id, query] of Object.entries(this.queries)) {
+ query.fail(error)
+ this.abort(id)
+ }
+
+ // Note that the reference to the port is kept, which ensures that an attempt
+ // to reconnect will throw an error.
+ if (this.port) {
+ this.port.removeEventListener('message', this)
+ this.port.close()
+ }
+ }
+
+ /**
+ * Invoked on query timeout. If query is still pending it will fail and
+ * an abort message will be sent to the server.
+ *
+ * @param {Transport} self
+ * @param {string} id
+ */
+ static timeout (self, id) {
+ const { queries } = self
+ const query = queries[id]
+ if (query) {
+ delete queries[id]
+ query.fail(new TimeoutError('request timed out'))
+ if (self.port) {
+ self.port.postMessage({ type: 'abort', id })
+ }
+ }
+ }
+
+ /**
+ * Aborts this query by failing with `AbortError` and sending an abort message
+ * to the server. If query is no longer pending this has no effect.
+ * @param {string} id
+ */
+ abort (id) {
+ const { queries } = this
+ const query = queries[id]
+ if (query) {
+ delete queries[id]
+
+ query.fail(new AbortError())
+ if (this.port) {
+ this.port.postMessage({ type: 'abort', id })
+ }
+
+ if (query.timerID != null) {
+ clearTimeout(query.timerID)
+ }
+ }
+ }
+
+ /**
+ * Sends a given `query` with a given `id` over the message channel.
+ * @param {MessagePort} port
+ * @param {string} id
+ * @param {Query} query
+ */
+ static postQuery (port, id, query) {
+ port.postMessage(
+ {
+ type: 'query',
+ namespace: query.namespace,
+ method: query.method,
+ id,
+ input: query.toJSON()
+ },
+ query.transfer()
+ )
+ }
+
+ /**
+ * Handler is invoked when message on the message port is received.
+ * @param {MessageEvent} event
+ */
+ handleEvent (event) {
+ const { id, result } = event.data
+ const query = this.queries[id]
+ // If a query with the given ID is found it is completed with the result,
+ // otherwise it is cancelled.
+ // Note: query may not be found when it was aborted on the client and at the
+ // same time server posted response.
+ if (query) {
+ delete this.queries[id]
+ if (result.ok) {
+ query.succeed(result.value)
+ } else {
+ query.fail(decodeError(result.error))
+ }
+
+ if (query.timerID != null) {
+ clearTimeout(query.timerID)
+ }
+ }
+ }
+}
+exports.Transport = Transport
+
+/**
+ * @template T
+ * @typedef {Array} Keys
+ */
+
+/**
+ * @template T
+ * @typedef {Remote & Service} RemoteService
+ */
+
+/**
+ * Service represents an API to a remote service `T`. It will have all the
+ * methods with the same signatures as `T`.
+ *
+ * @class
+ * @template T
+ */
+class Service {
+ /**
+ * @param {string} namespace - Namespace that remote API is served under.
+ * @param {ProcedureNames} methods - Method names of the remote API.
+ * @param {Transport} transport - Transport to issue queries over.
+ */
+ constructor (namespace, methods, transport) {
+ this.transport = transport
+ // TypeScript does not like using classes as dictionaries, so
+ // we explicitly type it as dictionary.
+ /** @type {Object., Function>} */
+ const api = this
+ for (const method of methods) {
+ /**
+ * @template I, O
+ * @param {I} input
+ * @returns {Promise}
+ */
+ api[method] = input =>
+ this.transport.execute(new Query(namespace, method.toString(), input))
+ }
+ }
+}
+
+/**
+ * Client represents the client to a remote `T` service. It is a base class that
+ * specific API clients will subclass to provide a higher level API for the end
+ * user. Client implementations take care of encoding arguments into queries
+ * and issuing those to the `remote` service.
+ *
+ * @class
+ * @template T
+ */
+class Client {
+ /**
+ * @param {string} namespace
+ * @param {ProcedureNames} methods
+ * @param {Transport} transport
+ */
+ constructor (namespace, methods, transport) {
+ /** @type {RemoteService} */
+ this.remote = (new Service(namespace, methods, transport))
+ }
+}
+exports.Client = Client
+
+class TimeoutError extends Error {
+ get name () {
+ return this.constructor.name
+ }
+}
+exports.TimeoutError = TimeoutError
+
+class AbortError extends Error {
+ get name () {
+ return this.constructor.name
+ }
+}
+exports.AbortError = AbortError
+
+class DisconnectError extends Error {
+ get name () {
+ return this.constructor.name
+ }
+}
+exports.DisconnectError = DisconnectError
diff --git a/packages/ipfs-message-port-client/src/core.js b/packages/ipfs-message-port-client/src/core.js
new file mode 100644
index 0000000000..603a63f168
--- /dev/null
+++ b/packages/ipfs-message-port-client/src/core.js
@@ -0,0 +1,461 @@
+'use strict'
+
+/* eslint-env browser */
+
+const { Client } = require('./client')
+const { encodeCID, decodeCID, CID } = require('ipfs-message-port-protocol/src/cid')
+const {
+ decodeIterable,
+ encodeIterable,
+ encodeCallback
+} = require('ipfs-message-port-protocol/src/core')
+/** @type { (stream:ReadableStream) => AsyncIterable} */
+// @ts-ignore - browser-readablestream-to-it has no types
+const iterateReadableStream = require('browser-readablestream-to-it')
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/core').RemoteIterable} RemoteIterable
+ */
+/**
+ * @typedef {import('ipfs-message-port-protocol/src/data').Time} Time
+ * @typedef {import('ipfs-message-port-protocol/src/data').UnixFSTime} UnixFSTime
+ * @typedef {import('ipfs-message-port-protocol/src/dag').EncodedCID} EncodedCID
+ * @typedef {import('ipfs-message-port-server/src/core').SingleFileInput} EncodedAddInput
+ * @typedef {import('ipfs-message-port-server/src/core').MultiFileInput} EncodedAddAllInput
+ * @typedef {import('ipfs-message-port-server/src/core').FileInput} FileInput
+ * @typedef {import('ipfs-message-port-server/src/core').FileContent} EncodedFileContent
+ *
+ * @typedef {Object} NoramilzedFileInput
+ * @property {string} path
+ * @property {AsyncIterable} content
+ *
+ * @typedef {ArrayBuffer|ArrayBufferView} Bytes
+ *
+ * @typedef {Blob|Bytes|string|Iterable|Iterable|AsyncIterable} FileContent
+ *
+ * @typedef {Object} FileObject
+ * @property {string} [path]
+ * @property {FileContent} [content]
+ * @property {string|number} [mode]
+ * @property {UnixFSTime} [mtime]
+ *
+ *
+ * @typedef {Blob|Bytes|string|FileObject|Iterable|Iterable|AsyncIterable|ReadableStream} AddInput
+ *
+ * @typedef {Iterable|AsyncIterable} AddAllInput
+ */
+
+/**
+ * @typedef {import('ipfs-message-port-server/src/core').CoreService} CoreService
+ * @typedef {import('ipfs-message-port-server/src/core').AddedEntry} AddedEntry
+ * @typedef {import('./client').ClientTransport} Transport
+ */
+
+/**
+ * @class
+ * @extends {Client<CoreService>}
+ */
+class CoreClient extends Client {
+ /**
+ * @param {Transport} transport
+ */
+ constructor (transport) {
+ super('core', ['add', 'cat'], transport)
+ }
+
+ /**
+ * Import files and data into IPFS.
+ *
+ * If you pass binary data like `Uint8Array` it is recommended to provide
+ * `transfer: [input.buffer]` which would allow transferring it instead of
+ * copying.
+ *
+ * @param {AddAllInput} input
+ * @param {Object} [options]
+ * @param {string} [options.chunker="size-262144"]
+ * @param {number} [options.cidVersion=0]
+ * @param {boolean} [options.enableShardingExperiment]
+ * @param {string} [options.hashAlg="sha2-256"]
+ * @param {boolean} [options.onlyHash=false]
+ * @param {boolean} [options.pin=true]
+ * @param {function(number):void} [options.progress]
+ * @param {boolean} [options.rawLeaves=false]
+ * @param {number} [options.shardSplitThreshold=1000]
+ * @param {boolean} [options.trickle=false]
+ * @param {boolean} [options.wrapWithDirectory=false]
+ * @param {number} [options.timeout]
+ * @param {Transferable[]} [options.transfer]
+ * @param {AbortSignal} [options.signal]
+ * @returns {AsyncIterable<AddedData>}
+ *
+ * @typedef {Object} AddedData
+ * @property {string} path
+ * @property {CID} cid
+ * @property {number} mode
+ * @property {number} size
+ * @property {Time} mtime
+ */
+ async * addAll (input, options = {}) {
+ const { timeout, signal } = options
+ const transfer = [...(options.transfer || [])]
+ const progress = options.progress
+ ? encodeCallback(options.progress, transfer)
+ : undefined
+
+ const result = await this.remote.addAll({
+ ...options,
+ input: encodeAddAllInput(input, transfer),
+ progress,
+ transfer,
+ timeout,
+ signal
+ })
+ yield * decodeIterable(result.data, decodeAddedData)
+ }
+
+ /**
+ * Add file to IPFS.
+ *
+ * If you pass binary data like `Uint8Array` it is recommended to provide
+ * `transfer: [input.buffer]` which would allow transferring it instead of
+ * copying.
+ *
+ * @param {AddInput} input
+ * @param {Object} [options]
+ * @param {string} [options.chunker="size-262144"]
+ * @param {number} [options.cidVersion=0]
+ * @param {boolean} [options.enableShardingExperiment]
+ * @param {string} [options.hashAlg="sha2-256"]
+ * @param {boolean} [options.onlyHash=false]
+ * @param {boolean} [options.pin=true]
+ * @param {function(number):void} [options.progress]
+ * @param {boolean} [options.rawLeaves=false]
+ * @param {number} [options.shardSplitThreshold=1000]
+ * @param {boolean} [options.trickle=false]
+ * @param {boolean} [options.wrapWithDirectory=false]
+ * @param {number} [options.timeout]
+ * @param {Transferable[]} [options.transfer]
+ * @param {AbortSignal} [options.signal]
+ * @returns {Promise<AddedData>}
+ */
+ async add (input, options = {}) {
+ const { timeout, signal } = options
+ const transfer = [...(options.transfer || [])]
+ const progress = options.progress
+ ? encodeCallback(options.progress, transfer)
+ : undefined
+
+ const result = await this.remote.add({
+ ...options,
+ input: encodeAddInput(input, transfer),
+ progress,
+ transfer,
+ timeout,
+ signal
+ })
+
+ return decodeAddedData(result.data)
+ }
+
+ /**
+ * Returns content addressed by a valid IPFS Path.
+ * @param {string|CID} inputPath
+ * @param {Object} [options]
+ * @param {number} [options.offset]
+ * @param {number} [options.length]
+ * @param {number} [options.timeout]
+ * @param {AbortSignal} [options.signal]
+ * @returns {AsyncIterable<Uint8Array>}
+ */
+ async * cat (inputPath, options = {}) {
+ const input = CID.isCID(inputPath) ? encodeCID(inputPath) : inputPath
+ const result = await this.remote.cat({ ...options, path: input })
+ yield * decodeIterable(result.data, identity)
+ }
+}
+
+/**
+ * Decodes values yield by `ipfs.add`.
+ * @param {AddedEntry} data
+ * @returns {AddedData}
+ */
+const decodeAddedData = ({ path, cid, mode, mtime, size }) => {
+ return {
+ path,
+ cid: decodeCID(cid),
+ mode,
+ mtime,
+ size
+ }
+}
+
+/**
+ * @template T
+ * @param {T} v
+ * @returns {T}
+ */
+const identity = (v) => v
+
+/**
+ * Encodes input passed to the `ipfs.add` via the best possible strategy for the
+ * given input.
+ *
+ * @param {AddInput} input
+ * @param {Transferable[]} transfer
+ * @returns {EncodedAddInput}
+ */
+const encodeAddInput = (input, transfer) => {
+ // We want to get a Blob as input. If we got it we're set.
+ if (input instanceof Blob) {
+ return input
+ } else if (typeof input === 'string') {
+ return input
+ } else if (input instanceof ArrayBuffer) {
+ return input
+ } else if (ArrayBuffer.isView(input)) {
+ // Note we are not adding `input.buffer` to the transfer list, that is up to the user.
+ return input
+ } else {
+ // If input is (async) iterable or `ReadableStream` or "FileObject" it will
+ // be encoded via own specific encoder.
+ const iterable = asIterable(input)
+ if (iterable) {
+ return encodeIterable(iterable, encodeIterableContent, transfer)
+ }
+
+ const asyncIterable = asAsyncIterable(input)
+ if (asyncIterable) {
+ return encodeIterable(
+ asyncIterable,
+ encodeAsyncIterableContent,
+ transfer
+ )
+ }
+
+ const readableStream = asReadableStream(input)
+ if (readableStream) {
+ return encodeIterable(
+ iterateReadableStream(readableStream),
+ encodeAsyncIterableContent,
+ transfer
+ )
+ }
+
+ const file = asFileObject(input)
+ if (file) {
+ return encodeFileObject(file, transfer)
+ }
+
+ throw TypeError('Unexpected input: ' + typeof input)
+ }
+}
+
+/**
+ * Encodes input passed to the `ipfs.add` via the best possible strategy for the
+ * given input.
+ *
+ * @param {AddAllInput} input
+ * @param {Transferable[]} transfer
+ * @returns {EncodedAddAllInput}
+ */
+const encodeAddAllInput = (input, transfer) => {
+ // If input is (async) iterable or `ReadableStream` or "FileObject" it will
+ // be encoded via own specific encoder.
+ const iterable = asIterable(input)
+ if (iterable) {
+ return encodeIterable(iterable, encodeIterableContent, transfer)
+ }
+
+ const asyncIterable = asAsyncIterable(input)
+ if (asyncIterable) {
+ return encodeIterable(
+ asyncIterable,
+ encodeAsyncIterableContent,
+ transfer
+ )
+ }
+
+ const readableStream = asReadableStream(input)
+ if (readableStream) {
+ return encodeIterable(
+ iterateReadableStream(readableStream),
+ encodeAsyncIterableContent,
+ transfer
+ )
+ }
+
+ throw TypeError('Unexpected input: ' + typeof input)
+}
+
+/**
+ * Encodes an individual item of an `AsyncIterable` by choosing the most
+ * effective strategy.
+ * @param {ArrayBuffer|ArrayBufferView|Blob|string|FileObject} content
+ * @param {Transferable[]} transfer
+ * @returns {FileInput|ArrayBuffer|ArrayBufferView}
+ */
+const encodeAsyncIterableContent = (content, transfer) => {
+ if (content instanceof ArrayBuffer) {
+ return content
+ } else if (ArrayBuffer.isView(content)) {
+ return content
+ } else if (content instanceof Blob) {
+ return { path: '', content }
+ } else if (typeof content === 'string') {
+ return { path: '', content }
+ } else {
+ const file = asFileObject(content)
+ if (file) {
+ return encodeFileObject(file, transfer)
+ } else {
+ throw TypeError('Unexpected input: ' + typeof content)
+ }
+ }
+}
+
+/**
+ * @param {number|Bytes|Blob|string|FileObject} content
+ * @param {Transferable[]} transfer
+ * @returns {FileInput|ArrayBuffer|ArrayBufferView}
+ */
+const encodeIterableContent = (content, transfer) => {
+ if (typeof content === 'number') {
+ throw TypeError('Iterable of numbers is not supported')
+ } else if (content instanceof ArrayBuffer) {
+ return content
+ } else if (ArrayBuffer.isView(content)) {
+ return content
+ } else if (content instanceof Blob) {
+ return { path: '', content }
+ } else if (typeof content === 'string') {
+ return { path: '', content }
+ } else {
+ const file = asFileObject(content)
+ if (file) {
+ return encodeFileObject(file, transfer)
+ } else {
+ throw TypeError('Unexpected input: ' + typeof content)
+ }
+ }
+}
+
+/**
+ * @param {FileObject} file
+ * @param {Transferable[]} transfer
+ * @returns {FileInput}
+ */
+const encodeFileObject = ({ path, mode, mtime, content }, transfer) => {
+ return {
+ path,
+ mode,
+ mtime,
+ content: encodeFileContent(content, transfer)
+ }
+}
+
+/**
+ *
+ * @param {FileContent} [content]
+ * @param {Transferable[]} transfer
+ * @returns {EncodedFileContent}
+ */
+const encodeFileContent = (content, transfer) => {
+ if (content == null) {
+ return ''
+ } else if (content instanceof ArrayBuffer || ArrayBuffer.isView(content)) {
+ return content
+ } else if (content instanceof Blob) {
+ return content
+ } else {
+ const iterable = asIterable(content)
+ if (iterable) {
+ return encodeIterable(iterable, encodeIterableContent, transfer)
+ }
+
+ const asyncIterable = asAsyncIterable(content)
+ if (asyncIterable) {
+ return encodeIterable(
+ asyncIterable,
+ encodeAsyncIterableContent,
+ transfer
+ )
+ }
+
+ const readableStream = asReadableStream(content)
+ if (readableStream) {
+ return encodeIterable(
+ iterateReadableStream(readableStream),
+ encodeAsyncIterableContent,
+ transfer
+ )
+ }
+
+ throw TypeError('Unexpected input: ' + typeof content)
+ }
+}
+
+/**
+ * Pattern matches given input as `Iterable` and returns back either matched
+ * iterable or `null`.
+ * @template I
+ * @param {Iterable|AddInput|AddAllInput} input
+ * @returns {Iterable|null}
+ */
+const asIterable = (input) => {
+ /** @type {*} */
+ const object = input
+ if (object && typeof object[Symbol.iterator] === 'function') {
+ return object
+ } else {
+ return null
+ }
+}
+
+/**
+ * Pattern matches given `input` as `AsyncIterable` and returns back either
+ * matched `AsyncIterable` or `null`.
+ * @template I
+ * @param {AsyncIterable|AddInput|AddAllInput} input
+ * @returns {AsyncIterable|null}
+ */
+const asAsyncIterable = (input) => {
+ /** @type {*} */
+ const object = input
+ if (object && typeof object[Symbol.asyncIterator] === 'function') {
+ return object
+ } else {
+ return null
+ }
+}
+
+/**
+ * Pattern matches given `input` as `ReadableStream` and return back either
+ * matched input or `null`.
+ *
+ * @param {any} input
+ * @returns {ReadableStream|null}
+ */
+const asReadableStream = (input) => {
+ if (input && typeof input.getReader === 'function') {
+ return input
+ } else {
+ return null
+ }
+}
+
+/**
+ * Pattern matches given input as "FileObject" and returns back either the matched
+ * input or `null`.
+ * @param {*} input
+ * @returns {FileObject|null}
+ */
+const asFileObject = (input) => {
+ if (typeof input === 'object' && (input.path || input.content)) {
+ return input
+ } else {
+ return null
+ }
+}
+
+module.exports = CoreClient
diff --git a/packages/ipfs-message-port-client/src/dag.js b/packages/ipfs-message-port-client/src/dag.js
new file mode 100644
index 0000000000..48a4f84a02
--- /dev/null
+++ b/packages/ipfs-message-port-client/src/dag.js
@@ -0,0 +1,129 @@
+'use strict'
+
+const { Client } = require('./client')
+const { encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid')
+const { encodeNode, decodeNode } = require('ipfs-message-port-protocol/src/dag')
+
+/**
+ * @typedef {import('cids')} CID
+ * @typedef {import('ipfs-message-port-server/src/dag').EncodedCID} EncodedCID
+ * @typedef {import('ipfs-message-port-server/src/dag').DAGNode} DAGNode
+ * @typedef {import('ipfs-message-port-server/src/dag').EncodedDAGNode} EncodedDAGNode
+ * @typedef {import('ipfs-message-port-server/src/dag').DAGEntry} DAGEntry
+ * @typedef {import('ipfs-message-port-server/src/dag').DAGService} DagService
+ * @typedef {import('./client').ClientTransport} Transport
+ */
+
+/**
+ * @class
+ * @extends {Client<DagService>}
+ */
+class DAGClient extends Client {
+ /**
+ * @param {Transport} transport
+ */
+ constructor (transport) {
+ super('dag', ['put', 'get', 'resolve', 'tree'], transport)
+ }
+
+ /**
+ * @param {DAGNode} dagNode
+ * @param {Object} [options]
+ * @param {string} [options.format="dag-cbor"] - The IPLD format multicodec
+ * @param {string} [options.hashAlg="sha2-256"] - The hash algorithm to be used over the serialized DAG node
+ * @param {CID} [options.cid]
+ * @param {boolean} [options.pin=false] - Pin this node when adding to the blockstore
+ * @param {boolean} [options.preload=true]
+ * @param {Transferable[]} [options.transfer] - References to transfer to the
+ * @param {number} [options.timeout] - A timeout in ms
+ * @param {AbortSignal} [options.signal] - Can be used to cancel any long running requests started as a result of this call.
+ * @returns {Promise<CID>}
+ */
+ async put (dagNode, options = {}) {
+ const { cid } = options
+
+ const encodedCID = await this.remote.put({
+ ...options,
+ dagNode: encodeNode(dagNode, options.transfer),
+ cid: cid != null ? encodeCID(cid) : undefined
+ })
+
+ return decodeCID(encodedCID)
+ }
+
+ /**
+ * @param {CID} cid
+ * @param {Object} [options]
+ * @param {string} [options.path]
+ * @param {boolean} [options.localResolve]
+ * @param {number} [options.timeout]
+ * @param {Transferable[]} [options.transfer] - References to transfer to the
+ * @param {AbortSignal} [options.signal]
+ * @returns {Promise}
+ */
+ async get (cid, options = {}) {
+ const { value, remainderPath } = await this.remote.get({
+ ...options,
+ cid: encodeCID(cid, options.transfer)
+ })
+
+ return { value: decodeNode(value), remainderPath }
+ }
+
+ /**
+ * @typedef {Object} ResolveResult
+ * @property {CID} cid
+ * @property {string|void} remainderPath
+ *
+ * @param {CID} cid
+ * @param {Object} [options]
+ * @param {string} [options.path]
+ * @param {number} [options.timeout]
+ * @param {Transferable[]} [options.transfer] - References to transfer to the
+ * @param {AbortSignal} [options.signal]
+ * @returns {Promise<ResolveResult>}
+ */
+ async resolve (cid, options = {}) {
+ const { cid: encodedCID, remainderPath } = await this.remote.resolve({
+ ...options,
+ cid: encodeCIDOrPath(cid, options.transfer)
+ })
+
+ return { cid: decodeCID(encodedCID), remainderPath }
+ }
+
+ /**
+ * Enumerate all the entries in a graph
+ * @param {CID} cid - CID of the DAG node to enumerate
+ * @param {Object} [options]
+ * @param {string} [options.path]
+ * @param {boolean} [options.recursive]
+ * @param {Transferable[]} [options.transfer] - References to transfer to the
+ * @param {number} [options.timeout]
+ * @param {AbortSignal} [options.signal]
+ * @returns {AsyncIterable<string>}
+ */
+ async * tree (cid, options = {}) {
+ const paths = await this.remote.tree({
+ ...options,
+ cid: encodeCID(cid, options.transfer)
+ })
+
+ yield * paths
+ }
+}
+
+/**
+ * @param {string|CID} input
+ * @param {Transferable[]} [transfer]
+ * @returns {string|EncodedCID}
+ */
+const encodeCIDOrPath = (input, transfer) => {
+ if (typeof input === 'string') {
+ return input
+ } else {
+ return encodeCID(input, transfer)
+ }
+}
+
+module.exports = DAGClient
diff --git a/packages/ipfs-message-port-client/src/files.js b/packages/ipfs-message-port-client/src/files.js
new file mode 100644
index 0000000000..d274996849
--- /dev/null
+++ b/packages/ipfs-message-port-client/src/files.js
@@ -0,0 +1,76 @@
+'use strict'
+
+/* eslint-env browser */
+const { Client } = require('./client')
+const { decodeCID, CID } = require('ipfs-message-port-protocol/src/cid')
+
+/**
+ * @typedef {import('ipfs-message-port-server/src/files').FilesService} FilesService
+ * @typedef {import('ipfs-message-port-server/src/files').EncodedStat} EncodedStat
+ * @typedef {import('./client').ClientTransport} Transport
+ */
+
+/**
+ * @class
+ * @extends {Client}
+ */
+class FilesClient extends Client {
+ /**
+ * @param {Transport} transport
+ */
+ constructor (transport) {
+ super('files', ['stat'], transport)
+ }
+
+ /**
+ * @typedef {Object} Stat
+ * @property {CID} cid Content identifier.
+ * @property {number} size File size in bytes.
+ * @property {number} cumulativeSize Size of the DAGNodes making up the file in bytes.
+ * @property {"directory"|"file"} type
+ * @property {number} blocks Number of files making up the directory (when a directory)
+ * or number of blocks that make up the file (when a file)
+ * @property {boolean} withLocality True when locality information is present
+ * @property {boolean} local True if the queried dag is fully present locally
+ * @property {number} sizeLocal Cumulative size of the data present locally
+ *
+ * @param {string|CID} pathOrCID
+ * @param {Object} [options]
+ * @param {boolean} [options.hash=false] If true will only return hash
+ * @param {boolean} [options.size=false] If true will only return size
+ * @param {boolean} [options.withLocal=false] If true computes size of the dag that is local, and total size when possible
+ * @param {number} [options.timeout]
+ * @param {AbortSignal} [options.signal]
+ * @returns {Promise<Stat>}
+ */
+ async stat (pathOrCID, options = {}) {
+ const { size, hash, withLocal, timeout, signal } = options
+ const { stat } = await this.remote.stat({
+ path: encodeLocation(pathOrCID),
+ size,
+ hash,
+ withLocal,
+ timeout,
+ signal
+ })
+ return decodeStat(stat)
+ }
+}
+module.exports = FilesClient
+
+/**
+ * Turns content address (path or CID) into path.
+ * @param {string|CID} pathOrCID
+ * @returns {string}
+ */
+const encodeLocation = pathOrCID =>
+ CID.isCID(pathOrCID) ? `/ipfs/${pathOrCID.toString()}` : pathOrCID
+
+/**
+ *
+ * @param {EncodedStat} data
+ * @returns {Stat}
+ */
+const decodeStat = data => {
+ return { ...data, cid: decodeCID(data.cid) }
+}
diff --git a/packages/ipfs-message-port-client/src/index.js b/packages/ipfs-message-port-client/src/index.js
new file mode 100644
index 0000000000..bc174e49c1
--- /dev/null
+++ b/packages/ipfs-message-port-client/src/index.js
@@ -0,0 +1,61 @@
+'use strict'
+/* eslint-env browser */
+
+const { Transport } = require('./client')
+const BlockClient = require('./block')
+const DAGClient = require('./dag')
+const CoreClient = require('./core')
+const FilesClient = require('./files')
+
+/**
+ * @typedef {import('./client').Transport} ClientTransport
+ *
+ * @typedef {Object} ClientOptions
+ * @property {MessagePort} port
+ */
+
+class IPFSClient extends CoreClient {
+ /**
+ * @param {ClientTransport} transport
+ */
+ constructor (transport) {
+ super(transport)
+ this.transport = transport
+ this.dag = new DAGClient(this.transport)
+ this.files = new FilesClient(this.transport)
+ this.block = new BlockClient(this.transport)
+ }
+
+ /**
+ * Attaches IPFS client to the given message port. Throws
+ * exception if client is already attached.
+ * @param {IPFSClient} self
+ * @param {MessagePort} port
+ */
+ static attach (self, port) {
+ self.transport.connect(port)
+ }
+
+ /**
+ * Creates an IPFS client that is detached from the `ipfs-message-port-server`.
+ * This can be useful in a scenario where obtaining the message port happens
+ * later on in the application logic. A detached IPFS client will queue all the
+ * API calls and flush them once the client is attached.
+ * @returns {IPFSClient}
+ */
+ static detached () {
+ return new IPFSClient(new Transport(undefined))
+ }
+
+ /**
+ * Creates an IPFS client from the message port (assumes that
+ * `ipfs-message-port-server` is instantiated on the other end).
+ * @param {MessagePort} port
+ * @returns {IPFSClient}
+ */
+ static from (port) {
+ return new IPFSClient(new Transport(port))
+ }
+}
+
+module.exports = IPFSClient
diff --git a/packages/ipfs-message-port-client/test/interface.core.js b/packages/ipfs-message-port-client/test/interface.core.js
new file mode 100644
index 0000000000..a57b54ba55
--- /dev/null
+++ b/packages/ipfs-message-port-client/test/interface.core.js
@@ -0,0 +1,9 @@
+/* eslint-env mocha, browser */
+'use strict'
+
+const { createSuite } = require('interface-ipfs-core/src/utils/suite')
+
+exports.core = createSuite({
+ add: require('interface-ipfs-core/src/add'),
+ cat: require('interface-ipfs-core/src/cat')
+})
diff --git a/packages/ipfs-message-port-client/test/interface.spec.js b/packages/ipfs-message-port-client/test/interface.spec.js
new file mode 100644
index 0000000000..4d104983cd
--- /dev/null
+++ b/packages/ipfs-message-port-client/test/interface.spec.js
@@ -0,0 +1,134 @@
+/* eslint-env mocha, browser */
+'use strict'
+
+const tests = require('interface-ipfs-core')
+const { core } = require('./interface.core')
+const { activate } = require('./util/client')
+
+describe('interface-ipfs-core tests', () => {
+ const commonFactory = {
+ spawn () {
+ return { api: activate() }
+ },
+ clean () {}
+ }
+
+ tests.dag(commonFactory, {
+ skip: [
+ {
+ name: 'should get a dag-pb node',
+ reason: 'Nodes are not turned into dag-pb DAGNode instances'
+ },
+ {
+ name: 'should get a dag-pb node with path',
+ reason: 'Nodes are not turned into dag-pb DAGNode instances'
+ },
+ {
+ name: 'should get by CID string',
+ reason: 'Passing CID as strings is not supported'
+ },
+ {
+ name: 'should get by CID string + path',
+ reason: 'Passing CID as strings is not supported'
+ },
+ {
+ name: 'should get a node added as CIDv1 with a CIDv0',
+ reason: 'ipfs.block API is not implemented'
+ },
+ {
+ name: 'should be able to get part of a dag-cbor node',
+ reason: 'Passing CID as strings is not supported'
+ },
+ {
+ name: 'should get tree with CID and path as String',
+ reason: 'Passing CID as strings is not supported'
+ }
+ ]
+ })
+
+ core(commonFactory, {
+ skip: [
+ {
+ name: 'should add with only-hash=true',
+ reason: 'ipfs.object.get is not implemented'
+ },
+ {
+ name: 'should add a directory with only-hash=true',
+ reason: 'ipfs.object.get is not implemented'
+ },
+ {
+ name: 'should add with mtime as hrtime',
+ reason: 'process.hrtime is not a function in browser'
+ },
+ {
+ name: 'should add from a URL with only-hash=true',
+ reason: 'ipfs.object.get is not implemented'
+ },
+ {
+ name: 'should cat with a Buffer multihash',
+ reason: 'Passing CID as Buffer is not supported'
+ },
+ {
+ name: 'should add from a HTTP URL',
+ reason: 'https://github.com/ipfs/js-ipfs/issues/3195'
+ },
+ {
+ name: 'should add from a HTTP URL with redirection',
+ reason: 'https://github.com/ipfs/js-ipfs/issues/3195'
+ },
+ {
+ name: 'should add from a URL with only-hash=true',
+ reason: 'https://github.com/ipfs/js-ipfs/issues/3195'
+ },
+ {
+ name: 'should add from a URL with wrap-with-directory=true',
+ reason: 'https://github.com/ipfs/js-ipfs/issues/3195'
+ },
+ {
+ name: 'should add from a URL with wrap-with-directory=true and URL-escaped file name',
+ reason: 'https://github.com/ipfs/js-ipfs/issues/3195'
+ },
+ {
+ name: 'should not add from an invalid url',
+ reason: 'https://github.com/ipfs/js-ipfs/issues/3195'
+ }
+ ]
+ })
+
+ tests.block(commonFactory, {
+ skip: [
+ {
+ name: 'should get by CID in string',
+ reason: 'Passing CID as strings is not supported'
+ },
+ {
+ name: 'should return an error for an invalid CID',
+ reason: 'Passing CID as strings is not supported'
+ },
+ {
+ name: 'should put a buffer, using CID string',
+ reason: 'Passing CID as strings is not supported'
+ },
+ {
+ name: 'should put a buffer, using options',
+ reason: 'ipfs.pin.ls is not implemented'
+ },
+ {
+ name: 'should remove by CID object',
+ reason: 'ipfs.refs.local is not implemented'
+ },
+ {
+ name: 'should remove by CID in string',
+ reason: 'Passing CID as strings is not supported'
+ },
+ {
+ name: 'should remove by CID in buffer',
+ reason: 'Passing CID as Buffer is not supported'
+ },
+ {
+ name: 'should error when removing pinned blocks',
+ reason: 'ipfs.pin.add is not implemented'
+ }
+ ]
+ })
+})
diff --git a/packages/ipfs-message-port-client/test/util/client.js b/packages/ipfs-message-port-client/test/util/client.js
new file mode 100644
index 0000000000..a71e3e8a58
--- /dev/null
+++ b/packages/ipfs-message-port-client/test/util/client.js
@@ -0,0 +1,18 @@
+/* eslint-env browser */
+
+'use strict'
+
+const IPFSClient = require('../../src/index')
+
+const activate = () => {
+ const worker = new SharedWorker(process.env.IPFS_WORKER_URL, 'IPFSService')
+ const client = IPFSClient.from(worker.port)
+ return client
+}
+exports.activate = activate
+
+const detached = () => {
+ const client = IPFSClient.detached()
+ return client
+}
+exports.detached = detached
diff --git a/packages/ipfs-message-port-client/test/util/webpack.config.js b/packages/ipfs-message-port-client/test/util/webpack.config.js
new file mode 100644
index 0000000000..6613f75972
--- /dev/null
+++ b/packages/ipfs-message-port-client/test/util/webpack.config.js
@@ -0,0 +1,13 @@
+'use strict'
+
+const path = require('path')
+
+module.exports = {
+ mode: 'development',
+ devtool: 'source-map',
+ entry: [path.join(__dirname, './worker.js')],
+ output: {
+ path: path.join(__dirname, '../../dist/'),
+ filename: 'worker.bundle.js'
+ }
+}
diff --git a/packages/ipfs-message-port-client/test/util/worker.js b/packages/ipfs-message-port-client/test/util/worker.js
new file mode 100644
index 0000000000..93b81cc3a3
--- /dev/null
+++ b/packages/ipfs-message-port-client/test/util/worker.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const IPFS = require('ipfs')
+const { IPFSService, Server } = require('ipfs-message-port-server')
+
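+// Creates a js-ipfs node (offline, not started) and serves it over every
+// port that connects to this worker.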
+const main = async connections => {
+ const ipfs = await IPFS.create({ offline: true, start: false })
+ const service = new IPFSService(ipfs)
+ const server = new Server(service)
+
+ for await (const event of connections) {
+ const port = event.ports[0]
+ if (port) {
+ server.connect(port)
+ }
+ }
+}
+
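+// Turns events of the given `type` on `target` into an async iterable,
+// removing the listener once the consumer stops iterating.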
+const listen = function (target, type, options) {
+ const events = []
+ let resume
+ let ready = new Promise(resolve => (resume = resolve))
+
+ const write = event => {
+ events.push(event)
+ resume()
+ }
+ const read = async () => {
+ await ready
+ ready = new Promise(resolve => (resume = resolve))
+ return events.splice(0)
+ }
+
+ const reader = async function * () {
+ try {
+ while (true) {
+ yield * await read()
+ }
+ } finally {
+ target.removeEventListener(type, write, options)
+ }
+ }
+
+ target.addEventListener(type, write, options)
+ return reader()
+}
+
+main(listen(self, 'connect'))
diff --git a/packages/ipfs-message-port-client/tsconfig.json b/packages/ipfs-message-port-client/tsconfig.json
new file mode 100644
index 0000000000..088cc55425
--- /dev/null
+++ b/packages/ipfs-message-port-client/tsconfig.json
@@ -0,0 +1,28 @@
+{
+ "compilerOptions": {
+ "allowJs": true,
+ "checkJs": true,
+ "forceConsistentCasingInFileNames": true,
+ "noImplicitReturns": true,
+ "noImplicitAny": true,
+ "noImplicitThis": true,
+ "noFallthroughCasesInSwitch": true,
+ "noUnusedLocals": true,
+ "noUnusedParameters": true,
+ "strictFunctionTypes": true,
+ "strictNullChecks": true,
+ "strictPropertyInitialization": true,
+ "strict": true,
+ "alwaysStrict": true,
+ "esModuleInterop": true,
+ "target": "ES5",
+ "noEmit": true
+ },
+ "exclude": ["dist", "node_modules"],
+ "include": [
+ "src/**/*.js",
+ "../ipfs-message-port-server/src/**/*.js",
+ "../ipfs-message-port-protocol/src/**/*.js"
+ ],
+ "compileOnSave": false
+}
diff --git a/packages/ipfs-message-port-protocol/README.md b/packages/ipfs-message-port-protocol/README.md
new file mode 100644
index 0000000000..d4d32fe9be
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/README.md
@@ -0,0 +1,219 @@
+# ipfs-message-port-protocol
+
+[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://protocol.ai)
+[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/)
+[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs)
+[![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs)](https://travis-ci.com/ipfs/js-ipfs)
+[![Codecov branch](https://img.shields.io/codecov/c/github/ipfs/js-ipfs/master.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs)
+[![Dependency Status](https://david-dm.org/ipfs/js-ipfs/status.svg?path=packages/ipfs-message-port-protocol)](https://david-dm.org/ipfs/js-ipfs?path=packages/ipfs-message-port-protocol)
+[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard)
+
+> This package contains code shared between `ipfs-message-port-client` and `ipfs-message-port-server`
+
+## Lead Maintainer
+
+[Alex Potsides](https://github.com/achingbrain)
+
+## Table of Contents
+
+- [Install](#install)
+- [Usage](#usage)
+  - [Wire protocol codecs](#wire-protocol-codecs)
+    - [CID](#cid)
+    - [Block](#block)
+    - [DAGNode](#dagnode)
+    - [AsyncIterable](#asynciterable)
+    - [Callback](#callback)
+- [Contribute](#contribute)
+- [License](#license)
+
+## Install
+
+```bash
+$ npm install --save ipfs-message-port-protocol
+```
+
+## Usage
+
+## Wire protocol codecs
+
+This module provides encode / decode functions for types that are not supported by the [structured cloning algorithm][] and therefore need to be encoded before being posted over the [message channel][] and decoded on the other end.
+
+All encoders take an optional `transfer` array. If provided, the encoder will add all `Transferable` fields of the given value to it so they can be moved across threads without copying.
+
+### `CID`
+
+Codecs for [CID][] implementation in JavaScript.
+
+```js
+const { CID, encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid')
+
+const cid = new CID('bafybeig6xv5nwphfmvcnektpnojts33jqcuam7bmye2pb54adnrtccjlsu')
+
+const { port1, port2 } = new MessageChannel()
+
+// Will copy underlying memory
+port1.postMessage(encodeCID(cid))
+
+// Will transfer underlying memory (cid is corrupt on this thread)
+const transfer = []
+port1.postMessage(encodeCID(cid, transfer), transfer)
+
+// On the receiver thread
+port2.onmessage = ({data}) => {
+ const cid = decodeCID(data)
+  cid instanceof CID // => true
+}
+```
+
+### Block
+
+Codecs for [IPLD Block][] implementation in JavaScript.
+
+```js
+const { Block, encodeBlock, decodeBlock } = require('ipfs-message-port-protocol/src/block')
+
+const data = Buffer.from('hello')
+const cid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+const block = new Block(data, cid)
+
+const { port1, port2 } = new MessageChannel()
+
+// Will copy underlying memory
+port1.postMessage(encodeBlock(block))
+
+// Will transfer underlying memory (block & cid will be corrupt on this thread)
+const transfer = []
+port1.postMessage(encodeBlock(block, transfer), transfer)
+
+
+// On the receiver thread
+port2.onmessage = ({data}) => {
+ const block = decodeBlock(data)
+ block instanceof Block // true
+}
+```
+
+### DAGNode
+
+Codecs for DAG nodes accepted by the `ipfs.dag.put` API.
+
+```js
+const { encodeNode, decodeNode } = require('ipfs-message-port-protocol/src/dag')
+const { CID } = require('ipfs-message-port-protocol/src/cid')
+
+const cid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+const dagNode = { hi: 'hello', link: cid }
+
+const { port1, port2 } = new MessageChannel()
+
+// Will copy underlying memory
+port1.postMessage(encodeNode(dagNode))
+
+// Will transfer underlying memory (`dagNode.link` will be corrupt on this thread)
+const transfer = []
+port1.postMessage(encodeNode(dagNode, transfer), transfer)
+
+
+// On the receiver thread
+port2.onmessage = ({data}) => {
+ const dagNode = decodeNode(data)
+ dagNode.link instanceof CID // true
+}
+```
+
+### AsyncIterable
+
+This encoder encodes [async iterables][] such that they can be transferred
+across threads and decoded by a consumer on the other end, taking care of
+all the IO coordination between the two. It needs to be provided an `encoder` /
+`decoder` function to encode / decode each yielded item of the async iterable.
+Unlike the other encoders, the `transfer` argument is mandatory (because the async
+iterable is encoded to a [MessagePort][], which can only be transferred).
+
+
+```js
+const { encodeIterable, decodeIterable } = require('ipfs-message-port-protocol/src/core')
+
+const content = ipfs.cat('/ipfs/QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+
+const { port1, port2 } = new MessageChannel()
+
+// Will copy each chunk to the receiver thread
+{
+ const transfer = []
+ port1.postMessage(
+ encodeIterable(content, chunk => chunk, transfer),
+ transfer
+ )
+}
+
+
+// Will transfer each chunk to the receiver thread (corrupting it on this thread)
+{
+ const transfer = []
+ port1.postMessage(
+ encodeIterable(
+ content,
+ (chunk, transfer) => {
+ transfer.push(chunk.buffer)
+ return chunk
+ },
+ transfer
+ ),
+ transfer
+ )
+}
+
+
+// On the receiver thread
+port2.onmessage = async ({data}) => {
+  for await (const chunk of decodeIterable(data, chunk => chunk)) {
+ chunk instanceof Uint8Array
+ }
+}
+```
+
+### Callback
+
+Primitive callbacks that take a single parameter supported by the [structured cloning algorithm][], like the progress callbacks used across IPFS APIs, can be encoded / decoded. Unlike most encoders, the `transfer` argument is required (because the value is encoded to a [MessagePort][], which can only be transferred).
+
+```js
+const { encodeCallback, decodeCallback } = require('ipfs-message-port-protocol/src/core')
+
+const { port1, port2 } = new MessageChannel()
+
+const progress = (value) => console.log(value)
+
+const transfer = []
+port1.postMessage(encodeCallback(progress, transfer))
+
+
+// On the receiver thread
+port2.onmessage = ({data}) => {
+ const progress = decodeCallback(data)
+ // Invokes `progress` on the other end
+ progress(20)
+}
+```
+
+
+[structured cloning algorithm]:https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
+[message channel]:https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel
+[MessagePort]:https://developer.mozilla.org/en-US/docs/Web/API/MessagePort
+[Transferable]:https://developer.mozilla.org/en-US/docs/Web/API/Transferable
+
+[IPLD Block]:https://github.com/ipld/js-ipld-block
+[CID]:https://github.com/multiformats/js-cid
+
+[async iterables]:https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
+
+## Contribute
+
+Contributions welcome. Please check out [the issues](https://github.com/ipfs/js-ipfs/issues).
+
+Check out our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general. Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md).
+
+## License
+
+[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fipfs%2Fjs-ipfs.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fipfs%2Fjs-ipfs?ref=badge_large)
+
diff --git a/packages/ipfs-message-port-protocol/package.json b/packages/ipfs-message-port-protocol/package.json
new file mode 100644
index 0000000000..68520d9ff7
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/package.json
@@ -0,0 +1,50 @@
+{
+ "name": "ipfs-message-port-protocol",
+ "version": "0.0.1",
+ "description": "IPFS client/server protocol over message port",
+ "keywords": [
+ "ipfs"
+ ],
+ "homepage": "https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs-message-port-protocol#readme",
+ "bugs": "https://github.com/ipfs/js-ipfs/issues",
+ "license": "(Apache-2.0 OR MIT)",
+ "leadMaintainer": "Alex Potsides ",
+ "files": [
+ "src",
+ "dist"
+ ],
+ "browser": {},
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/ipfs/js-ipfs.git"
+ },
+ "scripts": {
+ "test": "aegir test",
+ "test:node": "aegir test -t node",
+ "test:browser": "aegir test -t browser",
+ "test:webworker": "aegir test -t webworker",
+ "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless",
+ "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless",
+ "lint": "aegir lint",
+ "build": "aegir build",
+ "coverage": "npx nyc -r html npm run test:node -- --bail",
+ "clean": "rm -rf ./dist",
+ "dep-check": "aegir dep-check"
+ },
+ "dependencies": {
+ "buffer": "^5.6.0",
+ "cids": "^0.8.3",
+ "ipld-block": "^0.9.2"
+ },
+ "devDependencies": {
+ "aegir": "^23.0.0",
+ "interface-ipfs-core": "^0.138.0"
+ },
+ "engines": {
+ "node": ">=10.3.0",
+ "npm": ">=3.0.0"
+ },
+ "contributors": [
+ "Irakli Gozalishvili "
+ ]
+}
\ No newline at end of file
diff --git a/packages/ipfs-message-port-protocol/src/block.js b/packages/ipfs-message-port-protocol/src/block.js
new file mode 100644
index 0000000000..e1e321cef9
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/src/block.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const { encodeCID, decodeCID } = require('./cid')
+const Block = require('ipld-block')
+
+/**
+ * @typedef {import('./cid').EncodedCID} EncodedCID
+ * @typedef {Object} EncodedBlock
+ * @property {Uint8Array} data
+ * @property {EncodedCID} cid
+ */
+
+/**
+ * Encodes a Block for transfer over the message channel.
+ *
+ * If a `transfer` array is provided, all the encountered `ArrayBuffer`s within
+ * this block will be added to it so they are moved across without
+ * copying.
+ * @param {Block} block
+ * @param {Transferable[]} [transfer]
+ * @returns {EncodedBlock}
+ */
+const encodeBlock = ({ cid, data }, transfer) => {
+ if (transfer) {
+ transfer.push(data.buffer)
+ }
+ return { cid: encodeCID(cid, transfer), data }
+}
+exports.encodeBlock = encodeBlock
+
+/**
+ * @param {EncodedBlock} encodedBlock
+ * @returns {Block}
+ */
+const decodeBlock = ({ cid, data }) => {
+ return new Block(data, decodeCID(cid))
+}
+
+exports.decodeBlock = decodeBlock
+
+exports.Block = Block
diff --git a/packages/ipfs-message-port-protocol/src/cid.js b/packages/ipfs-message-port-protocol/src/cid.js
new file mode 100644
index 0000000000..a17f60bc67
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/src/cid.js
@@ -0,0 +1,49 @@
+'use strict'
+
+const CID = require('cids')
+const { Buffer } = require('buffer')
+
+/**
+ * @typedef {Object} EncodedCID
+ * @property {string} codec
+ * @property {Uint8Array} multihash
+ * @property {number} version
+ */
+
+/**
+ * Encodes a CID (well, not really, as all own properties are going to be
+ * cloned anyway). If a `transfer` array is passed, the underlying `ArrayBuffer`
+ * will be added to the transfer list.
+ * @param {CID} cid
+ * @param {Transferable[]} [transfer]
+ * @returns {EncodedCID}
+ */
+const encodeCID = (cid, transfer) => {
+ if (transfer) {
+ transfer.push(cid.multihash.buffer)
+ }
+ return cid
+}
+exports.encodeCID = encodeCID
+
+/**
+ * Decodes an encoded CID (well, sort of: it makes nasty mutations to turn
+ * the structured-clone copy back into a proper CID instance).
+ * @param {EncodedCID} encodedCID
+ * @returns {CID}
+ */
+const decodeCID = encodedCID => {
+ /** @type {CID} */
+ const cid = (encodedCID)
+ Object.setPrototypeOf(cid.multihash, Buffer.prototype)
+ Object.setPrototypeOf(cid, CID.prototype)
+ // TODO: Figure out a way to avoid `Symbol.for` here as it can get out of
+ // sync with cids implementation.
+ // See: https://github.com/moxystudio/js-class-is/issues/25
+ Object.defineProperty(cid, Symbol.for('@ipld/js-cid/CID'), { value: true })
+
+ return cid
+}
+exports.decodeCID = decodeCID
+
+exports.CID = CID
diff --git a/packages/ipfs-message-port-protocol/src/core.js b/packages/ipfs-message-port-protocol/src/core.js
new file mode 100644
index 0000000000..56e7552978
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/src/core.js
@@ -0,0 +1,207 @@
+'use strict'
+
+/* eslint-env browser */
+const { encodeError, decodeError } = require('./error')
+
+/**
+ * @template T
+ * @typedef {Object} RemoteIterable
+ * @property {'RemoteIterable'} type
+ * @property {MessagePort} port
+ */
+
+/**
+ * @template T
+ * @typedef {Object} RemoteCallback
+ * @property {'RemoteCallback'} type
+ * @property {MessagePort} port
+ */
+
+/**
+ * @template T
+ * @typedef {Object} RemoteYield
+ * @property {false} done
+ * @property {T} value
+ * @property {void} error
+ */
+
+/**
+ * @template T
+ * @typedef {Object} RemoteDone
+ * @property {true} done
+ * @property {T|void} value
+ * @property {void} error
+ */
+
+/**
+ * @typedef {import('./error').EncodedError} EncodedError
+ * @typedef {Object} RemoteError
+ * @property {true} done
+ * @property {void} value
+ * @property {EncodedError} error
+ */
+
+/**
+ * @template T
+ * @typedef {RemoteYield<T>|RemoteDone<T>|RemoteError} RemoteNext
+ */
+
+/**
+ * @template I, O
+ * @param {RemoteIterable<I>} remote
+ * @param {function(I):O} decode
+ * @returns {AsyncIterable<O>}
+ */
+const decodeIterable = async function * ({ port }, decode) {
+ /**
+ * @param {RemoteNext} _data
+ */
+ let receive = _data => {}
+ /**
+   * @returns {Promise<RemoteNext<I>>}
+ */
+ const wait = () => new Promise(resolve => (receive = resolve))
+ const next = () => {
+ port.postMessage({ method: 'next' })
+ return wait()
+ }
+
+ /**
+ * @param {MessageEvent} event
+ * @returns {void}
+ */
+ port.onmessage = event => receive(event.data)
+
+ let isDone = false
+ try {
+ while (!isDone) {
+ const { done, value, error } = await next()
+ isDone = done
+ if (error != null) {
+ throw decodeError(error)
+ } else if (value != null) {
+ yield decode(value)
+ }
+ }
+ } finally {
+ if (!isDone) {
+ port.postMessage({ method: 'return' })
+ }
+ port.close()
+ }
+}
+exports.decodeIterable = decodeIterable
+
+/**
+ * @template I,O
+ * @param {AsyncIterable<I>|Iterable<I>} iterable
+ * @param {function(I, Transferable[]):O} encode
+ * @param {Transferable[]} transfer
+ * @returns {RemoteIterable<O>}
+ */
+const encodeIterable = (iterable, encode, transfer) => {
+ const { port1: port, port2: remote } = new MessageChannel()
+ /** @type {Transferable[]} */
+ const itemTransfer = []
+  /** @type {Iterator<I>|AsyncIterator<I>} */
+ const iterator = toIterator(iterable)
+
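+  // The consumer drives iteration: every `next` message pulls one item from
+  // the iterator and posts it back; a `return` message stops the producer.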
+ port.onmessage = async ({ data: { method } }) => {
+ switch (method) {
+ case 'next': {
+ try {
+ const { done, value } = await iterator.next()
+ if (done) {
+ port.postMessage({ type: 'next', done: true })
+ port.close()
+ } else {
+ itemTransfer.length = 0
+ port.postMessage(
+ {
+ type: 'next',
+ done: false,
+ value: encode(value, itemTransfer)
+ },
+ itemTransfer
+ )
+ }
+ } catch (error) {
+ port.postMessage({
+ type: 'throw',
+ error: encodeError(error)
+ })
+ port.close()
+ }
+ break
+ }
+ case 'return': {
+ port.close()
+ if (iterator.return) {
+ iterator.return()
+ }
+ break
+ }
+ default: {
+ break
+ }
+ }
+ }
+ port.start()
+ transfer.push(remote)
+
+ return { type: 'RemoteIterable', port: remote }
+}
+exports.encodeIterable = encodeIterable
+
+/**
+ * @template I
+ * @param {any} iterable
+ * @returns {Iterator<I>|AsyncIterator<I>}
+ */
+const toIterator = iterable => {
+ if (iterable != null) {
+ if (typeof iterable[Symbol.asyncIterator] === 'function') {
+ return iterable[Symbol.asyncIterator]()
+ }
+
+ if (typeof iterable[Symbol.iterator] === 'function') {
+ return iterable[Symbol.iterator]()
+ }
+ }
+
+ throw TypeError('Value must be async or sync iterable')
+}
+
+/**
+ * @template T
+ * @param {function(T):void} callback
+ * @param {Transferable[]} transfer
+ * @returns {RemoteCallback<T>}
+ */
+const encodeCallback = (callback, transfer) => {
+ // eslint-disable-next-line no-undef
+ const { port1: port, port2: remote } = new MessageChannel()
+ port.onmessage = ({ data }) => callback(data)
+ transfer.push(remote)
+ return { type: 'RemoteCallback', port: remote }
+}
+exports.encodeCallback = encodeCallback
+
+/**
+ * @template T
+ * @param {RemoteCallback<T>} remote
+ * @returns {function(T):void | function(T, Transferable[]):void}
+ */
+const decodeCallback = ({ port }) => {
+ /**
+ * @param {T} value
+ * @param {Transferable[]} [transfer]
+ * @returns {void}
+ */
+ const callback = (value, transfer = []) => {
+ port.postMessage(value, transfer)
+ }
+
+ return callback
+}
+exports.decodeCallback = decodeCallback
diff --git a/packages/ipfs-message-port-protocol/src/dag.js b/packages/ipfs-message-port-protocol/src/dag.js
new file mode 100644
index 0000000000..467b68ce77
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/src/dag.js
@@ -0,0 +1,94 @@
+'use strict'
+
+const { encodeCID, decodeCID, CID } = require('./cid')
+
+/**
+ * @typedef {import('./data').JSONValue} JSONValue
+ */
+
+/**
+ * @template T
+ * @typedef {import('./data').StringEncoded<T>} StringEncoded
+ */
+
+/**
+ * @typedef {Object} EncodedCID
+ * @property {string} codec
+ * @property {Uint8Array} multihash
+ * @property {number} version
+ * @typedef {JSONValue} DAGNode
+ *
+ * @typedef {Object} EncodedDAGNode
+ * @property {DAGNode} dagNode
+ * @property {CID[]} cids
+ */
+
+/**
+ * @param {EncodedDAGNode} encodedNode
+ * @returns {DAGNode}
+ */
+const decodeNode = ({ dagNode, cids }) => {
+  // It is not ideal to have to mutate prototype chains like
+  // this, but it removes the need to traverse the node first on the client
+  // and again on the server.
+ for (const cid of cids) {
+ decodeCID(cid)
+ }
+
+ return dagNode
+}
+
+exports.decodeNode = decodeNode
+
+/**
+ * Encodes a DAG node for transfer over the message channel by collecting all
+ * the CID instances into an array so they can be turned back into CIDs
+ * without traversal on the other end.
+ *
+ * If a `transfer` array is provided, all the encountered `ArrayBuffer`s within
+ * this node will be added to it so they are moved across without copying.
+ * @param {DAGNode} dagNode
+ * @param {Transferable[]} [transfer]
+ * @returns {EncodedDAGNode}
+ */
+const encodeNode = (dagNode, transfer) => {
+ /** @type {CID[]} */
+ const cids = []
+ collectNode(dagNode, cids, transfer)
+ return { dagNode, cids }
+}
+exports.encodeNode = encodeNode
+
+/**
+ * Recursively traverses the passed `value` and collects encountered `CID` instances
+ * into the provided `cids` array. If a `transfer` array is passed, it also collects
+ * all the `ArrayBuffer`s into it.
+ * @param {DAGNode} value
+ * @param {CID[]} cids
+ * @param {Transferable[]} [transfer]
+ * @returns {void}
+ */
+const collectNode = (value, cids, transfer) => {
+ if (value != null && typeof value === 'object') {
+ if (CID.isCID(value)) {
+ cids.push(value)
+ encodeCID(value, transfer)
+ } else if (value instanceof ArrayBuffer) {
+ if (transfer) {
+ transfer.push(value)
+ }
+ } else if (ArrayBuffer.isView(value)) {
+ if (transfer) {
+ transfer.push(value.buffer)
+ }
+ } else if (Array.isArray(value)) {
+ for (const member of value) {
+ collectNode(member, cids, transfer)
+ }
+ } else {
+ for (const member of Object.values(value)) {
+ collectNode(member, cids, transfer)
+ }
+ }
+ }
+}
diff --git a/packages/ipfs-message-port-protocol/src/data.ts b/packages/ipfs-message-port-protocol/src/data.ts
new file mode 100644
index 0000000000..91b31b04e5
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/src/data.ts
@@ -0,0 +1,38 @@
+export type JSONObject = { [key: string]: JSONValue }
+export type JSONArray = Array<JSONValue>
+export type JSONValue =
+ | null
+ | boolean
+ | number
+ | string
+ | JSONArray
+ | JSONObject
+
+export type Encoded<_Data, Representation> = Representation
+export type StringEncoded<T> = Encoded<T, string>
+
+export type UnixFSTime = {
+ secs: number
+ nsecs: number
+}
+
+export type LooseUnixFSTime = {
+ secs: number
+ nsecs?: number
+}
+
+export type HRTime = [number, number]
+
+export type Time = Date | LooseUnixFSTime | HRTime
+export type Mode = string | number
+export type HashAlg = string
+export type FileType = 'directory' | 'file'
+export type CIDVersion = 0 | 1
+
+export type Result<X, T> = { ok: true; value: T } | { ok: false; error: X }
+
+export type EncodedError = {
+ message: string
+ name: string
+ stack: string
+}
diff --git a/packages/ipfs-message-port-protocol/src/error.js b/packages/ipfs-message-port-protocol/src/error.js
new file mode 100644
index 0000000000..3e299db827
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/src/error.js
@@ -0,0 +1,76 @@
+'use strict'
+
+/* eslint-env browser */
+
+/**
+ * @typedef {Error|ErrorData} EncodedError
+ *
+ * Properties added by err-code library
+ * @typedef {Object} ErrorExtension
+ * @property {string} [code]
+ * @property {string} [detail]
+ */
+
+/**
+ * @typedef {Error & ErrorExtension} ExtendedError
+ */
+
+/**
+ * @typedef {Object} ErrorData
+ * @property {string} name
+ * @property {string} message
+ * @property {string|undefined} stack
+ * @property {string|undefined} code
+ * @property {string|undefined} detail
+ *
+ * @param {ExtendedError} error
+ * @returns {EncodedError}
+ */
+const encodeError = error => {
+ const { name, message, stack, code, detail } = error
+ return { name, message, stack, code, detail }
+}
+exports.encodeError = encodeError
+
+/**
+ * @param {EncodedError} error
+ * @returns {Error}
+ */
+const decodeError = error => {
+ if (error instanceof Error) {
+ return error
+ } else {
+ const { name, message, stack, code } = error
+ return Object.assign(createError(name, message), { name, stack, code })
+ }
+}
+exports.decodeError = decodeError
+
+/**
+ * Create error by error name.
+ * @param {string} name
+ * @param {string} message
+ * @returns {Error}
+ */
+const createError = (name, message) => {
+ switch (name) {
+ case 'RangeError': {
+ return new RangeError(message)
+ }
+ case 'ReferenceError': {
+ return ReferenceError(message)
+ }
+ case 'SyntaxError': {
+ return new SyntaxError(message)
+ }
+ case 'TypeError': {
+ return new TypeError(message)
+ }
+ case 'URIError': {
+ return new URIError(message)
+ }
+ default: {
+ return new Error(message)
+ }
+ }
+}
diff --git a/packages/ipfs-message-port-protocol/src/rpc.ts b/packages/ipfs-message-port-protocol/src/rpc.ts
new file mode 100644
index 0000000000..1b9dd836de
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/src/rpc.ts
@@ -0,0 +1,90 @@
+export type Procedure<T> = T extends (arg: infer I) => infer O
+  ? (query: I & QueryOptions) => Return<O>
+  : void
+
+export type Remote<T> = {
+  [K in keyof T]: Procedure<T[K]>
+}
+
+type Return<T> = T extends Promise<infer U>
+  ? Promise<U>
+  : Promise<T>
+
+export type QueryOptions = {
+  signal?: AbortSignal
+  timeout?: number
+  transfer?: Transferable[]
+}
+
+export type TransferOptions = {
+  transfer?: Transferable[]
+}
+
+export type NonUndefined<A> = A extends undefined ? never : A
+
+export type ProcedureNames<T> = {
+  [K in keyof T]-?: NonUndefined<T[K]> extends Function ? K : never
+}[keyof T][]
+
+
+
+/**
+ * Any method name of the associated RPC service.
+ */
+export type Method<T> = ServiceQuery<T>['method']
+
+/**
+ * Namespace of the RPC service
+ */
+export type Namespace<T> = ServiceQuery<T>['namespace']
+
+export type Values<T> = T[keyof T]
+export type Keys<T> = keyof T
+
+export type Inn<T> = ServiceQuery<T>['input']
+export type Out<T> = ServiceQuery<T>['result']
+
+export type RPCQuery<T> = Pick<
+  ServiceQuery<T>,
+  'method' | 'namespace' | 'input' | 'timeout' | 'signal'
+>
+
+
+export type ServiceQuery<T> = Values<
+  {
+    [NS in keyof T]: NamespacedQuery<T[NS], NS>
+  }
+>
+
+
+export type NamespacedQuery<S, NS> = Values<
+  {
+    [M in keyof S]-?: S[M] extends (input: infer I) => infer O
+      ? {
+          namespace: NS
+          method: M
+          input: I & QueryOptions
+          result: R<O>
+        } & QueryOptions
+      : never
+  }
+>
+
+type R<O> = O extends Promise<infer T>
+  ? Promise<WithTransferOptions<T>>
+  : Promise<WithTransferOptions<O>>
+
+type WithTransferOptions<O> = O extends object ? O & TransferOptions : O
+
+
+export type MultiService<T> = {
+  [NS in keyof T]: NamespacedService<T[NS]>
+}
+
+type NamespacedService<S> = {
+  [M in keyof S]: NamespacedMethod<S[M]>
+}
+
+export type NamespacedMethod<T> = T extends (arg: infer I) => infer O
+  ? (query: I & QueryOptions) => Return<O>
+  : never
diff --git a/packages/ipfs-message-port-protocol/test/block.browser.js b/packages/ipfs-message-port-protocol/test/block.browser.js
new file mode 100644
index 0000000000..e769f5c0c3
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/test/block.browser.js
@@ -0,0 +1,51 @@
+'use strict'
+
+/* eslint-env mocha */
+
+const CID = require('cids')
+const { encodeBlock, decodeBlock } = require('../src/block')
+const { ipc } = require('./util')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const { Buffer } = require('buffer')
+const Block = require('ipld-block')
+
+describe('block (browser)', function () {
+ this.timeout(10 * 1000)
+ const move = ipc()
+
+ describe('encodeBlock / decodeBlock', () => {
+ it('should decode Block over message channel', async () => {
+ const blockIn = new Block(
+ Buffer.from('hello'),
+ new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+ )
+
+ const blockOut = decodeBlock(await move(encodeBlock(blockIn)))
+
+ expect(blockOut).to.be.deep.equal(blockIn)
+ })
+
+ it('should decode Block over message channel & transfer bytes', async () => {
+ const cid = new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+ const data = Buffer.from('hello')
+ const blockIn = new Block(data, cid)
+
+ const transfer = []
+
+ const blockOut = decodeBlock(
+ await move(encodeBlock(blockIn, transfer), transfer)
+ )
+
+ expect(blockOut).to.be.instanceOf(Block)
+ expect(blockOut).to.be.deep.equal(
+ new Block(
+ Buffer.from('hello'),
+ new CID('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+ )
+ )
+
+ expect(data).to.have.property('byteLength', 0, 'data was cleared')
+ expect(cid.multihash).to.have.property('byteLength', 0, 'cid was cleared')
+ })
+ })
+})
diff --git a/packages/ipfs-message-port-protocol/test/browser.js b/packages/ipfs-message-port-protocol/test/browser.js
new file mode 100644
index 0000000000..2bcacfbe20
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/test/browser.js
@@ -0,0 +1,7 @@
+'use strict'
+
+require('./cid.browser')
+require('./block.browser')
+
+require('./dag.browser')
+require('./core.browser')
diff --git a/packages/ipfs-message-port-protocol/test/cid.browser.js b/packages/ipfs-message-port-protocol/test/cid.browser.js
new file mode 100644
index 0000000000..ece6fd75ec
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/test/cid.browser.js
@@ -0,0 +1,47 @@
+'use strict'
+
+/* eslint-env mocha */
+
+const CID = require('cids')
+const { encodeCID, decodeCID } = require('../src/cid')
+const { ipc } = require('./util')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+
+describe('cid (browser)', function () {
+ this.timeout(10 * 1000)
+ const move = ipc()
+
+ describe('encodeCID / decodeCID', () => {
+ it('should decode to CID over message channel', async () => {
+ const cidIn = new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr')
+ const cidDataIn = encodeCID(cidIn)
+ const cidDataOut = await move(cidDataIn)
+ const cidOut = decodeCID(cidDataOut)
+
+ expect(cidOut).to.be.an.instanceof(CID)
+ expect(CID.isCID(cidOut)).to.be.true()
+ expect(cidOut.equals(cidIn)).to.be.true()
+ expect(cidIn.multihash)
+ .property('byteLength')
+ .not.be.equal(0)
+ })
+
+ it('should decode CID and transfer bytes', async () => {
+ const cidIn = new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr')
+ const transfer = []
+ const cidDataIn = encodeCID(cidIn, transfer)
+ const cidDataOut = await move(cidDataIn, transfer)
+ const cidOut = decodeCID(cidDataOut)
+
+ expect(cidOut).to.be.an.instanceof(CID)
+ expect(CID.isCID(cidOut)).to.be.true()
+ expect(cidIn.multihash).property('byteLength', 0)
+ expect(cidOut.multihash)
+ .property('byteLength')
+ .to.not.be.equal(0)
+ expect(cidOut.toString()).to.be.equal(
+ 'Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr'
+ )
+ })
+ })
+})
diff --git a/packages/ipfs-message-port-protocol/test/cid.spec.js b/packages/ipfs-message-port-protocol/test/cid.spec.js
new file mode 100644
index 0000000000..d6aec85304
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/test/cid.spec.js
@@ -0,0 +1,32 @@
+'use strict'
+
+/* eslint-env mocha */
+
+const CID = require('cids')
+const { encodeCID, decodeCID } = require('../src/cid')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+
+describe('cid', function () {
+ this.timeout(10 * 1000)
+
+ describe('encodeCID / decodeCID', () => {
+ it('should encode CID', () => {
+ const { multihash, codec, version } = encodeCID(
+ new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr')
+ )
+ expect(multihash).to.be.an.instanceof(Uint8Array)
+ expect(version).to.be.a('number')
+ expect(codec).to.be.a('string')
+ })
+
+ it('should decode CID', () => {
+ const { multihash, codec, version } = encodeCID(
+ new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr')
+ )
+ const cid = new CID('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr')
+      const decodedCID = decodeCID({ multihash, codec, version })
+
+      expect(cid.equals(decodedCID)).to.be.true()
+ })
+ })
+})
diff --git a/packages/ipfs-message-port-protocol/test/core.browser.js b/packages/ipfs-message-port-protocol/test/core.browser.js
new file mode 100644
index 0000000000..3f49a977d6
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/test/core.browser.js
@@ -0,0 +1,492 @@
+'use strict'
+
+/* eslint-env mocha */
+
+const {
+ encodeCallback,
+ decodeCallback,
+ encodeIterable,
+ decodeIterable
+} = require('../src/core')
+const { ipc } = require('./util')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const { Buffer } = require('buffer')
+
+describe('core', function () {
+ this.timeout(10 * 1000)
+ const move = ipc()
+
+ describe('remote callback', () => {
+ it('remote callback copies arguments', async () => {
+ let deliver = null
+ const callback = progress => {
+ deliver(progress)
+ }
+ const receive = () =>
+ new Promise(resolve => {
+ deliver = resolve
+ })
+
+ const transfer = []
+ const remote = decodeCallback(
+ await move(encodeCallback(callback, transfer), transfer)
+ )
+
+ remote(54)
+ expect(await receive()).to.be.equal(54)
+
+ remote({ hello: 'world' })
+
+ expect(await receive()).to.be.deep.equal({ hello: 'world' })
+ })
+
+ it('remote callback transfers buffers', async () => {
+ let deliver = null
+ const callback = progress => {
+ deliver(progress)
+ }
+ const receive = () =>
+ new Promise(resolve => {
+ deliver = resolve
+ })
+
+ const transfer = []
+ const remote = decodeCallback(
+ await move(encodeCallback(callback, transfer), transfer)
+ )
+
+ remote({ hello: Buffer.from('world') })
+ expect(await receive()).to.be.deep.equal({ hello: Buffer.from('world') })
+
+ const world = Buffer.from('world')
+ remote({ hello: world }, [world.buffer])
+
+ expect(await receive()).to.be.deep.equal({ hello: Buffer.from('world') })
+ expect(world.buffer).property('byteLength', 0, 'buffer was cleared')
+ })
+ })
+
+ describe('remote async iterable', () => {
+ it('remote iterable copies yielded data', async () => {
+ const iterate = async function * () {
+ yield 1
+ await null
+ yield { hello: Buffer.from('world') }
+ yield { items: [Buffer.from('bla'), Buffer.from('bla')] }
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [
+ 1,
+ { hello: Buffer.from('world') },
+ { items: [Buffer.from('bla'), Buffer.from('bla')] }
+ ]
+
+ for await (const item of remote) {
+ expect(item).to.be.deep.equal(incoming.shift())
+ }
+
+ expect(incoming).to.have.property('length', 0, 'all items were received')
+ })
+
+ it('break in consumer loop propagates to producer loop', async () => {
+ const outgoing = [
+ 1,
+ { hello: Buffer.from('world') },
+ { items: [Buffer.from('bla'), Buffer.from('bla')] },
+ { bye: 'Goodbye' }
+ ]
+
+ const iterate = async function * () {
+ await null
+ while (true) {
+ yield outgoing.shift()
+ }
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [
+ 1,
+ { hello: Buffer.from('world') },
+ { items: [Buffer.from('bla'), Buffer.from('bla')] }
+ ]
+
+ for await (const item of remote) {
+ expect(item).to.be.deep.equal(incoming.shift())
+ if (incoming.length === 0) {
+ break
+ }
+ }
+
+ expect(incoming).to.have.property('length', 0, 'all items were received')
+ expect(outgoing).to.have.property('length', 1, 'one item remained')
+ })
+
+    it('exception in producer propagates to consumer', async () => {
+ const iterate = async function * () {
+ await null
+ yield 1
+ yield 2
+ throw Error('Producer Boom!')
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [1, 2]
+
+ const consume = async () => {
+ for await (const item of remote) {
+ expect(item).to.be.deep.equal(incoming.shift())
+ }
+ }
+
+ const result = await consume().catch(error => error)
+
+ expect(result).to.an.instanceOf(Error)
+ expect(result).to.have.property('message', 'Producer Boom!')
+      expect(incoming).to.have.property('length', 0, 'all items were received')
+    })
+
+    it('exception in consumer propagates to producer', async () => {
+ const outgoing = [1, 2, 3]
+
+ const iterate = async function * () {
+ await null
+ while (true) {
+ yield outgoing.shift()
+ }
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [1, 2]
+
+ const consume = async () => {
+ for await (const item of remote) {
+ expect(item).to.be.deep.equal(incoming.shift())
+ if (incoming.length === 0) {
+ throw new Error('Consumer Boom!')
+ }
+ }
+ }
+
+ const result = await consume().catch(error => error)
+
+ expect(result).to.an.instanceOf(Error)
+ expect(result).to.have.property('message', 'Consumer Boom!')
+
+ expect(outgoing).to.be.deep.equal([3], 'Producer loop was broken')
+ })
+
+    it('iterable transfers yielded data', async () => {
+ const hi = Buffer.from('hello world')
+ const body = Buffer.from('how are you')
+ const bye = Buffer.from('Bye')
+ const outgoing = [hi, body, bye]
+ const iterate = async function * () {
+ await null
+ yield * outgoing
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ transfer.push(data.buffer)
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [
+ Buffer.from('hello world'),
+ Buffer.from('how are you'),
+ Buffer.from('Bye')
+ ]
+
+ for await (const data of remote) {
+ expect(data).to.be.deep.equal(incoming.shift())
+ }
+
+ expect(outgoing).property('length', 3)
+ expect(hi).property('byteLength', 0)
+ expect(body).property('byteLength', 0)
+ expect(bye).property('byteLength', 0)
+ })
+ })
+
+ describe('remote sync iterable', () => {
+ it('remote iterable copies yielded data', async () => {
+ const iterate = function * () {
+ yield 1
+ yield { hello: Buffer.from('world') }
+ yield { items: [Buffer.from('bla'), Buffer.from('bla')] }
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [
+ 1,
+ { hello: Buffer.from('world') },
+ { items: [Buffer.from('bla'), Buffer.from('bla')] }
+ ]
+
+ for await (const item of remote) {
+ expect(item).to.be.deep.equal(incoming.shift())
+ }
+
+ expect(incoming).to.have.property('length', 0, 'all items were received')
+ })
+
+ it('break in consumer loop propagates to producer loop', async () => {
+ const outgoing = [
+ 1,
+ { hello: Buffer.from('world') },
+ { items: [Buffer.from('bla'), Buffer.from('bla')] },
+ { bye: 'Goodbye' }
+ ]
+
+ const iterate = async function * () {
+ await null
+ while (true) {
+ yield outgoing.shift()
+ }
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [
+ 1,
+ { hello: Buffer.from('world') },
+ { items: [Buffer.from('bla'), Buffer.from('bla')] }
+ ]
+
+ for await (const item of remote) {
+ expect(item).to.be.deep.equal(incoming.shift())
+ if (incoming.length === 0) {
+ break
+ }
+ }
+
+ expect(incoming).to.have.property('length', 0, 'all items were received')
+ expect(outgoing).to.have.property('length', 1, 'one item remained')
+ })
+
+    it('exception in producer propagates to consumer', async () => {
+ const iterate = function * () {
+ yield 1
+ yield 2
+ throw Error('Producer Boom!')
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [1, 2]
+
+ const consume = async () => {
+ for await (const item of remote) {
+ expect(item).to.be.deep.equal(incoming.shift())
+ }
+ }
+
+ const result = await consume().catch(error => error)
+
+ expect(result).to.an.instanceOf(Error)
+ expect(result).to.have.property('message', 'Producer Boom!')
+      expect(incoming).to.have.property('length', 0, 'all items were received')
+    })
+
+    it('exception in consumer propagates to producer', async () => {
+ const outgoing = [1, 2, 3]
+
+ const iterate = function * () {
+ while (true) {
+ yield outgoing.shift()
+ }
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [1, 2]
+
+ const consume = async () => {
+ for await (const item of remote) {
+ expect(item).to.be.deep.equal(incoming.shift())
+ if (incoming.length === 0) {
+ throw new Error('Consumer Boom!')
+ }
+ }
+ }
+
+ const result = await consume().catch(error => error)
+
+ expect(result).to.an.instanceOf(Error)
+ expect(result).to.have.property('message', 'Consumer Boom!')
+
+ expect(outgoing).to.be.deep.equal([3], 'Producer loop was broken')
+ })
+
+    it('iterable transfers yielded data', async () => {
+ const hi = Buffer.from('hello world')
+ const body = Buffer.from('how are you')
+ const bye = Buffer.from('Bye')
+ const outgoing = [hi, body, bye]
+ const iterate = function * () {
+ yield * outgoing
+ }
+
+ const transfer = []
+
+ const remote = decodeIterable(
+ await move(
+ encodeIterable(
+ iterate(),
+ (data, transfer) => {
+ transfer.push(data.buffer)
+ return data
+ },
+ transfer
+ ),
+ transfer
+ ),
+ a => a
+ )
+
+ const incoming = [
+ Buffer.from('hello world'),
+ Buffer.from('how are you'),
+ Buffer.from('Bye')
+ ]
+
+ for await (const data of remote) {
+ expect(data).to.be.deep.equal(incoming.shift())
+ }
+
+ expect(outgoing).property('length', 3)
+ expect(hi).property('byteLength', 0)
+ expect(body).property('byteLength', 0)
+ expect(bye).property('byteLength', 0)
+ })
+ })
+})
diff --git a/packages/ipfs-message-port-protocol/test/dag.browser.js b/packages/ipfs-message-port-protocol/test/dag.browser.js
new file mode 100644
index 0000000000..a522b3c1d6
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/test/dag.browser.js
@@ -0,0 +1,88 @@
+'use strict'
+
+/* eslint-env mocha */
+
+const CID = require('cids')
+const { encodeNode, decodeNode } = require('../src/dag')
+const { ipc } = require('./util')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const { Buffer } = require('buffer')
+
+describe('dag (browser)', function () {
+ this.timeout(10 * 1000)
+ const move = ipc()
+
+ describe('encodeNode / decodeNode', () => {
+ it('should decode dagNode over message channel', async () => {
+ const cid1 = new CID(
+ 'bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce'
+ )
+ const cid2 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ')
+
+ const hi = Buffer.from('hello world')
+ const nodeIn = {
+ hi,
+ nested: {
+ structure: {
+ with: {
+ links: [cid1]
+ }
+ }
+ },
+ other: {
+ link: cid2
+ }
+ }
+
+ const nodeOut = decodeNode(await move(encodeNode(nodeIn)))
+
+ expect(nodeOut).to.be.deep.equal(nodeIn)
+ })
+
+ it('should decode dagNode over message channel & transfer bytes', async () => {
+ const cid1 = new CID(
+ 'bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce'
+ )
+ const cid2 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ')
+
+ const hi = Buffer.from('hello world')
+ const nodeIn = {
+ hi: Buffer.from(hi),
+ nested: {
+ structure: {
+ with: {
+ links: [new CID(cid1)]
+ }
+ }
+ },
+ other: {
+ link: new CID(cid2)
+ }
+ }
+ const transfer = []
+
+ const nodeOut = decodeNode(
+ await move(encodeNode(nodeIn, transfer), transfer)
+ )
+
+ expect(nodeOut).to.be.deep.equal({
+ hi,
+ nested: {
+ structure: {
+ with: {
+ links: [cid1]
+ }
+ }
+ },
+ other: {
+ link: cid2
+ }
+ })
+
+ expect(transfer).to.containSubset(
+ [{ byteLength: 0 }, { byteLength: 0 }, { byteLength: 0 }],
+        'transferred buffers were cleared'
+ )
+ })
+ })
+})
diff --git a/packages/ipfs-message-port-protocol/test/dag.spec.js b/packages/ipfs-message-port-protocol/test/dag.spec.js
new file mode 100644
index 0000000000..53c0388ac3
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/test/dag.spec.js
@@ -0,0 +1,112 @@
+'use strict'
+
+/* eslint-env mocha */
+
+const CID = require('cids')
+const { encodeNode } = require('../src/dag')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const { Buffer } = require('buffer')
+
+describe('dag', function () {
+ this.timeout(10 * 1000)
+
+ describe('encodeNode / decodeNode', () => {
+    it('should encode node', () => {
+ const cid1 = new CID(
+ 'bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce'
+ )
+ const cid2 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ')
+ const dagNode = {
+ hi: 'hello',
+ link: cid1,
+ nested: {
+        stuff: {
+ here: cid2
+ }
+ }
+ }
+
+ const data = encodeNode(dagNode)
+
+ expect(data.dagNode).to.be.equal(dagNode)
+ expect(data.cids).to.be.an.instanceOf(Array)
+ expect(data.cids).to.have.property('length', 2)
+ expect(data.cids).to.include(cid1)
+ expect(data.cids).to.include(cid2)
+ })
+
+    it('should encode and add buffers to transfer list', () => {
+ const cid1 = new CID(
+ 'bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce'
+ )
+ const cid2 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ')
+
+ const hi = Buffer.from('hello world')
+ const dagNode = {
+ hi,
+ nested: {
+ structure: {
+ with: {
+ links: [cid1]
+ }
+ }
+ },
+ other: {
+ link: cid2
+ }
+ }
+
+ const transfer = []
+ const data = encodeNode(dagNode, transfer)
+
+ expect(data.dagNode).to.be.equal(dagNode)
+ expect(data.cids).to.be.an.instanceOf(Array)
+ expect(data.cids).to.have.property('length', 2)
+ expect(data.cids).to.include(cid1)
+ expect(data.cids).to.include(cid2)
+
+ expect(transfer).to.be.an.instanceOf(Array)
+ expect(transfer).to.have.property('length', 3)
+ expect(transfer).to.include(cid1.multihash.buffer)
+ expect(transfer).to.include(cid2.multihash.buffer)
+ expect(transfer).to.include(hi.buffer)
+ })
+
+    it('should decode node', () => {
+ const cid1 = new CID(
+ 'bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce'
+ )
+ const cid2 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ')
+
+ const hi = Buffer.from('hello world')
+ const dagNode = {
+ hi,
+ nested: {
+ structure: {
+ with: {
+ links: [cid1]
+ }
+ }
+ },
+ other: {
+ link: cid2
+ }
+ }
+
+ const transfer = []
+ const data = encodeNode(dagNode, transfer)
+
+ expect(data.dagNode).to.be.equal(dagNode)
+ expect(data.cids).to.be.an.instanceOf(Array)
+ expect(data.cids).to.have.property('length', 2)
+ expect(data.cids).to.include(cid1)
+ expect(data.cids).to.include(cid2)
+
+ expect(transfer).to.be.an.instanceOf(Array)
+ expect(transfer).to.have.property('length', 3)
+ expect(transfer).to.include(cid1.multihash.buffer)
+ expect(transfer).to.include(cid2.multihash.buffer)
+ expect(transfer).to.include(hi.buffer)
+ })
+ })
+})
diff --git a/packages/ipfs-message-port-protocol/test/node.js b/packages/ipfs-message-port-protocol/test/node.js
new file mode 100644
index 0000000000..c6600875fa
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/test/node.js
@@ -0,0 +1,4 @@
+'use strict'
+
+require('./cid.spec')
+require('./dag.spec')
diff --git a/packages/ipfs-message-port-protocol/test/util.js b/packages/ipfs-message-port-protocol/test/util.js
new file mode 100644
index 0000000000..299ce49445
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/test/util.js
@@ -0,0 +1,42 @@
+'use strict'
+/* eslint-env browser */
+
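+/**
+ * Creates a `move` helper that round-trips a value through a real
+ * MessageChannel, so tests exercise actual structured cloning and transfer
+ * semantics between ports.
+ */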
+const ipc = () => {
+ const { port1: sender, port2: receiver } = new MessageChannel()
+ let out = true
+ const move = async (data, transfer) => {
+ await out
+ return await new Promise(resolve => {
+ receiver.onmessage = event => resolve(event.data)
+ sender.postMessage(data, transfer)
+ })
+ }
+
+ /**
+ * @template T
+ * @param {T} data
+ * @param {Transferable[]} [transfer]
+   * @returns {Promise<T>}
+ */
+ const ipcMove = async (data, transfer = []) => {
+ out = move(data, transfer)
+ return await out
+ }
+
+ return ipcMove
+}
+exports.ipc = ipc
+
+/**
+ * @template T
+ * @returns {[Promise<T>, function(T):void, function(any):void]}
+ */
+const defer = () => {
+ const result = []
+ result.unshift(
+ new Promise((resolve, reject) => {
+ result.push(resolve, reject)
+ })
+ )
+ return result
+}
+exports.defer = defer
diff --git a/packages/ipfs-message-port-protocol/tsconfig.json b/packages/ipfs-message-port-protocol/tsconfig.json
new file mode 100644
index 0000000000..4c65e6338b
--- /dev/null
+++ b/packages/ipfs-message-port-protocol/tsconfig.json
@@ -0,0 +1,21 @@
+{
+ "compilerOptions": {
+ "allowJs": true,
+ "checkJs": true,
+ "noImplicitReturns": true,
+ "noImplicitAny": true,
+ "noImplicitThis": true,
+ "noEmitHelpers": true,
+ "strictFunctionTypes": true,
+ "strictNullChecks": true,
+ "strictPropertyInitialization": true,
+ "strict": true,
+ "esModuleInterop": true,
+ "alwaysStrict": true,
+ "target": "ES5",
+ "outDir": "./dist/"
+ },
+ "exclude": ["dist"],
+ "include": ["src", "../../node_modules/ipld-block/src/index.js"],
+ "compileOnSave": false
+}
diff --git a/packages/ipfs-message-port-server/README.md b/packages/ipfs-message-port-server/README.md
new file mode 100644
index 0000000000..63f9365c66
--- /dev/null
+++ b/packages/ipfs-message-port-server/README.md
@@ -0,0 +1,103 @@
+# ipfs-message-port-server
+
+[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://protocol.ai)
+[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/)
+[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs)
+[![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs)](https://travis-ci.com/ipfs/js-ipfs)
+[![Codecov branch](https://img.shields.io/codecov/c/github/ipfs/js-ipfs/master.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs)
+[![Dependency Status](https://david-dm.org/ipfs/js-ipfs/status.svg?path=packages/ipfs-message-port-server)](https://david-dm.org/ipfs/js-ipfs?path=packages/ipfs-message-port-server)
+[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard)
+
+> A library for exposing an IPFS node over a [message channel][]. It enables
+applications running in a different JS context to use a subset of the [IPFS API](https://github.com/ipfs/js-ipfs/tree/master/docs/core-api) via `ipfs-message-port-client`.
+
+
+## Lead Maintainer
+
+[Alex Potsides](https://github.com/achingbrain)
+
+## Table of Contents
+
+- [Install](#install)
+- [Usage](#usage)
+- [Notes on Performance](#notes-on-performance)
+- [Contribute](#contribute)
+- [License](#license)
+
+## Install
+
+```bash
+$ npm install --save ipfs-message-port-server
+```
+
+## Usage
+
+This library can wrap a JS IPFS node and expose it over the [message channel][].
+It assumes `ipfs-message-port-client` on the other end, although that is not
+strictly necessary: anything complying with the wire protocol will do.
+
+It exposes the following API subset:
+
+- [`ipfs.dag`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/DAG.md)
+- [`ipfs.block`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/BLOCK.md)
+- [`ipfs.add`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsadddata-options)
+- [`ipfs.cat`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfscatipfspath-options)
+- [`ipfs.files.stat`](https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsfilesstatpath-options)
+
+The server is designed to run in a [SharedWorker][] (although it is possible to
+run it in other JS contexts). The example below illustrates running a js-ipfs
+node in a [SharedWorker][] and exposing it to all connected ports:
+
+```js
+const IPFS = require('ipfs')
+const { IPFSService, Server } = require('ipfs-message-port-server')
+
+const main = async () => {
+ const connections = []
+  // Queue connections that arrive while the node is starting.
+ self.onconnect = ({ports}) => connections.push(...ports)
+
+ const ipfs = await IPFS.create()
+ const service = new IPFSService(ipfs)
+ const server = new Server(service)
+
+  // Connect new ports, and the ones queued during startup, to the server.
+ self.onconnect = ({ports}) => server.connect(ports[0])
+ for (const port of connections.splice(0)) {
+ server.connect(port)
+ }
+}
+
+main()
+```
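+
+On the other end, each tab connects to the same worker and hands the resulting
+[`MessagePort`][] to `ipfs-message-port-client`. The sketch below shows only the
+standard [SharedWorker][] side of that; the worker file name (`./worker.js`) is
+an assumption for illustration, and how the port is consumed is covered in the
+`ipfs-message-port-client` README:
+
+```js
+// Runs in each tab (browsing context). Tabs that construct a SharedWorker
+// from the same script URL share a single worker, and therefore a single
+// js-ipfs node.
+const worker = new SharedWorker('./worker.js')
+
+// `worker.port` is the MessagePort paired with the one the server received
+// in its `onconnect` handler and passed to `server.connect(port)`.
+const port = worker.port
+```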
+
+
+### Notes on Performance
+
+Since data sent over the [message channel][] is copied via the
+[structured cloning algorithm][], it may lead to suboptimal results, especially
+with large binary data. To avoid unnecessary copying, the server transfers all
+passed [Transferable][]s, which are emptied on the server side. In general this
+should not be a problem, as the IPFS node itself does not retain references to
+returned values, but it is something to keep in mind when doing something
+custom.
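+
+For illustration, this is roughly how results are prepared in this package: each
+service method collects the underlying `ArrayBuffer`s into a `transfer` array
+returned alongside the encoded value (see `moveBuffer` in `src/core.js`), and the
+server passes that array as the transfer list of `postMessage` (see `handleQuery`
+in `src/server.js`). A minimal sketch of the same pattern (`encodeResult` and
+`reply` are illustrative names, not part of this package):
+
+```js
+// Encode a result and list the buffers that should be moved rather than copied.
+const encodeResult = (bytes) => {
+  /** @type {Transferable[]} */
+  const transfer = [bytes.buffer]
+  return { data: bytes, transfer }
+}
+
+// Post the value and hand its transfer list to postMessage, so the listed
+// ArrayBuffers are detached (emptied) on this side instead of being cloned.
+const reply = (port, id, value) => {
+  port.postMessage({ type: 'result', id, result: { ok: true, value } }, value.transfer || [])
+}
+```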
+
+
+[message channel]:https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel
+[SharedWorker]:https://developer.mozilla.org/en-US/docs/Web/API/SharedWorker
+[`MessagePort`]:https://developer.mozilla.org/en-US/docs/Web/API/MessagePort
+[structured cloning algorithm]:https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
+[Transferable]:https://developer.mozilla.org/en-US/docs/Web/API/Transferable
+[Blob]:https://developer.mozilla.org/en-US/docs/Web/API/Blob/Blob
+[File]:https://developer.mozilla.org/en-US/docs/Web/API/File
+
+
+## Contribute
+
+Contributions welcome. Please check out [the issues](https://github.com/ipfs/js-ipfs/issues).
+
+Check out our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general. Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md).
+
+## License
+
+[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fipfs%2Fjs-ipfs.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fipfs%2Fjs-ipfs?ref=badge_large)
diff --git a/packages/ipfs-message-port-server/package.json b/packages/ipfs-message-port-server/package.json
new file mode 100644
index 0000000000..7ffe733aa9
--- /dev/null
+++ b/packages/ipfs-message-port-server/package.json
@@ -0,0 +1,55 @@
+{
+ "name": "ipfs-message-port-server",
+ "version": "0.0.1",
+ "description": "IPFS server library for exposing IPFS node over message port",
+ "keywords": [
+ "ipfs",
+ "message-port",
+ "worker"
+ ],
+ "homepage": "https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs-message-port-server#readme",
+ "bugs": "https://github.com/ipfs/js-ipfs/issues",
+ "license": "(Apache-2.0 OR MIT)",
+ "leadMaintainer": "Alex Potsides ",
+ "files": [
+ "src",
+ "dist"
+ ],
+ "main": "src/index.js",
+ "browser": {},
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/ipfs/js-ipfs.git"
+ },
+ "scripts": {
+ "test": "aegir test",
+ "test:browser": "aegir test -t browser",
+ "test:webworker": "aegir test -t webworker",
+ "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless",
+ "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless",
+ "lint": "aegir lint",
+ "build": "aegir build",
+ "coverage": "npx nyc -r html npm run test:node -- --bail",
+ "clean": "rm -rf ./dist",
+ "dep-check": "aegir dep-check"
+ },
+ "dependencies": {
+ "cids": "^0.8.3",
+ "it-all": "^1.0.1"
+ },
+ "devDependencies": {
+ "ipfs-message-port-protocol": "~0.0.1",
+ "@types/it-all": "^1.0.0",
+ "ipfs": "^0.48.0",
+ "aegir": "^23.0.0",
+ "cross-env": "^7.0.0",
+ "interface-ipfs-core": "^0.138.0"
+ },
+ "engines": {
+ "node": ">=10.3.0",
+ "npm": ">=3.0.0"
+ },
+ "contributors": [
+ "Irakli Gozalishvili "
+ ]
+}
\ No newline at end of file
diff --git a/packages/ipfs-message-port-server/src/block.js b/packages/ipfs-message-port-server/src/block.js
new file mode 100644
index 0000000000..5078ad5a61
--- /dev/null
+++ b/packages/ipfs-message-port-server/src/block.js
@@ -0,0 +1,149 @@
+'use strict'
+
+const { Buffer } = require('buffer')
+const collect = require('it-all')
+const { encodeError } = require('ipfs-message-port-protocol/src/error')
+const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid')
+const {
+ decodeBlock,
+ encodeBlock
+} = require('ipfs-message-port-protocol/src/block')
+
+/**
+ * @typedef {import('./ipfs').IPFS} IPFS
+ * @typedef {import('cids')} CID
+ * @typedef {import('ipfs-message-port-protocol/src/error').EncodedError} EncodedError
+ * @typedef {import('ipfs-message-port-protocol/src/block').Block} Block
+ * @typedef {import('ipfs-message-port-protocol/src/cid').EncodedCID} EncodedCID
+ * @typedef {import('ipfs-message-port-protocol/src/block').EncodedBlock} EncodedBlock
+ * @typedef {RmEntry} Rm
+ * @typedef {StatResult} Stat
+ */
+
+/**
+ * @class
+ */
+class BlockService {
+ /**
+ * @param {IPFS} ipfs
+ */
+ constructor (ipfs) {
+ this.ipfs = ipfs
+ }
+
+ /**
+ * @typedef {Object} GetResult
+ * @property {EncodedBlock} block
+ * @property {Transferable[]} transfer
+ *
+ * @param {Object} query
+ * @param {EncodedCID} query.cid
+ * @param {number} [query.timeout]
+ * @param {AbortSignal} [query.signal]
+   * @returns {Promise<GetResult>}
+ */
+ async get (query) {
+ const cid = decodeCID(query.cid)
+ const block = await this.ipfs.block.get(cid, query)
+ /** @type {Transferable[]} */
+ const transfer = []
+ return { transfer, block: encodeBlock(block, transfer) }
+ }
+
+ /**
+ * @typedef {Object} PutResult
+ * @property {EncodedBlock} block
+ * @property {Transferable[]} transfer
+ *
+ * Stores input as an IPFS block.
+ * @param {Object} query
+ * @param {EncodedBlock|Uint8Array} query.block
+ * @param {EncodedCID|void} [query.cid]
+ * @param {string} [query.format]
+ * @param {string} [query.mhtype]
+ * @param {number} [query.mhlen]
+ * @param {number} [query.version]
+ * @param {boolean} [query.pin]
+ * @param {number} [query.timeout]
+ * @param {AbortSignal} [query.signal]
+   * @returns {Promise<PutResult>}
+ */
+ async put (query) {
+ const input = query.block
+ /** @type {Buffer|Block} */
+ const block =
+ input instanceof Uint8Array
+ ? Buffer.from(input.buffer, input.byteOffset, input.byteLength)
+ : decodeBlock(input)
+ const result = await this.ipfs.block.put(block, {
+ ...query,
+ cid: query.cid ? decodeCID(query.cid) : query.cid
+ })
+
+ /** @type {Transferable[]} */
+ const transfer = []
+ return { transfer, block: encodeBlock(result, transfer) }
+ }
+
+ /**
+ * Remove one or more IPFS block(s).
+ * @param {Object} query
+ * @param {EncodedCID[]} query.cids
+ * @param {boolean} [query.force]
+ * @param {boolean} [query.quiet]
+ * @param {number} [query.timeout]
+ * @param {AbortSignal} [query.signal]
+   * @returns {Promise<RmResult>}
+ *
+ * @typedef {RmEntry[]} RmResult
+ *
+ * @typedef {Object} RmEntry
+ * @property {EncodedCID} cid
+ * @property {EncodedError|undefined} [error]
+ */
+ async rm (query) {
+ /** @type {Transferable[]} */
+ const transfer = []
+ const result = await collect(
+ this.ipfs.block.rm(query.cids.map(decodeCID), query)
+ )
+
+ return result.map(entry => encodeRmEntry(entry, transfer))
+ }
+
+ /**
+ * Gets information of a raw IPFS block.
+ * @param {Object} query
+ * @param {EncodedCID} query.cid
+ * @param {number} [query.timeout]
+ * @param {AbortSignal} [query.signal]
+   * @returns {Promise<StatResult>}
+ *
+ * @typedef {Object} StatResult
+ * @property {EncodedCID} cid
+ * @property {number} size
+ */
+ async stat (query) {
+ const cid = decodeCID(query.cid)
+ const result = await this.ipfs.block.stat(cid, query)
+ return { ...result, cid: encodeCID(result.cid) }
+ }
+}
+
+/**
+ * @param {Object} entry
+ * @param {CID} entry.cid
+ * @param {Error|void} [entry.error]
+ * @param {Transferable[]} transfer
+ * @returns {RmEntry}
+ */
+const encodeRmEntry = (entry, transfer) => {
+ const cid = encodeCID(entry.cid, transfer)
+ if (entry.error) {
+ return { cid, error: encodeError(entry.error) }
+ } else {
+ return { cid }
+ }
+}
+
+exports.BlockService = BlockService
diff --git a/packages/ipfs-message-port-server/src/core.js b/packages/ipfs-message-port-server/src/core.js
new file mode 100644
index 0000000000..284f591154
--- /dev/null
+++ b/packages/ipfs-message-port-server/src/core.js
@@ -0,0 +1,368 @@
+'use strict'
+
+/* eslint-env browser */
+
+const {
+ decodeIterable,
+ encodeIterable,
+ decodeCallback
+} = require('ipfs-message-port-protocol/src/core')
+const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid')
+
+/**
+ * @typedef {import("./ipfs").IPFS} IPFS
+ * @typedef {import("ipfs-message-port-protocol/src/data").Time} Time
+ * @typedef {import("ipfs-message-port-protocol/src/data").UnixFSTime} UnixFSTime
+ * @typedef {import("ipfs-message-port-protocol/src/data").Mode} Mode
+ * @typedef {import("ipfs-message-port-protocol/src/data").HashAlg} HashAlg
+ * @typedef {import('ipfs-message-port-protocol/src/data').FileType} FileType
+ * @typedef {import('ipfs-message-port-protocol/src/cid').EncodedCID} EncodedCID
+ * @typedef {import("./ipfs").FileOutput} FileOutput
+ * @typedef {import('./ipfs').FileObject} FileObject
+ * @typedef {import('./ipfs').FileContent} DecodedFileContent
+ * @typedef {import('./ipfs').FileInput} DecodedFileInput
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/core').RemoteCallback<T>} RemoteCallback
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/core').RemoteIterable<T>} RemoteIterable
+ */
+
+/**
+ * @typedef {Object} AddOptions
+ * @property {string} [chunker]
+ * @property {number} [cidVersion]
+ * @property {boolean} [enableShardingExperiment]
+ * @property {HashAlg} [hashAlg]
+ * @property {boolean} [onlyHash]
+ * @property {boolean} [pin]
+ * @property {RemoteCallback|void} [progress]
+ * @property {boolean} [rawLeaves]
+ * @property {number} [shardSplitThreshold]
+ * @property {boolean} [trickle]
+ * @property {boolean} [wrapWithDirectory]
+ * @property {number} [timeout]
+ * @property {AbortSignal} [signal]
+ *
+ * @typedef {Object} AddAllInput
+ * @property {MultiFileInput} input
+ *
+ * @typedef {Object} AddInput
+ * @property {SingleFileInput} input
+ *
+ * @typedef {AddInput & AddOptions} AddQuery
+ * @typedef {AddAllInput & AddOptions} AddAllQuery
+ *
+ * @typedef {ArrayBuffer|ArrayBufferView|Blob|string|FileInput|RemoteIterable} SingleFileInput
+ * @typedef {RemoteIterable} MultiFileInput
+ *
+ * @typedef {Object} FileInput
+ * @property {string} [path]
+ * @property {FileContent} content
+ * @property {Mode} [mode]
+ * @property {Time} [mtime]
+ *
+ * @typedef {ArrayBufferView|ArrayBuffer|Blob|string|RemoteIterable<ArrayBufferView>|RemoteIterable<ArrayBuffer>} FileContent
+ *
+ * @typedef {Object} AddedEntry
+ * @property {string} path
+ * @property {EncodedCID} cid
+ * @property {number} mode
+ * @property {UnixFSTime} mtime
+ * @property {number} size
+ *
+ * @typedef {Object} CatQuery
+ * @property {string} path
+ * @property {number} [offset]
+ * @property {number} [length]
+ *
+ * @typedef {Object} GetQuery
+ * @property {string} path
+ *
+ * @typedef {RemoteIterable} GetResult
+ *
+ * @typedef {Object} FileEntry
+ * @property {string} path
+ * @property {RemoteIterable} content
+ * @property {Mode} [mode]
+ * @property {UnixFSTime} [mtime]
+ *
+ * @typedef {Object} LsQuery
+ * @property {string} path
+ *
+ * @typedef {RemoteIterable} LsResult
+ *
+ * @typedef {Object} LsEntry
+ * @property {number} depth
+ * @property {string} name
+ * @property {string} path
+ * @property {number} size
+ * @property {EncodedCID} cid
+ * @property {FileType} type
+ * @property {Mode} mode
+ * @property {UnixFSTime} mtime
+ */
+
+/**
+ * @class
+ */
+class CoreService {
+ /**
+ *
+ * @param {IPFS} ipfs
+ */
+ constructor (ipfs) {
+ this.ipfs = ipfs
+ }
+
+ /**
+ * @typedef {Object} AddAllResult
+   * @property {RemoteIterable<AddedEntry>} data
+ * @property {Transferable[]} transfer
+
+ * @param {AddAllQuery} query
+ * @returns {AddAllResult}
+ */
+ addAll (query) {
+ const { input } = query
+ const {
+ chunker,
+ cidVersion,
+ enableShardingExperiment,
+ hashAlg,
+ onlyHash,
+ pin,
+ progress,
+ rawLeaves,
+ shardSplitThreshold,
+ trickle,
+ wrapWithDirectory,
+ timeout,
+ signal
+ } = query
+
+ const options = {
+ chunker,
+ cidVersion,
+ enableShardingExperiment,
+ hashAlg,
+ onlyHash,
+ pin,
+ rawLeaves,
+ shardSplitThreshold,
+ trickle,
+ wrapWithDirectory,
+ timeout,
+ progress: progress != null ? decodeCallback(progress) : undefined,
+ signal
+ }
+
+ const content = decodeAddAllInput(input)
+ return encodeAddAllResult(this.ipfs.addAll(content, options))
+ }
+
+ /**
+ * @typedef {Object} AddResult
+ * @property {AddedEntry} data
+ * @property {Transferable[]} transfer
+
+ * @param {AddQuery} query
+   * @returns {Promise<AddResult>}
+ */
+ async add (query) {
+ const { input } = query
+ const {
+ chunker,
+ cidVersion,
+ enableShardingExperiment,
+ hashAlg,
+ onlyHash,
+ pin,
+ progress,
+ rawLeaves,
+ shardSplitThreshold,
+ trickle,
+ wrapWithDirectory,
+ timeout,
+ signal
+ } = query
+
+ const options = {
+ chunker,
+ cidVersion,
+ enableShardingExperiment,
+ hashAlg,
+ onlyHash,
+ pin,
+ rawLeaves,
+ shardSplitThreshold,
+ trickle,
+ wrapWithDirectory,
+ timeout,
+ progress: progress != null ? decodeCallback(progress) : undefined,
+ signal
+ }
+
+ const content = decodeAddInput(input)
+ return encodeAddResult(await this.ipfs.add(content, options))
+ }
+
+ /**
+ * @typedef {Object} CatResult
+ * @property {RemoteIterable} data
+ * @property {Transferable[]} transfer
+ *
+ * @param {Object} query
+ * @param {string|EncodedCID} query.path
+ * @param {number} [query.offset]
+ * @param {number} [query.length]
+ * @param {number} [query.timeout]
+ * @param {AbortSignal} [query.signal]
+ * @returns {CatResult}
+ */
+ cat (query) {
+ const { path, offset, length, timeout, signal } = query
+ const location = typeof path === 'string' ? path : decodeCID(path)
+ const content = this.ipfs.cat(location, { offset, length, timeout, signal })
+ return encodeCatResult(content)
+ }
+}
+// @returns {string|ArrayBufferView|ArrayBuffer|Blob|AsyncIterable|AsyncIterable|AsyncIterable|AsyncIterable|AsyncIterable}
+
+/**
+ * @param {MultiFileInput} input
+ * @returns {AsyncIterable}
+ */
+const decodeAddAllInput = input =>
+ decodeIterable(input, decodeFileInput)
+
+/**
+ * @param {SingleFileInput} input
+ * @returns {string|ArrayBufferView|ArrayBuffer|Blob|FileObject}
+ */
+const decodeAddInput = input =>
+ matchInput(
+ input,
+ /**
+ * @param {*} data
+ * @returns {*}
+ */
+ data => {
+ if (data.type === 'RemoteIterable') {
+ return { content: decodeIterable(data, decodeFileInput) }
+ } else {
+ return decodeFileInput(data)
+ }
+ }
+ )
+
+/**
+ * @param {ArrayBufferView|ArrayBuffer|string|Blob|FileInput} input
+ * @returns {string|ArrayBuffer|ArrayBufferView|Blob|FileObject}
+ */
+const decodeFileInput = input =>
+ matchInput(input, file => ({
+ ...file,
+ content: decodeFileContent(file.content)
+ }))
+
+/**
+ * @param {FileContent} content
+ * @returns {DecodedFileContent}
+ */
+const decodeFileContent = content =>
+ matchInput(content, input => decodeIterable(input, identity))
+
+/**
+ * @template I,O
+ * @param {string|ArrayBuffer|ArrayBufferView|Blob|I} input
+ * @param {function(I):O} decode
+ * @returns {string|ArrayBuffer|ArrayBufferView|Blob|O}
+ */
+const matchInput = (input, decode) => {
+ if (
+ typeof input === 'string' ||
+ input instanceof ArrayBuffer ||
+ input instanceof Blob ||
+ ArrayBuffer.isView(input)
+ ) {
+ return input
+ } else {
+ return decode(input)
+ }
+}
+
+/**
+ * @param {AsyncIterable<FileOutput>} out
+ * @returns {AddAllResult}
+ */
+const encodeAddAllResult = out => {
+ /** @type {Transferable[]} */
+ const transfer = []
+ return {
+ data: encodeIterable(out, encodeFileOutput, transfer),
+ transfer
+ }
+}
+
+/**
+ * @param {FileOutput} out
+ * @returns {AddResult}
+ */
+const encodeAddResult = out => {
+ /** @type {Transferable[]} */
+ const transfer = []
+ return {
+ data: encodeFileOutput(out, transfer),
+ transfer
+ }
+}
+
+/**
+ *
+ * @param {AsyncIterable<Buffer>} content
+ * @returns {CatResult}
+ */
+const encodeCatResult = content => {
+ /** @type {Transferable[]} */
+ const transfer = []
+ return { data: encodeIterable(content, moveBuffer, transfer), transfer }
+}
+
+/**
+ * Adds underlying `ArrayBuffer` to the transfer list.
+ * @param {Buffer} buffer
+ * @param {Transferable[]} transfer
+ * @returns {Buffer}
+ */
+const moveBuffer = (buffer, transfer) => {
+ transfer.push(buffer.buffer)
+ return buffer
+}
+
+/**
+ *
+ * @param {FileOutput} file
+ * @param {Transferable[]} _transfer
+ */
+
+const encodeFileOutput = (file, _transfer) => ({
+ ...file,
+ cid: encodeCID(file.cid)
+})
+
+/**
+ * @template T
+ * @param {T} v
+ * @returns {T}
+ */
+const identity = v => v
+
+exports.CoreService = CoreService
diff --git a/packages/ipfs-message-port-server/src/dag.js b/packages/ipfs-message-port-server/src/dag.js
new file mode 100644
index 0000000000..68900f8a0a
--- /dev/null
+++ b/packages/ipfs-message-port-server/src/dag.js
@@ -0,0 +1,152 @@
+'use strict'
+
+const { encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid')
+const { decodeNode, encodeNode } = require('ipfs-message-port-protocol/src/dag')
+const collect = require('it-all')
+
+/**
+ * @typedef {import('./ipfs').IPFS} IPFS
+ * @typedef {import('ipfs-message-port-protocol/src/cid').CID} CID
+ * @typedef {import('ipfs-message-port-protocol/src/cid').EncodedCID} EncodedCID
+ * @typedef {import('ipfs-message-port-protocol/src/dag').DAGNode} DAGNode
+ * @typedef {import('ipfs-message-port-protocol/src/dag').EncodedDAGNode} EncodedDAGNode
+ *
+ *
+ * @typedef {Object} DAGEntry
+ * @property {DAGNode} value
+ * @property {string} remainderPath
+ */
+
+/**
+ * @class
+ */
+class DAGService {
+ /**
+ * @param {IPFS} ipfs
+ */
+ constructor (ipfs) {
+ this.ipfs = ipfs
+ }
+
+ /**
+ * @param {Object} query
+ * @param {EncodedDAGNode} query.dagNode
+ * @param {string} [query.format]
+ * @param {string} [query.hashAlg]
+ * @param {EncodedCID|void} [query.cid]
+ * @param {boolean} [query.pin]
+ * @param {boolean} [query.preload]
+ * @param {number} [query.timeout]
+ * @param {AbortSignal} [query.signal]
+   * @returns {Promise<EncodedCID>}
+ */
+ async put (query) {
+ const dagNode = decodeNode(query.dagNode)
+
+ const cid = await this.ipfs.dag.put(dagNode, {
+ ...query,
+ cid: query.cid ? decodeCID(query.cid) : undefined
+ })
+ return encodeCID(cid)
+ }
+
+ /**
+ * @typedef {Object} GetResult
+ * @property {Transferable[]} transfer
+ * @property {string} remainderPath
+ * @property {EncodedDAGNode} value
+ *
+ * @typedef {Object} GetDAG
+ * @property {EncodedCID} cid
+ * @property {string} [path]
+ * @property {boolean} [localResolve]
+ * @property {number} [timeout]
+ * @property {AbortSignal} [signal]
+ *
+ * @param {GetDAG} query
+   * @returns {Promise<GetResult>}
+ */
+ async get (query) {
+ const { cid, path, localResolve, timeout, signal } = query
+ const { value, remainderPath } = await this.ipfs.dag.get(
+ decodeCID(cid),
+ {
+ path,
+ localResolve,
+ timeout,
+ signal
+ }
+ )
+
+ /** @type {Transferable[]} */
+ const transfer = []
+ return { remainderPath, value: encodeNode(value, transfer), transfer }
+ }
+
+ /**
+ * @typedef {Object} ResolveQuery
+ * @property {EncodedCID|string} cid
+ * @property {string} [path]
+ * @property {number} [timeout]
+ * @property {AbortSignal} [signal]
+ *
+ * @typedef {Object} ResolveResult
+ * @property {EncodedCID} cid
+ * @property {string|void} remainderPath
+ *
+ * @param {ResolveQuery} query
+   * @returns {Promise<ResolveResult>}
+ */
+ async resolve (query) {
+ const { cid, remainderPath } =
+ await this.ipfs.dag.resolve(decodePathOrCID(query.cid), query)
+
+ return {
+ cid: encodeCID(cid),
+ remainderPath
+ }
+ }
+
+ /**
+ * @typedef {Object} EnumerateDAG
+ * @property {EncodedCID} cid
+ * @property {string} [path]
+ * @property {boolean} [recursive]
+ * @property {number} [timeout]
+ * @property {AbortSignal} [signal]
+ *
+ * @param {EnumerateDAG} query
+   * @returns {Promise<string[]>}
+ */
+ async tree (query) {
+ const { cid, path, recursive, timeout, signal } = query
+ const result = await this.ipfs.dag.tree(decodeCID(cid), {
+ path,
+ recursive,
+ timeout,
+ signal
+ })
+ const entries = await collect(result)
+
+ return entries
+ }
+}
+
+/**
+ * @param {EncodedCID|string} input
+ * @returns {CID|string}
+ */
+const decodePathOrCID = (input) => {
+ if (typeof input === 'string') {
+ return input
+ } else {
+ return decodeCID(input)
+ }
+}
+
+/**
+ * @param {EncodedDAGNode} value
+ * @returns {DAGNode}
+ */
+
+exports.DAGService = DAGService
diff --git a/packages/ipfs-message-port-server/src/files.js b/packages/ipfs-message-port-server/src/files.js
new file mode 100644
index 0000000000..e88a724d2a
--- /dev/null
+++ b/packages/ipfs-message-port-server/src/files.js
@@ -0,0 +1,62 @@
+'use strict'
+
+/* eslint-env browser */
+
+const { encodeCID } = require('ipfs-message-port-protocol/src/cid')
+
+/**
+ * @typedef {import('ipfs-message-port-protocol/src/dag').EncodedCID} EncodedCID
+ */
+/**
+ * @typedef {import('ipfs-message-port-protocol/src/data').HashAlg} HashAlg
+ * @typedef {import('ipfs-message-port-protocol/src/data').Mode} Mode
+ * @typedef {import('./ipfs').IPFS} IPFS
+ * @typedef {Stat} EncodedStat
+ */
+
+/**
+ * @class
+ */
+class FilesService {
+ /**
+ *
+ * @param {IPFS} ipfs
+ */
+ constructor (ipfs) {
+ this.ipfs = ipfs
+ }
+
+ /**
+ * @typedef {Object} StatQuery
+ * @property {string} path
+ * @property {boolean} [hash=false]
+ * @property {boolean} [size=false]
+ * @property {boolean} [withLocal=false]
+ * @property {number} [timeout]
+ * @property {AbortSignal} [signal]
+ *
+ * @typedef {Object} Stat
+ * @property {EncodedCID} cid
+ * @property {number} size
+ * @property {number} cumulativeSize
+ * @property {'file'|'directory'} type
+ * @property {number} blocks
+ * @property {boolean} withLocality
+ * @property {boolean} local
+ * @property {number} sizeLocal
+ *
+ * @typedef {Object} StatResult
+ * @property {Stat} stat
+ * @property {Transferable[]} transfer
+ *
+ * @param {StatQuery} input
+   * @returns {Promise<StatResult>}
+ */
+ async stat (input) {
+ const stat = await this.ipfs.files.stat(input.path, input)
+ /** @type {Transferable[]} */
+ const transfer = []
+ return { stat: { ...stat, cid: encodeCID(stat.cid, transfer) }, transfer }
+ }
+}
+exports.FilesService = FilesService
diff --git a/packages/ipfs-message-port-server/src/index.js b/packages/ipfs-message-port-server/src/index.js
new file mode 100644
index 0000000000..119aa05894
--- /dev/null
+++ b/packages/ipfs-message-port-server/src/index.js
@@ -0,0 +1,20 @@
+'use strict'
+
+/* eslint-env browser */
+const { DAGService } = require('./dag')
+exports.DAGService = DAGService
+
+const { CoreService } = require('./core')
+exports.CoreService = CoreService
+
+const { FilesService } = require('./files')
+exports.FilesService = FilesService
+
+const { BlockService } = require('./block')
+exports.BlockService = BlockService
+
+const { IPFSService } = require('./service')
+exports.IPFSService = IPFSService
+
+const { Server } = require('./server')
+exports.Server = Server
diff --git a/packages/ipfs-message-port-server/src/ipfs.ts b/packages/ipfs-message-port-server/src/ipfs.ts
new file mode 100644
index 0000000000..1963b17bcb
--- /dev/null
+++ b/packages/ipfs-message-port-server/src/ipfs.ts
@@ -0,0 +1,239 @@
+import { DAGNode } from 'ipfs-message-port-protocol/src/dag'
+import CID from 'cids'
+import {
+ FileType,
+ UnixFSTime,
+ HashAlg,
+ Time,
+ CIDVersion
+} from 'ipfs-message-port-protocol/src/data'
+import { EncodedCID } from './block'
+import { ReadStream } from 'fs'
+
+type Mode = string | number
+export interface IPFS extends Core {
+ dag: DAG
+ files: Files
+ block: BlockService
+}
+
+export interface IPFSFactory {
+  create(): Promise<IPFS>
+}
+
+interface AbortOptions {
+ timeout?: number
+ signal?: AbortSignal
+}
+
+interface PutOptions extends AbortOptions {
+ format?: string | void
+ hashAlg?: string | void
+ cid?: CID | void
+ preload?: boolean
+ pin?: boolean
+}
+
+interface GetOptions extends AbortOptions {
+ path?: string,
+ localResolve?: boolean
+}
+
+interface ResolveOptions extends AbortOptions {
+ path?: string
+}
+
+interface TreeOptions extends AbortOptions {
+ path?: string,
+ recursive?: boolean
+}
+
+export interface DAG {
+  put(dagNode: DAGNode, options: PutOptions): Promise<CID>
+ get(cid: CID, options: GetOptions): Promise<{ value: DAGNode; remainderPath: string }>
+ resolve(pathOrCID: string | CID, options: ResolveOptions): Promise<{ cid: CID, remainderPath: string }>
+  tree(cid: CID, options: TreeOptions): AsyncIterable<string>
+}
+
+export interface Core {
+  addAll(inputs: AddAllInput, options: AddOptions): AsyncIterable<FileOutput>
+  add(input: AddInput, options: AddOptions): Promise<FileOutput>
+  cat(ipfsPath: CID | string, options: CatOptions): AsyncIterable<Buffer>
+}
+
+interface AddOptions extends AbortOptions {
+ chunker?: string
+ cidVersion?: number
+ enableShardingExperiment?: boolean
+ hashAlg?: HashAlg
+ onlyHash?: boolean
+ pin?: boolean
+ progress?: (progress: number) => void
+ rawLeaves?: boolean
+ shardSplitThreshold?: number
+ trickle?: boolean
+ wrapWithDirectory?: boolean
+}
+
+export type FileInput = {
+ path?: string
+ content?: FileContent
+ mode?: string | number | void
+ mtime?: Time
+}
+
+export type FileOutput = {
+ path: string
+ cid: CID
+ mode: number
+ mtime: { secs: number; nsecs: number }
+ size: number
+}
+
+interface CatOptions extends AbortOptions {
+ offset?: number
+ length?: number
+}
+
+export interface Files {
+  chmod(path: string | CID, mode: Mode, options?: ChmodOptions): Promise<void>
+
+ write(
+ path: string,
+ content: WriteContent,
+ options?: WriteOptions
+  ): Promise<WriteResult>
+
+  ls(path?: string, options?: LsOptions): AsyncIterable<LsEntry>
+
+  stat(path: string, options?: StatOptions): Promise<Stat>
+}
+
+interface ChmodOptions extends AbortOptions {
+ recursive?: boolean
+ flush?: boolean
+ hashAlg?: string
+ cidVersion?: number
+}
+
+interface LsOptions extends AbortOptions {
+ sort?: boolean
+}
+
+type LsEntry = {
+ name: string
+ type: FileType
+ size: number
+ cid: CID
+ mode: Mode
+ mtime: UnixFSTime
+}
+
+interface StatOptions extends AbortOptions {
+ hash?: boolean
+ size?: boolean
+ withLocal?: boolean
+}
+
+type Stat = {
+ cid: CID
+ size: number
+ cumulativeSize: number
+ type: 'file' | 'directory'
+ blocks: number
+ withLocality: boolean
+ local: boolean
+ sizeLocal: number
+}
+
+type WriteContent =
+ | string
+ | ArrayBufferView
+ | ArrayBuffer
+ | Blob
+ | AsyncIterable
+
+type AddInput =
+ | Blob
+ | string
+ | ArrayBufferView
+ | ArrayBuffer
+ | FileInput
+ | ReadStream
+
+
+type AddAllInput =
+  | Iterable<AddInput>
+  | AsyncIterable<AddInput>
+
+export type FileObject = {
+ path?: string
+ content?: FileContent
+ mode?: Mode
+ mtime?: Time
+}
+
+export type FileContent =
+ | string
+ | ArrayBufferView
+ | ArrayBuffer
+ | Blob
+ | Iterable
+ | AsyncIterable
+
+interface WriteOptions extends AbortOptions {
+ offset?: number
+ length?: number
+ create?: boolean
+ parents?: boolean
+ truncate?: boolean
+ rawLeaves?: boolean
+ mode?: Mode
+ mtime?: Time
+ flush?: boolean
+ hashAlg?: HashAlg
+ cidVersion?: CIDVersion
+}
+
+type WriteResult = {
+ cid: CID
+ size: number
+}
+
+interface Block {
+ cid: CID
+ data: Buffer
+}
+
+interface BlockService {
+  get(cid: CID, options?: GetBlockOptions): Promise<Block>
+  put(block: Block, options?: PutBlockOptions): Promise<Block>
+  put(buffer: Buffer, options?: PutBufferOptions): Promise<Block>
+ rm(
+ cid: CID | CID[],
+ options?: RmBlockOptions
+ ): AsyncIterable<{ cid: CID; error?: Error }>
+ stat(
+ cid: CID,
+ options?: StatBlockOptions
+ ): Promise<{ cid: CID; size: number }>
+}
+
+interface GetBlockOptions extends AbortOptions { }
+interface PutBlockOptions extends AbortOptions {
+ format?: string
+ mhtype?: string
+ mhlen?: number
+ version?: number
+ pin?: boolean
+}
+interface PutBufferOptions extends PutBlockOptions {
+ cid?: EncodedCID | void
+}
+
+interface RmBlockOptions extends AbortOptions {
+ force?: boolean
+ quiet?: boolean
+}
+
+interface StatBlockOptions extends AbortOptions { }
diff --git a/packages/ipfs-message-port-server/src/server.js b/packages/ipfs-message-port-server/src/server.js
new file mode 100644
index 0000000000..294abbd7c3
--- /dev/null
+++ b/packages/ipfs-message-port-server/src/server.js
@@ -0,0 +1,282 @@
+'use strict'
+
+/* eslint-env browser */
+
+const { encodeError } = require('ipfs-message-port-protocol/src/error')
+
+/**
+ * @typedef {import('ipfs-message-port-protocol/src/data').EncodedError} EncodedError
+ */
+
+/**
+ * @template X, T
+ * @typedef {import('ipfs-message-port-protocol/src/data').Result<X, T>} Result
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').ProcedureNames<T>} ProcedureNames
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').Method<T>} Method
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').Namespace<T>} Namespace
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').ServiceQuery<T>} ServiceQuery
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').RPCQuery<T>} RPCQuery
+
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').Inn<T>} Inn
+ */
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').Out<T>} Out
+ */
+
+/**
+ * @template T
+ * @typedef {Object} QueryMessage
+ * @property {'query'} type
+ * @property {Namespace} namespace
+ * @property {Method} method
+ * @property {string} id
+ * @property {Inn} input
+ */
+
+/**
+ * @typedef {Object} AbortMessage
+ * @property {'abort'} type
+ * @property {string} id
+ */
+
+/**
+ * @typedef {Object} TransferOptions
+ * @property {Transferable[]} [transfer]
+ */
+
+/**
+ * @template O
+ * @typedef {O & TransferOptions} QueryResult
+ */
+
+/**
+ * @template T
+ * @typedef {AbortMessage|QueryMessage} Message
+ */
+
+/**
+ * @template T, K
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').NamespacedQuery<T, K>} NamespacedQuery
+ */
+
+/**
+ * Represents a client query received on the server.
+ * @template T
+ * @extends {ServiceQuery}
+ */
+class Query {
+ /**
+ * @param {Namespace} namespace
+ * @param {Method} method
+ * @param {Inn} input
+ */
+ constructor (namespace, method, input) {
+ this.result = new Promise((resolve, reject) => {
+ this.succeed = resolve
+ this.fail = reject
+ this.namespace = namespace
+ this.method = method
+ this.input = input
+
+ this.abortController = new AbortController()
+ this.signal = this.abortController.signal
+ })
+ }
+
+ /**
+ * Aborts this query if it is still pending.
+ */
+ abort () {
+ this.abortController.abort()
+ this.fail(new AbortError())
+ }
+}
+
+/**
+ * @template T
+ * @typedef {import('ipfs-message-port-protocol/src/rpc').MultiService<T>} MultiService
+ */
+
+/**
+ * Server wraps `T` service and executes queries received from connected ports.
+ * @template T
+ */
+
+class Server {
+ /**
+   * @param {MultiService<T>} services
+ */
+ constructor (services) {
+ this.services = services
+    /** @type {Record<string, Query<T>>} */
+ this.queries = Object.create(null)
+ }
+
+ /**
+ * @param {MessagePort} port
+ */
+ connect (port) {
+ port.addEventListener('message', this)
+ port.start()
+ }
+
+ /**
+ * @param {MessagePort} port
+ */
+ disconnect (port) {
+ port.removeEventListener('message', this)
+ port.close()
+ }
+
+ /**
+ * Handles messages received from connected clients
+ * @param {MessageEvent} event
+ * @returns {void}
+ */
+ handleEvent (event) {
+ /** @type {Message} */
+ const data = event.data
+ switch (data.type) {
+ case 'query': {
+ this.handleQuery(
+ data.id,
+ new Query(data.namespace, data.method, data.input),
+ /** @type {MessagePort} */
+ (event.target)
+ )
+ return undefined
+ }
+ case 'abort': {
+ return this.abort(data.id)
+ }
+ default: {
+ throw new UnsupportedMessageError(event)
+ }
+ }
+ }
+
+ /**
+ * Abort query for the given id.
+ * @param {string} id
+ */
+ abort (id) {
+ const query = this.queries[id]
+ if (query) {
+ delete this.queries[id]
+ query.abort()
+ }
+ }
+
+ /**
+ * Handles query received from the client.
+ * @param {string} id
+ * @param {Query} query
+ * @param {MessagePort} port
+ */
+ async handleQuery (id, query, port) {
+ this.queries[id] = query
+ await this.run(query)
+ delete this.queries[id]
+ if (!query.signal.aborted) {
+ try {
+ const value = await query.result
+ port.postMessage(
+ { type: 'result', id, result: { ok: true, value } },
+ value.transfer || []
+ )
+ } catch (error) {
+ port.postMessage({
+ type: 'result',
+ id,
+ result: { ok: false, error: encodeError(error) }
+ })
+ }
+ }
+ }
+
+ /**
+ * @param {Query} query
+ * @returns {void}
+ */
+ run (query) {
+ const { services } = this
+ const { namespace, method } = query
+
+ const service = services[namespace]
+ if (service) {
+ if (typeof service[method] === 'function') {
+ try {
+ const result = service[method]({ ...query.input, signal: query.signal })
+ Promise.resolve(result).then(query.succeed, query.fail)
+ } catch (error) {
+ query.fail(error)
+ }
+ } else {
+ query.fail(new RangeError(`Method '${method}' is not found`))
+ }
+ } else {
+ query.fail(new RangeError(`Namespace '${namespace}' is not found`))
+ }
+ }
+
+ /**
+ * @param {RPCQuery} data
+ * @returns {Out}
+ */
+ execute (data) {
+ const query = new Query(data.namespace, data.method, data.input)
+ this.run(query)
+
+ return query.result
+ }
+}
+
+class UnsupportedMessageError extends RangeError {
+ /**
+ * @param {MessageEvent} event
+ */
+ constructor (event) {
+ super('Unexpected message was received by the server')
+ this.event = event
+ }
+
+ get name () {
+ return this.constructor.name
+ }
+}
+
+class AbortError extends Error {
+ get name () {
+ return this.constructor.name
+ }
+}
+
+exports.Query = Query
+exports.Server = Server
+exports.AbortError = AbortError
diff --git a/packages/ipfs-message-port-server/src/service.js b/packages/ipfs-message-port-server/src/service.js
new file mode 100644
index 0000000000..c4820acfce
--- /dev/null
+++ b/packages/ipfs-message-port-server/src/service.js
@@ -0,0 +1,27 @@
+'use strict'
+
+/* eslint-env browser */
+
+const { DAGService } = require('./dag')
+const { CoreService } = require('./core')
+const { FilesService } = require('./files')
+const { BlockService } = require('./block')
+
+/**
+ * @typedef {import('./ipfs').IPFS} IPFS
+ */
+
+class IPFSService {
+ /**
+ *
+ * @param {IPFS} ipfs
+ */
+ constructor (ipfs) {
+ this.dag = new DAGService(ipfs)
+ this.core = new CoreService(ipfs)
+ this.files = new FilesService(ipfs)
+ this.block = new BlockService(ipfs)
+ }
+}
+
+exports.IPFSService = IPFSService
diff --git a/packages/ipfs-message-port-server/test/basic.spec.js b/packages/ipfs-message-port-server/test/basic.spec.js
new file mode 100644
index 0000000000..fc90b4899d
--- /dev/null
+++ b/packages/ipfs-message-port-server/test/basic.spec.js
@@ -0,0 +1,44 @@
+'use strict'
+
+/* eslint-env mocha */
+const { Server } = require('../src/server')
+const { IPFSService } = require('../src/index')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+
+describe('dag', function () {
+ this.timeout(10 * 1000)
+
+ describe('Server', () => {
+ it('IPFSService', () => {
+ expect(IPFSService).to.be.a('function')
+ const service = new IPFSService()
+ expect(service).to.have.property('dag')
+ expect(service)
+ .to.have.nested.property('dag.put')
+ .be.a('function')
+ expect(service)
+ .to.have.nested.property('dag.get')
+ .be.a('function')
+ expect(service)
+ .to.have.nested.property('dag.tree')
+ .be.a('function')
+ })
+ it('Server', () => {
+ expect(Server).to.be.a('function')
+ const service = new IPFSService()
+ const server = new Server(service)
+
+ expect(server)
+ .to.have.property('connect')
+ .be.a('function')
+
+ expect(server)
+ .to.have.property('disconnect')
+ .be.a('function')
+
+ expect(server)
+ .to.have.property('execute')
+ .to.be.a('function')
+ })
+ })
+})
diff --git a/packages/ipfs-message-port-server/test/node.js b/packages/ipfs-message-port-server/test/node.js
new file mode 100644
index 0000000000..ccacec309b
--- /dev/null
+++ b/packages/ipfs-message-port-server/test/node.js
@@ -0,0 +1 @@
+'use strict'
diff --git a/packages/ipfs-message-port-server/tsconfig.json b/packages/ipfs-message-port-server/tsconfig.json
new file mode 100644
index 0000000000..ce5f448e9f
--- /dev/null
+++ b/packages/ipfs-message-port-server/tsconfig.json
@@ -0,0 +1,24 @@
+{
+ "compilerOptions": {
+ "allowJs": true,
+ "checkJs": true,
+ "forceConsistentCasingInFileNames": true,
+ "noImplicitReturns": true,
+ "noImplicitAny": true,
+ "noImplicitThis": true,
+ "noFallthroughCasesInSwitch": true,
+ "noUnusedLocals": true,
+ "noUnusedParameters": true,
+ "strictFunctionTypes": true,
+ "strictNullChecks": true,
+ "strictPropertyInitialization": true,
+ "strict": true,
+ "alwaysStrict": true,
+ "esModuleInterop": true,
+ "target": "ES5",
+ "noEmit": true
+ },
+ "exclude": ["dist", "node_modules"],
+ "include": ["src/**/*.js", "../ipfs-message-port-protocol/src/**/*.js"],
+ "compileOnSave": false
+}
diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json
index 51cfafe064..3e1ece846f 100644
--- a/packages/ipfs/package.json
+++ b/packages/ipfs/package.json
@@ -104,8 +104,8 @@
"ipfs-utils": "^2.2.2",
"ipld": "^0.26.2",
"ipld-bitcoin": "^0.3.0",
- "ipld-block": "^0.9.1",
- "ipld-dag-cbor": "^0.15.2",
+ "ipld-block": "^0.9.2",
+ "ipld-dag-cbor": "^0.15.3",
"ipld-dag-pb": "^0.19.0",
"ipld-ethereum": "^4.0.0",
"ipld-git": "^0.5.0",
@@ -159,7 +159,7 @@
"peer-id": "^0.13.12",
"pretty-bytes": "^5.3.0",
"progress": "^2.0.1",
- "protons": "^1.2.0",
+ "protons": "^1.2.1",
"semver": "^7.3.2",
"stream-to-it": "^0.2.1",
"streaming-iterables": "^5.0.0",