diff --git a/benchmarks/datastore.js b/benchmarks/datastore.js
index 25edc294..505d7cdf 100644
--- a/benchmarks/datastore.js
+++ b/benchmarks/datastore.js
@@ -2,42 +2,42 @@ import benchmark from 'nanobench'
import { createDataStore } from '../tests/helpers/datastore.js'
const datastore = await createDataStore({
- dataTypes: [
- {
- name: 'example',
- blockPrefix: '0',
- schema: {
- type: 'object',
- properties: {
- id: { type: 'string' },
- version: { type: 'string' },
- value: { type: 'string' },
- created: { type: 'number' },
- updated: { type: 'number' },
- timestamp: { type: 'number' },
- links: { type: 'array' },
- forks: { type: 'array' },
- authorId: { type: 'string' },
- },
- additionalProperties: false,
- },
- extraColumns: `
+ dataTypes: [
+ {
+ name: 'example',
+ blockPrefix: '0',
+ schema: {
+ type: 'object',
+ properties: {
+ id: { type: 'string' },
+ version: { type: 'string' },
+ value: { type: 'string' },
+ created: { type: 'number' },
+ updated: { type: 'number' },
+ timestamp: { type: 'number' },
+ links: { type: 'array' },
+ forks: { type: 'array' },
+ authorId: { type: 'string' },
+ },
+ additionalProperties: false,
+ },
+ extraColumns: `
value TEXT,
created INTEGER,
updated INTEGER,
timestamp INTEGER,
authorId TEXT
`,
- }
- ]
+ },
+ ],
})
benchmark('create', async (b) => {
- b.start()
- for (let i = 0; i < 1000; i = i + 1) {
- await datastore.create('example', {
- value: `value ${i}`,
- })
- }
- b.end()
+ b.start()
+ for (let i = 0; i < 1000; i = i + 1) {
+ await datastore.create('example', {
+ value: `value ${i}`,
+ })
+ }
+ b.end()
})
diff --git a/index.js b/index.js
deleted file mode 100644
index c9c5404f..00000000
--- a/index.js
+++ /dev/null
@@ -1,34 +0,0 @@
-export class Mapeo {
- #corestore
- #sqlite
-
- /**
- *
- * @param {Object} options
- * @param {import('corestore')} options.corestore
- * @param {import('./lib/sqlite').Sqlite} options.sqlite
- */
- constructor(options) {
- const { corestore, sqlite } = options
- this.#corestore = corestore
- this.#sqlite = sqlite
- }
-
- async ready() {}
-
- get keys() {
- return this.cores.map((core) => {
- return core.key.toString('hex')
- })
- }
-
- get cores() {
- return [...this.#corestore.cores.values()]
- }
-
- async sync() {}
-
- async syncAuthStore() {}
-
- async syncDataStores() {}
-}
diff --git a/lib/core-manager/messages.d.ts b/lib/core-manager/messages.d.ts
deleted file mode 100644
index f51aaf2a..00000000
--- a/lib/core-manager/messages.d.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/// <reference types="node" />
-import _m0 from "protobufjs/minimal.js";
-export interface ProjectExtension {
- authCoreKeys: Buffer[];
- wantCoreKeys: Buffer[];
-}
-export declare const ProjectExtension: {
- encode(message: ProjectExtension, writer?: _m0.Writer): _m0.Writer;
- decode(input: _m0.Reader | Uint8Array, length?: number): ProjectExtension;
-};
diff --git a/lib/core-manager/messages.js b/lib/core-manager/messages.js
deleted file mode 100644
index de6e4cf1..00000000
--- a/lib/core-manager/messages.js
+++ /dev/null
@@ -1,39 +0,0 @@
-/* eslint-disable */
-import _m0 from "protobufjs/minimal.js";
-function createBaseProjectExtension() {
- return { authCoreKeys: [], wantCoreKeys: [] };
-}
-export var ProjectExtension = {
- encode: function (message, writer) {
- if (writer === void 0) { writer = _m0.Writer.create(); }
- for (var _i = 0, _a = message.authCoreKeys; _i < _a.length; _i++) {
- var v = _a[_i];
- writer.uint32(10).bytes(v);
- }
- for (var _b = 0, _c = message.wantCoreKeys; _b < _c.length; _b++) {
- var v = _c[_b];
- writer.uint32(18).bytes(v);
- }
- return writer;
- },
- decode: function (input, length) {
- var reader = input instanceof _m0.Reader ? input : new _m0.Reader(input);
- var end = length === undefined ? reader.len : reader.pos + length;
- var message = createBaseProjectExtension();
- while (reader.pos < end) {
- var tag = reader.uint32();
- switch (tag >>> 3) {
- case 1:
- message.authCoreKeys.push(reader.bytes());
- break;
- case 2:
- message.wantCoreKeys.push(reader.bytes());
- break;
- default:
- reader.skipType(tag & 7);
- break;
- }
- }
- return message;
- }
-};
diff --git a/lib/datastore/README.md b/lib/datastore/README.md
deleted file mode 100644
index 283c284a..00000000
--- a/lib/datastore/README.md
+++ /dev/null
@@ -1,62 +0,0 @@
-# DataStore
-
-> Manage a collection of `DataType` instances.
-
-## Purpose
-
-The `DataStore` class is responsible for managing and indexing a collection of [`DataType`](../datatype/) instances.
-
-## Usage
-
-The `DataStore` class is used internally by the [`AuthStore`](../authstore/) and [`Mapeo`](../../index.js) classes.
-
-The API of this module is primarily a convenient wrapper around the [`DataType`](../datatype/) class.
-
-An example of `DataStore` usage taken from the [datastore tests](../../tests/datastore.js):
-
-```js
-const datastore = new DataStore({
- corestore,
- sqlite,
- keyPair,
- identityPublicKey: identityKeyPair.publicKey,
-})
-
-await datastore.ready()
-t.ok(datastore, 'datastore created')
-
-const example = await datastore.dataType({
- name: 'example',
- blockPrefix: '0',
- schema: {
- type: 'object',
- properties: {
- id: { type: 'string' },
- version: { type: 'string' },
- value: { type: 'string' },
- created: { type: 'number' },
- updated: { type: 'number' },
- timestamp: { type: 'number' },
- links: { type: 'array' },
- forks: { type: 'array' },
- authorId: { type: 'string' },
- },
- additionalProperties: false,
- },
- extraColumns: `
- value TEXT,
- created INTEGER,
- updated INTEGER,
- timestamp INTEGER,
- authorId TEXT
- `,
-})
-```
-
-## API docs
-
-TODO!
-
-## Tests
-
-Tests for this module are in [tests/datastore.js](../../tests/datastore.js)
diff --git a/lib/datastore/index.js b/lib/datastore/index.js
deleted file mode 100644
index 6147895b..00000000
--- a/lib/datastore/index.js
+++ /dev/null
@@ -1,260 +0,0 @@
-import MultiCoreIndexer from 'multi-core-indexer'
-import ram from 'random-access-memory'
-import b4a from 'b4a'
-
-import { DataType } from '../datatype/index.js'
-import { idToKey, keyToId } from '../utils.js'
-
-export class DataStore {
- #corestore
- #sqlite
- #indexer
- #identityPublicKey
- #dataTypesOptions
- #dataTypes = new Map()
-
- /** @type {KeyPair} */
- #keyPair
-
- /** @type {import('hypercore')} */
- #writer
-
- /**
- * @param {Object} options
- * @param {Buffer} options.identityPublicKey the public key of the identity
- * @param {KeyPair} options.keyPair the local writer hypercore
- * @param {import('corestore')} options.corestore
- * @param {DataTypeOptions[]} options.dataTypes
- * @param {import('../sqlite.js').Sqlite} options.sqlite an instance of the internal Sqlite class
- */
- constructor(options) {
- const { identityPublicKey, keyPair, corestore, sqlite, dataTypes } = options
-
- this.#identityPublicKey = identityPublicKey
- this.#keyPair = keyPair
- this.#corestore = corestore
- this.#sqlite = sqlite
- this.#dataTypesOptions = dataTypes
-
- this.#indexer = new MultiCoreIndexer(this.cores, {
- /**
- * @param {String} key
- */
- storage: (key) => {
- return new ram(key)
- },
- /**
- * @param {Entry[]} entries
- */
- batch: (entries) => {
- for (const entry of entries) {
- const { block } = entry
-
- const dataType = this.getDataTypeForBlock(block)
-
- if (!dataType) continue
-
- dataType.index([block])
- }
- },
- })
- }
-
- get cores() {
- return [...this.#corestore.cores.values()]
- }
-
- get dataTypes() {
- return [...this.#dataTypes.values()]
- }
-
- async ready() {
- await this.#corestore.ready()
-
- this.#writer = this.#corestore.get({
- keyPair: this.#keyPair,
- sparse: false,
- })
-
- await this.#writer.ready()
-
- for (const options of this.#dataTypesOptions) {
- const datatype = this.#dataType(options)
- await datatype.ready()
- }
-
- await this.#indexer.addCore(this.#writer)
- }
-
- /**
- * @typedef {Object} DataTypeOptions
- * @property {String} name
- * @property {Object} schema
- * @property {String} blockPrefix
- * @property {String} extraColumns
- *
- * Create a new DataType in the DataStore
- * @param {DataTypeOptions} options
- * @returns {DataType}
- */
- #dataType(options) {
- if (this.#dataTypes.has(options.name)) {
- return this.#dataTypes.get(options.name)
- }
-
- const { name, schema, extraColumns, blockPrefix } = options
-
- const dataType = new DataType({
- name,
- identityPublicKey: this.#identityPublicKey,
- core: this.#writer,
- schema,
- blockPrefix,
- corestore: this.#corestore,
- sqlite: this.#sqlite,
- extraColumns,
- })
-
- this.#dataTypes.set(name, dataType)
-
- for (const core of dataType.cores) {
- core.on('ready', () => {
- if (!b4a.compare(core.key, this.#writer.key)) {
- this.#indexer.addCore(dataType.core)
- }
- })
- }
-
- return dataType
- }
-
- /**
- * @param {string} name
- * @returns {DataType}
- */
- getDataType(name) {
- return this.dataTypes.find((dataType) => {
- return dataType.name === name
- })
- }
-
- /**
- * @param {Block} block
- * @returns {DataType}
- */
- getDataTypeForBlock(block) {
- return this.dataTypes.find((dataType) => {
- return dataType.isType(block)
- })
- }
-
- /**
- * @param {string} dataTypeName
- * @param {Doc} data
- * @returns {Promise}
- * @throws {Error}
- */
- async create(dataTypeName, data) {
- if (!this.#dataTypes.has(dataTypeName)) {
- throw new Error(`Data type ${dataTypeName} not found`)
- }
-
- const dataType = this.#dataTypes.get(dataTypeName)
- return dataType.create(data)
- }
-
- /**
- * @param {string} dataTypeName
- * @param {Doc} data
- * @returns {Promise}
- * @throws {Error}
- */
- async update(dataTypeName, data) {
- if (!this.#dataTypes.has(dataTypeName)) {
- throw new Error(`Data type ${dataTypeName} not found`)
- }
-
- const dataType = this.#dataTypes.get(dataTypeName)
- return dataType.update(data)
- }
-
- /**
- * Wait for the indexer to finish available blocks
- * @returns {Promise}
- */
- async indexing() {
- return new Promise((resolve) => {
- /** @type {ReturnType<typeof setTimeout>} */
- let timeoutId
-
- const onIdle = () => {
- clearTimeout(timeoutId)
- timeoutId = setTimeout(() => {
- this.#indexer.off('idle', onIdle)
- this.#indexer.off('indexing', onIndexing)
- resolve()
- }, 5)
- }
-
- if (this.#indexer.state.current === 'idle') {
- onIdle()
- }
-
- const onIndexing = () => {
- clearTimeout(timeoutId)
- }
-
- this.#indexer.on('idle', onIdle)
- this.#indexer.on('indexing', onIndexing)
- })
- }
-
- /**
- * @param {string} sql
- * @param {any[]} [params]
- * @returns {Doc[]}
- */
- query(sql, params) {
- return this.#sqlite.query(sql, params)
- }
-
- createReadStream() {
- return this.#writer.createReadStream(...arguments)
- }
-
- /**
- * @param {Boolean} isInitiator - a boolean indicating whether this device is initiating or responding to a connection
- * @param {Object} options - Options object passed to `corestore.replicate`
- */
- replicate(isInitiator, options) {
- return this.#corestore.replicate(isInitiator, options)
- }
-
- /**
- * Get a core by key
- * @param {PublicKey|String} coreKey
- * @param {object} [options] options object passed to `corestore.get`
- * @returns {Promise}
- */
- async getCore(coreKey, options) {
- const key = idToKey(coreKey)
-
- if (keyToId(key) === keyToId(this.#writer.key)) {
- return this.#writer
- }
-
- const core = this.#corestore.get({ key, sparse: false, ...options })
- await core.ready()
- this.#indexer.addCore(core)
- return core
- }
-
- /**
- * Close the datastore
- * @returns {Promise}
- */
- async close() {
- await this.#corestore.close()
- this.#sqlite.close()
- }
-}
diff --git a/lib/datatype/README.md b/lib/datatype/README.md
deleted file mode 100644
index 20b9200f..00000000
--- a/lib/datatype/README.md
+++ /dev/null
@@ -1,48 +0,0 @@
-# DataType
-
-> Create, read, update, delete, and query data.
-
-## Purpose
-
-The `DataType` class composes our [`Indexer` class](./indexer/) with a [`Corestore` instance](https://npmjs.com/corestore) used to store the local writer [hypercore](https://npmjs.com/hypercore) and all the relevant hypercores of peers in a project.
-
-## Usage
-
-The `DataType` class is used internally by the [`DataStore` class](../datastore/).
-
-Currently it isn't easily usable on its own as it assumes it is used along with [multi-core-indexer](https://npmjs.com/multi-core-indexer) as part of the `DataStore` class.
-
-A usage example with multi-core-indexer taken from the [DataType test helpers](../../tests/helpers/datatype.js):
-
-```js
-const dataType = new DataType({
- name,
- schema,
- blockPrefix,
- identityPublicKey: identityKeyPair.publicKey,
- corestore,
- keyPair,
- sqlite,
- extraColumns,
-})
-
-await dataType.ready()
-
-const cores = [...corestore.cores.values()]
-const indexer = new MultiCoreIndexer(cores, {
- storage: (key) => {
- return new ram(key)
- },
- batch: (entries) => {
- dataType.index(entries.map((entry) => entry.block))
- },
-})
-```
-
-## API docs
-
-TODO!
-
-## Tests
-
-Tests for this module are in [tests/datatype.js](../../tests/datatype.js)
diff --git a/lib/datatype/index.js b/lib/datatype/index.js
deleted file mode 100644
index 13cde045..00000000
--- a/lib/datatype/index.js
+++ /dev/null
@@ -1,333 +0,0 @@
-import b4a from 'b4a'
-import { randomBytes } from 'crypto'
-
-import { getBlockPrefix, idToKey, keyToId, parseVersion } from '../utils.js'
-import { Indexer } from './indexer/index.js'
-
-/**
- * @callback EncodeDataType
- * @param {Doc} doc
- * @returns {Block}
- */
-
-/**
- * @callback DecodeDataType
- * @param {Block} block
- * @returns {Doc}
- */
-
-/**
- * The DataType class provides methods for managing a single type of data.
- */
-export class DataType {
- #corestore
- #sqlite
- #indexer
- #writer
-
- /** @type {EncodeDataType} */
- #encode = function defaultEncode(obj) {
- const block = this.blockPrefix + JSON.stringify(obj)
- return b4a.from(block)
- }
-
- /** @type {DecodeDataType} */
- #decode = function defaultDecode(block) {
- const blockPrefix = getBlockPrefix(block)
-
- if (blockPrefix !== this.blockPrefix) {
- throw new Error(
- `Data processed by ${this.name} DataType with blockPrefix ${blockPrefix} found, expected ${this.blockPrefix}`
- )
- }
-
- return JSON.parse(b4a.toString(block, 'utf8', 1))
- }
-
- /**
- * @param {Object} options
- * @param {String} options.name the name of the data type used as sqlite table name
- * @param {String} options.blockPrefix the prefix used to identify the data type
- * @param {Object} options.schema the schema used to validate the data type
- * @param {String} options.extraColumns the extra columns to add to the sqlite table
- * @param {Buffer} options.identityPublicKey the public key of the identity
- * @param {import('hypercore')} options.core the local writer hypercore
- * @param {import('corestore')} options.corestore an instance of the [Corestore](https://npmjs.com/corestore) class
- * @param {import('../sqlite.js').Sqlite} options.sqlite an instance of the internal Sqlite class
- * @param {import('./indexer/index.js').Indexer} [options.indexer] an instance of the [Indexer](../indexer/) class
- * @param {EncodeDataType} [options.encode] a function to encode the data type
- * @param {DecodeDataType} [options.decode] a function to decode the data type
- */
- constructor(options) {
- const {
- name,
- identityPublicKey,
- core,
- schema,
- blockPrefix,
- corestore,
- indexer,
- sqlite,
- extraColumns,
- encode,
- decode,
- } = options
- this.name = name
- this.blockPrefix = blockPrefix
- this.schema = schema
- this.#corestore = corestore
- this.#sqlite = sqlite
-
- this.#indexer =
- indexer ||
- new Indexer({
- name: this.name,
- sqlite,
- extraColumns,
- })
-
- this.#writer = core
- this.identityPublicKey = identityPublicKey
- this.identityId = keyToId(identityPublicKey)
-
- if (encode) {
- this.#encode = encode
- }
-
- if (decode) {
- this.#decode = decode
- }
- }
-
- /**
- * Wait for the corestore and writer hypercore to be ready
- * @returns {Promise}
- */
- async ready() {
- await this.#corestore.ready()
- await this.#writer.ready()
- }
-
- /**
- * @param {Doc} doc
- * @returns {Block}
- */
- encode(doc) {
- return this.#encode(doc)
- }
-
- /**
- * @param {Block} block
- * @returns {Doc}
- */
- decode(block) {
- return this.#decode(block)
- }
-
- get key() {
- return this.#writer.key
- }
-
- get keys() {
- return this.cores.map((core) => {
- return core.key.toString('hex')
- })
- }
-
- get core() {
- return this.#writer
- }
-
- get cores() {
- return [...this.#corestore.cores.values()]
- }
-
- async info({ writerOnly = false } = {}) {
- if (writerOnly) {
- return this.#writer.info()
- }
-
- return Promise.all(
- this.cores.map(async (core) => {
- return core.info()
- })
- )
- }
-
- /**
- * @param {PublicKey} coreKey
- * @returns {import('hypercore')|undefined}
- */
- getCore(coreKey) {
- return this.cores.find((core) => core.key.equals(coreKey))
- }
-
- /**
- * Get a doc by id
- * @param {String} id
- * @returns {Doc}
- */
- getById(id) {
- return this.#sqlite.get(`select * from ${this.name} where id = '${id}'`)
- }
-
- /**
- * Get a doc by version
- * @param {String} version
- * @returns {Promise}
- */
- async getByVersion(version) {
- const { coreId, blockIndex } = parseVersion(version)
- const core = this.getCore(idToKey(coreId))
-
- if (core) {
- return /** @type {Promise} */ (core.get(blockIndex))
- }
- }
-
- /**
- * Get a block by version
- * @param {String} version
- * @returns {Promise}
- */
- async getBlockByVersion(version) {
- const { coreId, blockIndex } = parseVersion(version)
- const core = this.getCore(idToKey(coreId))
- if (core) {
- return /** @type {Promise} */ (core.get(blockIndex))
- }
- }
-
- /**
- * Put a doc
- * @param {Doc} data
- * @returns {Promise}
- */
- async #put(data) {
- await this.#writer.ready()
-
- const created = data.created || new Date().getTime()
- const doc = Object.assign({}, data, {
- id: data.id || randomBytes(8).toString('hex'),
- authorId: this.identityId,
- // TODO: remove the version property when integrating mapeo-schema
- version: `${this.#writer.key.toString('hex')}@${this.#writer.length}`,
- created,
- timestamp: created,
- })
-
- if (!doc.links) {
- doc.links = []
- }
-
- const encodedDoc = this.encode(doc)
- const indexing = new Promise((resolve) => {
- this.#indexer.onceWriteDoc(doc.version, (doc) => {
- resolve(doc)
- })
- })
-
- await this.#writer.append(encodedDoc)
- await indexing
- return doc
- }
-
- /**
- * Create a doc
- * @param {Doc} data
- * @returns {Promise}
- */
- async create(data) {
- return this.#put(data)
- }
-
- /**
- * Update a doc
- * @param {Doc} data
- * @returns {Promise}
- */
- async update(data) {
- const doc = Object.assign({}, data, {
- updated: new Date().getTime(),
- })
-
- await this.#shouldUpdate(doc)
-
- return this.#put(doc)
- }
-
- /**
- * Check if a doc should be updated
- * @param {Doc} doc
- * @throws {Error} if the doc should not be updated
- * @returns {Promise}
- */
- async #shouldUpdate(doc) {
- const { id, links } = doc
-
- if (!id) {
- throw new Error('Cannot update a doc without an id')
- }
-
- if (!links || !links.length) {
- throw new Error(
- 'Cannot update a doc without a link to the previous version'
- )
- }
-
- for (const version of links) {
- const block = await this.getBlockByVersion(version)
-
- if (!block) {
- throw new Error(`Block not found for version ${version}`)
- }
-
- if (!this.isType(block)) {
- throw new Error(
- `Block with version ${version} is not of type ${this.name}`
- )
- }
- }
- }
-
- /**
- * Check if a block is of this DataType
- * @param {Block} block
- * @returns {boolean}
- */
- isType(block) {
- return getBlockPrefix(block) === this.blockPrefix
- }
-
- /**
- * Index an array of blocks
- * @param {Block[]} blocks
- * @returns {void}
- */
- index(blocks) {
- const docs = blocks.map((block) => {
- return this.decode(block)
- })
-
- this.#indexer.batch(docs)
- }
-
- /**
- * Query indexed docs
- * @param {string} sql sql statement
- * @param {any[]} params
- * @returns {Doc[]}
- */
- query(sql, params) {
- return this.#sqlite.query(sql, params)
- }
-
- /**
- * Create a read stream of blocks from the local writer hypercore
- * @param {Object} [options]
- * @return {import('streamx').Readable}
- */
- createReadStream(options) {
- return this.#writer.createReadStream(options)
- }
-}
diff --git a/lib/datatype/indexer/README.md b/lib/datatype/indexer/README.md
deleted file mode 100644
index fffc5daa..00000000
--- a/lib/datatype/indexer/README.md
+++ /dev/null
@@ -1,21 +0,0 @@
-# Indexer
-
-> Index Mapeo data in Sqlite3
-
-## Purpose
-
-The `Indexer` class is a wrapper around a [`DataType` instance](../datatype/), an instance of [better-sqlite](https://npmjs.com/better-sqlite3), and internally instantiates [@mapeo/sqlite-indexer](https://npmjs.com/@mapeo/sqlite-indexer). It provides methods for querying data, adding batches of documents for indexing, and a way to listen for documents of specific versions to be indexed.
-
-## Usage
-
-This class is used internally in the [`DataStore` class](../datastore/) and isn't well-suited to use on its own.
-
-For similar functionality look into using a package like [@mapeo/sqlite-indexer](https://npmjs.com/@mapeo/sqlite-indexer) in combination with [multi-core-indexer](https://npmjs.com/multi-core-indexer) to implement indexing using a similar approach.
-
-## API docs
-
-TODO!
-
-## Tests
-
-Tests for this module are in [tests/indexer.js](../../tests/indexer.js)
diff --git a/lib/datatype/indexer/index.js b/lib/datatype/indexer/index.js
deleted file mode 100644
index 9d74d586..00000000
--- a/lib/datatype/indexer/index.js
+++ /dev/null
@@ -1,98 +0,0 @@
-import SqliteIndexer from '@mapeo/sqlite-indexer'
-
-/**
- * Internal indexer for Mapeo Core
- */
-export class Indexer {
- #sqlite
-
- /**
- * Create an indexer for a DataType
- * @param {object} options
- * @param {string} options.name the name of the DataType
- * @param {import('../../sqlite.js').Sqlite} options.sqlite an instance of the internal Sqlite class
- * @param {string} options.extraColumns any additional column definitions needed for this table, passed to `CREATE TABLE` statement
- */
- constructor(options) {
- const { name, sqlite, extraColumns } = options
-
- this.name = name
- this.#sqlite = sqlite
- this.extraColumns = extraColumns
-
- this.#sqlite.run(
- `CREATE TABLE IF NOT EXISTS ${this.name}
- (
- id TEXT PRIMARY KEY NOT NULL,
- version TEXT NOT NULL,
- links TEXT NOT NULL,
- forks TEXT NOT NULL
- ${this.extraColumns ? ', ' + this.extraColumns : ''}
- )
- WITHOUT ROWID`
- )
-
- this.#sqlite.run(
- `CREATE TABLE IF NOT EXISTS ${this.name}_backlinks
- (version TEXT PRIMARY KEY NOT NULL)
- WITHOUT ROWID`
- )
-
- this.sqliteIndexer = new SqliteIndexer(this.#sqlite.db, {
- docTableName: this.name,
- backlinkTableName: `${this.name}_backlinks`,
- extraColumns: this.extraColumns,
- })
- }
-
- /**
- * @typedef {string} DocVersion
- */
-
- /**
- * @callback IndexCallback
- * @param {IndexedDocument | IndexableDocument} doc
- */
-
- /**
- * Set a listener on a version of a doc that is called when it is finished indexing
- * @param {DocVersion} version
- * @param {IndexCallback} listener
- * @returns {void}
- */
- onceWriteDoc(version, listener) {
- this.sqliteIndexer.onceWriteDoc(version, listener)
- }
-
- /**
- * Index a batch of documents
- * @param {Doc[]} docs an array of docs
- * @returns {void}
- */
- batch(docs) {
- const flattenedDocs = docs.map((doc) => {
- for (const [key, value] of Object.entries(doc)) {
- if (
- typeof value === 'object' &&
- ['links', 'forks'].includes(key) === false
- ) {
- /* @ts-ignore */
- doc[key] = JSON.stringify(value)
- }
- }
- return doc
- })
-
- this.sqliteIndexer.batch(flattenedDocs)
- }
-
- /**
- * Query documents from the sqlite database
- * @param {string} sql
- * @param {any[]} params
- * @returns {Doc[]} an array of docs
- */
- query(sql, params) {
- return this.#sqlite.query(sql, params)
- }
-}
diff --git a/lib/rpc/messages.d.ts b/lib/rpc/messages.d.ts
deleted file mode 100644
index 8fdb6915..00000000
--- a/lib/rpc/messages.d.ts
+++ /dev/null
@@ -1,37 +0,0 @@
-/// <reference types="node" />
-import _m0 from "protobufjs/minimal.js";
-export interface Invite {
- projectKey: Buffer;
- encryptionKeys?: Invite_EncryptionKeys | undefined;
- projectConfig?: Buffer | undefined;
-}
-export interface Invite_EncryptionKeys {
- auth?: Buffer | undefined;
- data?: Buffer | undefined;
- blobIndex?: Buffer | undefined;
- blob?: Buffer | undefined;
-}
-export interface InviteResponse {
- projectKey: Buffer;
- decision: InviteResponse_Decision;
-}
-export declare enum InviteResponse_Decision {
- REJECT = "REJECT",
- ACCEPT = "ACCEPT",
- ALREADY = "ALREADY",
- UNRECOGNIZED = "UNRECOGNIZED"
-}
-export declare function inviteResponse_DecisionFromJSON(object: any): InviteResponse_Decision;
-export declare function inviteResponse_DecisionToNumber(object: InviteResponse_Decision): number;
-export declare const Invite: {
- encode(message: Invite, writer?: _m0.Writer): _m0.Writer;
- decode(input: _m0.Reader | Uint8Array, length?: number): Invite;
-};
-export declare const Invite_EncryptionKeys: {
- encode(message: Invite_EncryptionKeys, writer?: _m0.Writer): _m0.Writer;
- decode(input: _m0.Reader | Uint8Array, length?: number): Invite_EncryptionKeys;
-};
-export declare const InviteResponse: {
- encode(message: InviteResponse, writer?: _m0.Writer): _m0.Writer;
- decode(input: _m0.Reader | Uint8Array, length?: number): InviteResponse;
-};
diff --git a/lib/rpc/messages.js b/lib/rpc/messages.js
deleted file mode 100644
index f66f91ed..00000000
--- a/lib/rpc/messages.js
+++ /dev/null
@@ -1,162 +0,0 @@
-/* eslint-disable */
-import _m0 from "protobufjs/minimal.js";
-export var InviteResponse_Decision;
-(function (InviteResponse_Decision) {
- InviteResponse_Decision["REJECT"] = "REJECT";
- InviteResponse_Decision["ACCEPT"] = "ACCEPT";
- InviteResponse_Decision["ALREADY"] = "ALREADY";
- InviteResponse_Decision["UNRECOGNIZED"] = "UNRECOGNIZED";
-})(InviteResponse_Decision || (InviteResponse_Decision = {}));
-export function inviteResponse_DecisionFromJSON(object) {
- switch (object) {
- case 0:
- case "REJECT":
- return InviteResponse_Decision.REJECT;
- case 1:
- case "ACCEPT":
- return InviteResponse_Decision.ACCEPT;
- case 2:
- case "ALREADY":
- return InviteResponse_Decision.ALREADY;
- case -1:
- case "UNRECOGNIZED":
- default:
- return InviteResponse_Decision.UNRECOGNIZED;
- }
-}
-export function inviteResponse_DecisionToNumber(object) {
- switch (object) {
- case InviteResponse_Decision.REJECT:
- return 0;
- case InviteResponse_Decision.ACCEPT:
- return 1;
- case InviteResponse_Decision.ALREADY:
- return 2;
- case InviteResponse_Decision.UNRECOGNIZED:
- default:
- return -1;
- }
-}
-function createBaseInvite() {
- return { projectKey: Buffer.alloc(0) };
-}
-export var Invite = {
- encode: function (message, writer) {
- if (writer === void 0) { writer = _m0.Writer.create(); }
- if (message.projectKey.length !== 0) {
- writer.uint32(10).bytes(message.projectKey);
- }
- if (message.encryptionKeys !== undefined) {
- Invite_EncryptionKeys.encode(message.encryptionKeys, writer.uint32(18).fork()).ldelim();
- }
- if (message.projectConfig !== undefined) {
- writer.uint32(26).bytes(message.projectConfig);
- }
- return writer;
- },
- decode: function (input, length) {
- var reader = input instanceof _m0.Reader ? input : new _m0.Reader(input);
- var end = length === undefined ? reader.len : reader.pos + length;
- var message = createBaseInvite();
- while (reader.pos < end) {
- var tag = reader.uint32();
- switch (tag >>> 3) {
- case 1:
- message.projectKey = reader.bytes();
- break;
- case 2:
- message.encryptionKeys = Invite_EncryptionKeys.decode(reader, reader.uint32());
- break;
- case 3:
- message.projectConfig = reader.bytes();
- break;
- default:
- reader.skipType(tag & 7);
- break;
- }
- }
- return message;
- }
-};
-function createBaseInvite_EncryptionKeys() {
- return {};
-}
-export var Invite_EncryptionKeys = {
- encode: function (message, writer) {
- if (writer === void 0) { writer = _m0.Writer.create(); }
- if (message.auth !== undefined) {
- writer.uint32(10).bytes(message.auth);
- }
- if (message.data !== undefined) {
- writer.uint32(18).bytes(message.data);
- }
- if (message.blobIndex !== undefined) {
- writer.uint32(26).bytes(message.blobIndex);
- }
- if (message.blob !== undefined) {
- writer.uint32(34).bytes(message.blob);
- }
- return writer;
- },
- decode: function (input, length) {
- var reader = input instanceof _m0.Reader ? input : new _m0.Reader(input);
- var end = length === undefined ? reader.len : reader.pos + length;
- var message = createBaseInvite_EncryptionKeys();
- while (reader.pos < end) {
- var tag = reader.uint32();
- switch (tag >>> 3) {
- case 1:
- message.auth = reader.bytes();
- break;
- case 2:
- message.data = reader.bytes();
- break;
- case 3:
- message.blobIndex = reader.bytes();
- break;
- case 4:
- message.blob = reader.bytes();
- break;
- default:
- reader.skipType(tag & 7);
- break;
- }
- }
- return message;
- }
-};
-function createBaseInviteResponse() {
- return { projectKey: Buffer.alloc(0), decision: InviteResponse_Decision.REJECT };
-}
-export var InviteResponse = {
- encode: function (message, writer) {
- if (writer === void 0) { writer = _m0.Writer.create(); }
- if (message.projectKey.length !== 0) {
- writer.uint32(10).bytes(message.projectKey);
- }
- if (message.decision !== InviteResponse_Decision.REJECT) {
- writer.uint32(16).int32(inviteResponse_DecisionToNumber(message.decision));
- }
- return writer;
- },
- decode: function (input, length) {
- var reader = input instanceof _m0.Reader ? input : new _m0.Reader(input);
- var end = length === undefined ? reader.len : reader.pos + length;
- var message = createBaseInviteResponse();
- while (reader.pos < end) {
- var tag = reader.uint32();
- switch (tag >>> 3) {
- case 1:
- message.projectKey = reader.bytes();
- break;
- case 2:
- message.decision = inviteResponse_DecisionFromJSON(reader.int32());
- break;
- default:
- reader.skipType(tag & 7);
- break;
- }
- }
- return message;
- }
-};
diff --git a/package-lock.json b/package-lock.json
index efb06ee2..019bc864 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -19,6 +19,7 @@
"b4a": "^1.6.3",
"base32.js": "^0.1.0",
"better-sqlite3": "^8.3.0",
+ "compact-encoding": "^2.12.0",
"corestore": "^6.8.4",
"drizzle-orm": "^0.27.2",
"fastify-plugin": "^4.5.0",
@@ -57,6 +58,7 @@
"random-access-file": "^4.0.4",
"random-access-memory": "^6.2.0",
"rimraf": "^5.0.0",
+ "streamx": "^2.15.1",
"tempy": "^3.1.0",
"ts-proto": "^1.147.1",
"type-fest": "^3.10.0",
@@ -5942,9 +5944,9 @@
"license": "MIT"
},
"node_modules/streamx": {
- "version": "2.15.0",
- "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.15.0.tgz",
- "integrity": "sha512-HcxY6ncGjjklGs1xsP1aR71INYcsXFJet5CU1CHqihQ2J5nOsbd4OjgjHO42w/4QNv9gZb3BueV+Vxok5pLEXg==",
+ "version": "2.15.1",
+ "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.15.1.tgz",
+ "integrity": "sha512-fQMzy2O/Q47rgwErk/eGeLu/roaFWV0jVsogDmrszM9uIw8L5OA+t+V93MgYlufNptfjmYR1tOMWhei/Eh7TQA==",
"dependencies": {
"fast-fifo": "^1.1.0",
"queue-tick": "^1.0.1"
diff --git a/package.json b/package.json
index b18ce1d4..261149f4 100644
--- a/package.json
+++ b/package.json
@@ -6,13 +6,13 @@
"type": "module",
"exports": {
".": "./index.js",
- "./authstore": "./lib/authstore/index.js",
- "./datastore": "./lib/datastore/index.js",
- "./discovery": "./lib/discovery/index.js",
- "./datatype": "./lib/datatype/index.js",
- "./sqlite": "./lib/sqlite.js",
- "./blob-store": "./lib/blob-store/index.js",
- "./blob-server": "./lib/blob-server/index.js"
+ "./authstore": "./src/authstore/index.js",
+ "./datastore": "./src/datastore/index.js",
+ "./discovery": "./src/discovery/index.js",
+ "./datatype": "./src/datatype/index.js",
+ "./sqlite": "./src/sqlite.js",
+ "./blob-store": "./src/blob-store/index.js",
+ "./blob-server": "./src/blob-server/index.js"
},
"scripts": {
"lint": "eslint .",
@@ -23,7 +23,7 @@
"deps": "depcheck --ignore-dirs=docs,types --ignores=@types/*,typescript,typedoc,typedoc-plugin-markdown,@hyperswarm/testnet",
"doc": "typedoc --plugin typedoc-plugin-markdown --out docs/api",
"protobuf": "node ./scripts/build-messages.js",
- "db:generate:project": "drizzle-kit generate:sqlite --schema lib/schema/project.js --out drizzle/project"
+ "db:generate:project": "drizzle-kit generate:sqlite --schema src/schema/project.js --out drizzle/project"
},
"prettier": {
"semi": false,
@@ -62,6 +62,7 @@
"random-access-file": "^4.0.4",
"random-access-memory": "^6.2.0",
"rimraf": "^5.0.0",
+ "streamx": "^2.15.1",
"tempy": "^3.1.0",
"ts-proto": "^1.147.1",
"type-fest": "^3.10.0",
@@ -83,6 +84,7 @@
"b4a": "^1.6.3",
"base32.js": "^0.1.0",
"better-sqlite3": "^8.3.0",
+ "compact-encoding": "^2.12.0",
"corestore": "^6.8.4",
"drizzle-orm": "^0.27.2",
"fastify-plugin": "^4.5.0",
diff --git a/scripts/build-messages.js b/scripts/build-messages.js
index eedec636..6b0f63b7 100755
--- a/scripts/build-messages.js
+++ b/scripts/build-messages.js
@@ -2,7 +2,7 @@
import { execSync } from 'child_process'
import fs from 'fs'
-import rimraf from 'rimraf'
+import { rimraf } from 'rimraf'
import path from 'path'
const protoURL = new URL('../proto', import.meta.url)
@@ -12,8 +12,8 @@ const buildPath = path.join(protoURL.pathname, './build')
rimraf.sync(buildPath)
const destinations = {
- extensions: path.join(projectRootURL.pathname, './lib/core-manager'),
- rpc: path.join(projectRootURL.pathname, './lib/rpc')
+ extensions: path.join(projectRootURL.pathname, './src/core-manager'),
+ rpc: path.join(projectRootURL.pathname, './src/rpc'),
}
const command1 = 'buf generate .'
diff --git a/lib/authstore/README.md b/src/authstore/README.md
similarity index 100%
rename from lib/authstore/README.md
rename to src/authstore/README.md
diff --git a/lib/authstore/authtypes.js b/src/authstore/authtypes.js
similarity index 100%
rename from lib/authstore/authtypes.js
rename to src/authstore/authtypes.js
diff --git a/lib/authstore/index.js b/src/authstore/index.js
similarity index 100%
rename from lib/authstore/index.js
rename to src/authstore/index.js
diff --git a/lib/blob-server/fastify-plugin.js b/src/blob-server/fastify-plugin.js
similarity index 98%
rename from lib/blob-server/fastify-plugin.js
rename to src/blob-server/fastify-plugin.js
index b03175ee..56854046 100644
--- a/lib/blob-server/fastify-plugin.js
+++ b/src/blob-server/fastify-plugin.js
@@ -10,7 +10,7 @@ export default fp(blobServerPlugin, {
name: 'mapeo-blob-server',
})
-/** @typedef {import('../types').BlobId} BlobId */
+/** @typedef {import('../types.js').BlobId} BlobId */
/**
* @typedef {Object} BlobServerPluginOpts
diff --git a/lib/blob-server/index.js b/src/blob-server/index.js
similarity index 100%
rename from lib/blob-server/index.js
rename to src/blob-server/index.js
diff --git a/lib/blob-store/index.js b/src/blob-store/index.js
similarity index 97%
rename from lib/blob-store/index.js
rename to src/blob-store/index.js
index 258c1bbb..e30f78e4 100644
--- a/lib/blob-store/index.js
+++ b/src/blob-store/index.js
@@ -5,7 +5,7 @@ import { TypedEmitter } from 'tiny-typed-emitter'
import { LiveDownload } from './live-download.js'
/** @typedef {TypedEmitter<{ 'add-drive': (drive: import('hyperdrive')) => void }>} InternalDriveEmitter */
-/** @typedef {import('../types').BlobId} BlobId */
+/** @typedef {import('../types.js').BlobId} BlobId */
// prop = blob type name
// value = array of blob variants supported for that type
@@ -97,7 +97,7 @@ export class BlobStore {
* If no filter is specified, all blobs will be downloaded. If a filter is
* specified, then _only_ blobs that match the filter will be downloaded.
*
- * @param {import('../types').BlobFilter} [filter] Filter blob types and/or variants to download. Filter is { [BlobType]: BlobVariants[] }. At least one blob variant must be specified for each blob type.
+ * @param {import('../types.js').BlobFilter} [filter] Filter blob types and/or variants to download. Filter is { [BlobType]: BlobVariants[] }. At least one blob variant must be specified for each blob type.
* @param {object} options
* @param {AbortSignal} [options.signal] Optional AbortSignal to cancel in-progress download
* @returns EventEmitter with `.state` property, emits `state` with new state when it updates
diff --git a/lib/blob-store/live-download.js b/src/blob-store/live-download.js
similarity index 90%
rename from lib/blob-store/live-download.js
rename to src/blob-store/live-download.js
index 6f7b36db..7f7b969e 100644
--- a/lib/blob-store/live-download.js
+++ b/src/blob-store/live-download.js
@@ -36,10 +36,10 @@ export class LiveDownload extends TypedEmitter {
* @param {Iterable<import('hyperdrive')>} drives
* @param {import('./index.js').InternalDriveEmitter} emitter
* @param {object} options
- * @param {import('../types').BlobFilter} [options.filter] Filter blobs of specific types and/or sizes to download
+ * @param {import('../types.js').BlobFilter} [options.filter] Filter blobs of specific types and/or sizes to download
* @param {AbortSignal} [options.signal]
*/
- constructor (drives, emitter, { filter, signal }) {
+ constructor(drives, emitter, { filter, signal }) {
super()
this.#signal = signal
@@ -48,10 +48,10 @@ export class LiveDownload extends TypedEmitter {
}
/** @param {import('hyperdrive')} drive */
- const addDrive = drive => {
+ const addDrive = (drive) => {
const download = new DriveLiveDownload(drive, {
filter,
- signal
+ signal,
})
this.#driveLiveDownloads.add(download)
download.on('state', emitState)
@@ -75,7 +75,7 @@ export class LiveDownload extends TypedEmitter {
/**
* @returns {BlobDownloadState | BlobDownloadStateError}
*/
- get state () {
+ get state() {
return combineStates(this.#driveLiveDownloads, { signal: this.#signal })
}
}
@@ -101,10 +101,10 @@ export class DriveLiveDownload extends TypedEmitter {
* Like drive.download() but 'live',
* @param {import('hyperdrive')} drive
* @param {object} options
- * @param {import('../types').BlobFilter} [options.filter] Filter blobs of specific types and/or sizes to download
+ * @param {import('../types.js').BlobFilter} [options.filter] Filter blobs of specific types and/or sizes to download
* @param {AbortSignal} [options.signal]
*/
- constructor (drive, { filter, signal } = {}) {
+ constructor(drive, { filter, signal } = {}) {
super()
this.#drive = drive
this.#folders = filterToFolders(filter)
@@ -126,7 +126,7 @@ export class DriveLiveDownload extends TypedEmitter {
/**
* @returns {BlobDownloadState | BlobDownloadStateError}
*/
- get state () {
+ get state() {
if (this.#error)
return {
haveCount: this.#haveCount,
@@ -134,7 +134,7 @@ export class DriveLiveDownload extends TypedEmitter {
wantCount: this.#downloads.size,
wantBytes: this.#wantBytes,
error: this.#error,
- status: 'error'
+ status: 'error',
}
return {
haveCount: this.#haveCount,
@@ -148,11 +148,11 @@ export class DriveLiveDownload extends TypedEmitter {
? 'checking'
: this.#downloads.size > 0
? 'downloading'
- : 'downloaded'
+ : 'downloaded',
}
}
- async #start () {
+ async #start() {
const blobsCore = await this.#getBlobsCore()
/* c8 ignore next */
if (this.#signal?.aborted || !blobsCore) return // Can't get here in tests
@@ -164,7 +164,7 @@ export class DriveLiveDownload extends TypedEmitter {
const entryStream = this.#drive.list(folder, { recursive: true })
if (this.#signal) {
this.#signal.addEventListener('abort', () => entryStream.destroy(), {
- once: true
+ once: true,
})
}
for await (const entry of entryStream) {
@@ -186,10 +186,14 @@ export class DriveLiveDownload extends TypedEmitter {
// edits of blobs, so this should not be an issue. `keyEncoding` is
// necessary because hyperdrive stores file index data under the `files`
// sub-encoding key
- const historyStream = bee.createHistoryStream({ live: true, gt: seq, keyEncoding })
+ const historyStream = bee.createHistoryStream({
+ live: true,
+ gt: seq,
+ keyEncoding,
+ })
if (this.#signal) {
this.#signal.addEventListener('abort', () => historyStream.destroy(), {
- once: true
+ once: true,
})
}
for await (const entry of historyStream) {
@@ -221,7 +225,7 @@ export class DriveLiveDownload extends TypedEmitter {
*
* @returns {Promise}
*/
- async #getBlobsCore () {
+ async #getBlobsCore() {
if (this.#drive.blobs) return this.#drive.blobs.core
await this.#drive.ready()
await this.#drive.core.update({ wait: true })
@@ -243,7 +247,7 @@ export class DriveLiveDownload extends TypedEmitter {
}
/** @param {Error} e */
- #handleError (e) {
+ #handleError(e) {
this.#error = e
this.emit('state', this.state)
}
@@ -254,7 +258,7 @@ export class DriveLiveDownload extends TypedEmitter {
* @param {import('hypercore').default} core
* @param {{ blockOffset: number, blockLength: number, byteLength: number }} blob
*/
- async #processEntry (
+ async #processEntry(
core,
{ blockOffset: start, blockLength: length, byteLength }
) {
@@ -289,7 +293,7 @@ export class DriveLiveDownload extends TypedEmitter {
* @param {{ signal?: AbortSignal }} options
* @returns
*/
-export function combineStates (liveDownloads, { signal } = {}) {
+export function combineStates(liveDownloads, { signal } = {}) {
/** @type {BlobDownloadState | BlobDownloadStateError} */
let combinedState = {
haveCount: 0,
@@ -297,7 +301,7 @@ export function combineStates (liveDownloads, { signal } = {}) {
wantCount: 0,
wantBytes: 0,
error: null,
- status: 'downloaded'
+ status: 'downloaded',
}
for (const { state } of liveDownloads) {
combinedState.haveCount += state.haveCount
@@ -329,15 +333,15 @@ export function combineStates (liveDownloads, { signal } = {}) {
/**
* Convert a filter to an array of folders that need to be downloaded
*
- * @param {import('../types').BlobFilter} [filter]
+ * @param {import('../types.js').BlobFilter} [filter]
* @returns {string[]} array of folders that match the filter
*/
-function filterToFolders (filter) {
+function filterToFolders(filter) {
if (!filter) return ['/']
const folders = []
for (const [
type,
- variants
+ variants,
] of /** @type {import('type-fest').Entries<typeof filter>} */ (
Object.entries(filter)
)) {
@@ -356,14 +360,14 @@ function filterToFolders (filter) {
* @param {string[]} folders
* @returns {boolean}
*/
-function matchesFolder (path, folders) {
+function matchesFolder(path, folders) {
for (const folder of folders) {
if (path.startsWith(folder)) return true
}
return false
}
-/** @param {Pick<BlobId, 'type' | 'variant'>} opts */
-function makePath ({ type, variant }) {
+/** @param {Pick<BlobId, 'type' | 'variant'>} opts */
+function makePath({ type, variant }) {
return `/${type}/${variant}`
}
diff --git a/lib/core-manager/core-index.js b/src/core-manager/core-index.js
similarity index 93%
rename from lib/core-manager/core-index.js
rename to src/core-manager/core-index.js
index cb5fc905..314e25f7 100644
--- a/lib/core-manager/core-index.js
+++ b/src/core-manager/core-index.js
@@ -3,7 +3,6 @@ import crypto from 'hypercore-crypto'
/** @typedef {import('./index.js').Namespace} Namespace */
/** @typedef {import('./index.js').CoreRecord} CoreRecord */
-
/**
* An in-memory index of open cores.
*/
@@ -27,7 +26,7 @@ export class CoreIndex {
* @param {Namespace} options.namespace
* @param {boolean} [options.writer] Is this a writer core?
*/
- add ({ core, key, namespace, writer = false }) {
+ add({ core, key, namespace, writer = false }) {
const discoveryKey = crypto.discoveryKey(key)
const discoveryId = discoveryKey.toString('hex')
const record = { core, key, namespace }
@@ -43,7 +42,7 @@ export class CoreIndex {
* @param {Namespace} namespace
* @returns {CoreRecord[]}
*/
- getByNamespace (namespace) {
+ getByNamespace(namespace) {
const records = []
for (const record of this.#coresByDiscoveryId.values()) {
if (record.namespace === namespace) records.push(record)
@@ -57,7 +56,7 @@ export class CoreIndex {
* @param {Namespace} namespace
* @returns {CoreRecord}
*/
- getWriter (namespace) {
+ getWriter(namespace) {
const writerRecord = this.#writersByNamespace.get(namespace)
// Shouldn't happen, since we add all the writers in the constructor
if (!writerRecord)
@@ -71,7 +70,7 @@ export class CoreIndex {
* @param {string} discoveryId
* @returns {CoreRecord | undefined}
*/
- getByDiscoveryId (discoveryId) {
+ getByDiscoveryId(discoveryId) {
return this.#coresByDiscoveryId.get(discoveryId)
}
@@ -81,7 +80,7 @@ export class CoreIndex {
* @param {Buffer} coreKey
* @returns {CoreRecord | undefined}
*/
- getByCoreKey (coreKey) {
+ getByCoreKey(coreKey) {
const discoveryId = crypto.discoveryKey(coreKey).toString('hex')
return this.#coresByDiscoveryId.get(discoveryId)
}
diff --git a/lib/core-manager/index.js b/src/core-manager/index.js
similarity index 94%
rename from lib/core-manager/index.js
rename to src/core-manager/index.js
index e30f15f1..ae165108 100644
--- a/lib/core-manager/index.js
+++ b/src/core-manager/index.js
@@ -14,7 +14,7 @@ export const NAMESPACES = /** @type {const} */ ([
'auth',
'data',
'blobIndex',
- 'blob'
+ 'blob',
])
// WARNING: If changed once in production then we need a migration strategy
const TABLE = 'cores'
@@ -50,7 +50,7 @@ export class CoreManager extends TypedEmitter {
/** @type {'opened' | 'closing' | 'closed'} */
#state = 'opened'
- static get namespaces () {
+ static get namespaces() {
return NAMESPACES
}
@@ -63,13 +63,13 @@ export class CoreManager extends TypedEmitter {
* @param {Partial<Record<Namespace, Buffer>>} [options.encryptionKeys] Encryption keys for each namespace
* @param {import('hypercore').HypercoreStorage} options.storage Folder to store all hypercore data
*/
- constructor ({
+ constructor({
sqlite,
keyManager,
projectKey,
projectSecretKey,
encryptionKeys = {},
- storage
+ storage,
}) {
super()
assert(
@@ -131,11 +131,11 @@ export class CoreManager extends TypedEmitter {
this.#extension = this.#creatorCore.registerExtension('mapeo/project', {
onmessage: (data, peer) => {
this.#handleExtensionMessage(data, peer)
- }
+ },
})
}
- get creatorCore () {
+ get creatorCore() {
return this.#creatorCore
}
@@ -144,7 +144,7 @@ export class CoreManager extends TypedEmitter {
*
* @param {Namespace} namespace
*/
- getWriterCore (namespace) {
+ getWriterCore(namespace) {
return this.#coreIndex.getWriter(namespace)
}
@@ -154,7 +154,7 @@ export class CoreManager extends TypedEmitter {
* @param {Namespace} namespace
* @returns
*/
- getCores (namespace) {
+ getCores(namespace) {
return this.#coreIndex.getByNamespace(namespace)
}
@@ -164,7 +164,7 @@ export class CoreManager extends TypedEmitter {
* @param {Buffer} key
* @returns {Core | undefined}
*/
- getCoreByKey (key) {
+ getCoreByKey(key) {
const coreRecord = this.#coreIndex.getByCoreKey(key)
return coreRecord && coreRecord.core
}
@@ -173,7 +173,7 @@ export class CoreManager extends TypedEmitter {
* Close all open cores and end any replication streams
* TODO: gracefully close replication streams
*/
- async close () {
+ async close() {
this.#state = 'closing'
const promises = []
for (const { core } of this.#coreIndex) {
@@ -194,7 +194,7 @@ export class CoreManager extends TypedEmitter {
* @param {Namespace} namespace
* @returns {import('./core-index.js').CoreRecord}
*/
- addCore (key, namespace) {
+ addCore(key, namespace) {
return this.#addCore({ publicKey: key }, namespace, true)
}
@@ -206,7 +206,7 @@ export class CoreManager extends TypedEmitter {
* @param {boolean} [persist=false]
* @returns {import('./core-index.js').CoreRecord}
*/
- #addCore (keyPair, namespace, persist = false) {
+ #addCore(keyPair, namespace, persist = false) {
// No-op if core is already managed
const existingCore = this.#coreIndex.getByCoreKey(keyPair.publicKey)
if (existingCore) return existingCore
@@ -215,7 +215,7 @@ export class CoreManager extends TypedEmitter {
const writer = !!secretKey
const core = this.#corestore.get({
keyPair,
- encryptionKey: this.#encryptionKeys[namespace]
+ encryptionKey: this.#encryptionKeys[namespace],
})
// @ts-ignore - ensure key is defined before hypercore is ready
core.key = key
@@ -224,7 +224,7 @@ export class CoreManager extends TypedEmitter {
// **Hack** As soon as a peer is added, eagerly send a "want" for the entire
// core. This ensures that the peer sends back its entire bitfield.
// Otherwise this would only happen once we call core.download()
- core.on('peer-add', peer => {
+ core.on('peer-add', (peer) => {
if (core.length === 0) return
// **Warning** uses internal method, but should be covered by tests
peer._maybeWant(0, core.length)
@@ -251,7 +251,7 @@ export class CoreManager extends TypedEmitter {
if (rsm.state.enabledNamespaces.has(namespace)) {
core.replicate(stream)
} else {
- rsm.on('enable-namespace', function onNamespace (enabledNamespace) {
+ rsm.on('enable-namespace', function onNamespace(enabledNamespace) {
if (enabledNamespace !== namespace) return
if (!cores.has(core)) {
core.replicate(stream)
@@ -286,7 +286,7 @@ export class CoreManager extends TypedEmitter {
*
* @param {NoiseStream | ProtocolStream} noiseStream framed noise secret stream, i.e. @hyperswarm/secret-stream
*/
- replicate (noiseStream) {
+ replicate(noiseStream) {
if (this.#state !== 'opened') throw new Error('Core manager is closed')
if (/** @type {ProtocolStream} */ (noiseStream).noiseStream?.userData) {
console.warn(
@@ -305,7 +305,7 @@ export class CoreManager extends TypedEmitter {
// replicated to a stream if we want sharing of unknown auth cores to work.
protocol.pair(
{ protocol: 'hypercore/alpha' },
- /** @param {Buffer} discoveryKey */ discoveryKey => {
+ /** @param {Buffer} discoveryKey */ (discoveryKey) => {
this.#handleDiscoveryKey(discoveryKey, stream)
}
)
@@ -322,7 +322,7 @@ export class CoreManager extends TypedEmitter {
const replicationRecord = { stream, rsm, cores: replicatingCores }
this.#replications.add(replicationRecord)
- rsm.on('enable-namespace', namespace => {
+ rsm.on('enable-namespace', (namespace) => {
for (const { core } of this.getCores(namespace)) {
if (!replicatingCores.has(core)) {
core.replicate(stream)
@@ -343,7 +343,7 @@ export class CoreManager extends TypedEmitter {
* @param {Buffer} discoveryKey
* @param {any} stream
*/
- async #handleDiscoveryKey (discoveryKey, stream) {
+ async #handleDiscoveryKey(discoveryKey, stream) {
const discoveryId = discoveryKey.toString('hex')
const peer = await this.#findPeer(stream.remotePublicKey)
if (!peer) {
@@ -355,7 +355,7 @@ export class CoreManager extends TypedEmitter {
if (this.#coreIndex.getByDiscoveryId(discoveryId)) return
const message = ProjectExtension.encode({
wantCoreKeys: [discoveryKey],
- authCoreKeys: []
+ authCoreKeys: [],
}).finish()
this.#extension.send(message, peer)
}
@@ -363,9 +363,9 @@ export class CoreManager extends TypedEmitter {
/**
* @param {Buffer} publicKey
*/
- async #findPeer (publicKey) {
+ async #findPeer(publicKey) {
await this.#creatorCore.ready()
- return this.#creatorCore.peers.find(peer =>
+ return this.#creatorCore.peers.find((peer) =>
peer.remotePublicKey.equals(publicKey)
)
}
@@ -374,7 +374,7 @@ export class CoreManager extends TypedEmitter {
* @param {Buffer} data
* @param {any} peer
*/
- #handleExtensionMessage (data, peer) {
+ #handleExtensionMessage(data, peer) {
const { wantCoreKeys, authCoreKeys } = ProjectExtension.decode(data)
for (const discoveryKey of wantCoreKeys) {
const discoveryId = discoveryKey.toString('hex')
@@ -383,7 +383,7 @@ export class CoreManager extends TypedEmitter {
if (coreRecord.namespace === 'auth') {
const message = ProjectExtension.encode({
authCoreKeys: [coreRecord.key],
- wantCoreKeys: []
+ wantCoreKeys: [],
}).finish()
this.#extension.send(message, peer)
}
diff --git a/src/core-manager/messages.d.ts b/src/core-manager/messages.d.ts
new file mode 100644
index 00000000..31f38f70
--- /dev/null
+++ b/src/core-manager/messages.d.ts
@@ -0,0 +1,10 @@
+/// <reference types="node" />
+import _m0 from 'protobufjs/minimal.js'
+export interface ProjectExtension {
+ authCoreKeys: Buffer[]
+ wantCoreKeys: Buffer[]
+}
+export declare const ProjectExtension: {
+ encode(message: ProjectExtension, writer?: _m0.Writer): _m0.Writer
+ decode(input: _m0.Reader | Uint8Array, length?: number): ProjectExtension
+}
diff --git a/src/core-manager/messages.js b/src/core-manager/messages.js
new file mode 100644
index 00000000..ddea65a5
--- /dev/null
+++ b/src/core-manager/messages.js
@@ -0,0 +1,41 @@
+/* eslint-disable */
+import _m0 from 'protobufjs/minimal.js'
+function createBaseProjectExtension() {
+ return { authCoreKeys: [], wantCoreKeys: [] }
+}
+export var ProjectExtension = {
+ encode: function (message, writer) {
+ if (writer === void 0) {
+ writer = _m0.Writer.create()
+ }
+ for (var _i = 0, _a = message.authCoreKeys; _i < _a.length; _i++) {
+ var v = _a[_i]
+ writer.uint32(10).bytes(v)
+ }
+ for (var _b = 0, _c = message.wantCoreKeys; _b < _c.length; _b++) {
+ var v = _c[_b]
+ writer.uint32(18).bytes(v)
+ }
+ return writer
+ },
+ decode: function (input, length) {
+ var reader = input instanceof _m0.Reader ? input : new _m0.Reader(input)
+ var end = length === undefined ? reader.len : reader.pos + length
+ var message = createBaseProjectExtension()
+ while (reader.pos < end) {
+ var tag = reader.uint32()
+ switch (tag >>> 3) {
+ case 1:
+ message.authCoreKeys.push(reader.bytes())
+ break
+ case 2:
+ message.wantCoreKeys.push(reader.bytes())
+ break
+ default:
+ reader.skipType(tag & 7)
+ break
+ }
+ }
+ return message
+ },
+}
diff --git a/lib/core-manager/random-access-file-pool.js b/src/core-manager/random-access-file-pool.js
similarity index 92%
rename from lib/core-manager/random-access-file-pool.js
rename to src/core-manager/random-access-file-pool.js
index 512b3940..3e68bc89 100644
--- a/lib/core-manager/random-access-file-pool.js
+++ b/src/core-manager/random-access-file-pool.js
@@ -5,14 +5,14 @@
*/
export class RandomAccessFilePool {
/** @param {number} maxSize max number of file descriptors to use */
- constructor (maxSize) {
+ constructor(maxSize) {
this.maxSize = maxSize
/** @type {Set<import('random-access-file')>} */
this.active = new Set()
}
/** @param {import('random-access-file')} file */
- _onactive (file) {
+ _onactive(file) {
if (this.active.size >= this.maxSize) {
// Suspend the least recently inserted file. This manually iterates in
// insertion order, but only visits the first one (least recently inserted)
@@ -24,7 +24,7 @@ export class RandomAccessFilePool {
}
/** @param {import('random-access-file')} file */
- _oninactive (file) {
+ _oninactive(file) {
this.active.delete(file)
}
}
diff --git a/lib/core-manager/replication-state-machine.js b/src/core-manager/replication-state-machine.js
similarity index 94%
rename from lib/core-manager/replication-state-machine.js
rename to src/core-manager/replication-state-machine.js
index 890f4a40..534de4aa 100644
--- a/lib/core-manager/replication-state-machine.js
+++ b/src/core-manager/replication-state-machine.js
@@ -18,10 +18,10 @@ import { TypedEmitter } from 'tiny-typed-emitter'
export class ReplicationStateMachine extends TypedEmitter {
/** @type {ReplicationState} */
#state = {
- enabledNamespaces: new Set(['auth'])
+ enabledNamespaces: new Set(['auth']),
}
- get state () {
+ get state() {
return this.#state
}
@@ -30,7 +30,7 @@ export class ReplicationStateMachine extends TypedEmitter {
* to the replication stream
*
* @param {Namespace} namespace */
- enableNamespace (namespace) {
+ enableNamespace(namespace) {
if (this.#state.enabledNamespaces.has(namespace)) return
this.#state.enabledNamespaces.add(namespace)
this.emit('enable-namespace', namespace)
@@ -51,7 +51,7 @@ export class ReplicationStateMachine extends TypedEmitter {
* Should only be called when the stream is closed, because there is no
* obvious way to implement this otherwise.
*/
- disableAll () {
+ disableAll() {
if (!this.#state.enabledNamespaces.size) return
this.#state.enabledNamespaces.clear()
this.emit('state', this.#state)
diff --git a/src/datastore/README.md b/src/datastore/README.md
new file mode 100644
index 00000000..02236db3
--- /dev/null
+++ b/src/datastore/README.md
@@ -0,0 +1,53 @@
+# DataStore
+
+> Manage reading cores for indexing, and reading and writing documents to cores.
+
+## Purpose
+
+The `DataStore` class is an API over a CoreManager namespace. It is responsible for reading blocks for indexing from all cores in a namespace, writing new documents to the namespace writer core, and reading existing documents from any core in the namespace by `versionId`. `DataStore` does not write documents to an index itself; it only reads them for indexing, calling `IndexWriter.batch()` with entries read from the cores in the namespace it manages. Writes only resolve once `IndexWriter.batch()` resolves (i.e. once the document has been written to the SQLite index tables).
+
+## Usage
+
+The `DataStore` class is used internally by the [`DataType`](../datatype/) class.
+
+An example of `DataStore` usage taken from the [datastore tests](../../tests/datastore.js):
+
+```js
+const datastore = new DataStore({
+ coreManager,
+ indexWriter,
+ namespace: 'data',
+})
+
+/** @type {MapeoDoc} */
+const newObservation = await datastore.write(observationValue)
+/** @type {MapeoDoc} */
+const existingObservation = await datastore.read(versionId)
+
+datastore.on('index-state', ({ current, remaining, entriesPerSecond }) => {
+ if (current === 'idle') {
+ // indexing done for now
+ } else if (current === 'indexing') {
+ // show state to user that indexing is happening
+ }
+})
+
+const { current, remaining, entriesPerSecond } = datastore.getIndexState()
+```
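+
+Since writes only resolve after indexing, a small helper that waits for the
+indexer to go idle can be useful in tests. A minimal sketch, assuming the
+`index-state` event shape shown above (the helper name is hypothetical):
+
+```js
+// Resolve once the datastore reports an 'idle' index state
+function onceIdle(datastore) {
+  return new Promise((resolve) => {
+    datastore.on('index-state', function onState({ current }) {
+      if (current !== 'idle') return
+      datastore.off('index-state', onState)
+      resolve()
+    })
+  })
+}
+```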
+
+## API docs
+
+TODO!
+
+## Tests
+
+Tests for this module are in [tests/datastore.js](../../tests/datastore.js)
diff --git a/lib/datastore/data-store-new.js b/src/datastore/index.js
similarity index 96%
rename from lib/datastore/data-store-new.js
rename to src/datastore/index.js
index 5e82485e..ea8792e1 100644
--- a/lib/datastore/data-store-new.js
+++ b/src/datastore/index.js
@@ -10,7 +10,7 @@ import pDefer from 'p-defer'
* @typedef {import('@mapeo/schema').MapeoDoc} MapeoDoc
*/
/**
- * @typedef {import('../datatype/data-type-new.js').MapeoDocTablesMap} MapeoDocTablesMap
+ * @typedef {import('../datatype/index.js').MapeoDocTablesMap} MapeoDocTablesMap
*/
/**
* @typedef {object} DefaultEmitterEvents
@@ -50,7 +50,7 @@ export class DataStore extends TypedEmitter {
* @param {object} opts
* @param {import('../core-manager/index.js').CoreManager} opts.coreManager
* @param {TNamespace} opts.namespace
- * @param {import('../index-writer.js').IndexWriter} opts.indexWriter
+ * @param {import('../index-writer/index.js').IndexWriter} opts.indexWriter
* @param {MultiCoreIndexer.StorageParam} opts.storage
*/
constructor({ coreManager, namespace, indexWriter, storage }) {
diff --git a/src/datatype/README.md b/src/datatype/README.md
new file mode 100644
index 00000000..1eb63458
--- /dev/null
+++ b/src/datatype/README.md
@@ -0,0 +1,33 @@
+# DataType
+
+> Create, read, update, delete, and query data.
+
+## Purpose
+
+The `DataType` class implements CRUD methods for a particular Mapeo data type. Reads and queries are served from the SQLite indexes / materialized views. Historical data for a given `versionId` is read from the associated `DataStore`.
+
+## Usage
+
+`DataType` is exposed directly on the client API for data types that clients can read and write. It serves as an abstraction for reading indexed data and creating/updating documents for all data types stored in Mapeo core namespaces.
+
+A usage example:
+
+```js
+const dataType = new DataType({
+ table: observationTable, // Drizzle table schema definition
+ db, // Drizzle instance
+ dataStore, // DataStore instance
+})
+
+const observation = await dataType.getByDocId(id)
+const updated = await dataType.update(observation.versionId, newValue)
+const allObservations = await dataType.getMany()
+```
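+
+As a usage note, `getMany()` resolves to the current head of each document in
+the index, so iterating query results is plain array iteration. A short
+sketch, assuming the document fields shown in this repo's schemas:
+
+```js
+// Log the id and current head version of every indexed document
+for (const doc of await dataType.getMany()) {
+  console.log(doc.docId, doc.versionId)
+}
+```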
+
+## API docs
+
+TODO!
+
+## Tests
+
+Tests for this module are in [tests/datatype.js](../../tests/datatype.js)
diff --git a/lib/datatype/data-type-new.js b/src/datatype/index.js
similarity index 98%
rename from lib/datatype/data-type-new.js
rename to src/datatype/index.js
index 7b9c0bf1..7011a5f6 100644
--- a/lib/datatype/data-type-new.js
+++ b/src/datatype/index.js
@@ -44,7 +44,7 @@ function generateDate() {
}
/**
- * @template {import('../datastore/data-store-new.js').DataStore} TDataStore
+ * @template {import('../datastore/index.js').DataStore} TDataStore
* @template {TDataStore['schemas'][number]} TSchemaName
* @template {MapeoDocTablesMap[TSchemaName]} TTable
* @template {MapeoDocMap[TSchemaName]} TDoc
diff --git a/lib/discovery/README.md b/src/discovery/README.md
similarity index 100%
rename from lib/discovery/README.md
rename to src/discovery/README.md
diff --git a/lib/discovery/index.js b/src/discovery/index.js
similarity index 98%
rename from lib/discovery/index.js
rename to src/discovery/index.js
index 637b5a0c..f16072ee 100644
--- a/lib/discovery/index.js
+++ b/src/discovery/index.js
@@ -242,9 +242,9 @@ export class Discovery extends TypedEmitter {
return
}
-// const socketAddress = /** @type {import('net').AddressInfo} */ (
-// socket.address()
-// )
+ // const socketAddress = /** @type {import('net').AddressInfo} */ (
+ // socket.address()
+ // )
if (!socket.remoteAddress) {
console.error('Socket not connected')
@@ -262,7 +262,7 @@ export class Discovery extends TypedEmitter {
})
connection.on('connect', async () => {
- if(!connection.remotePublicKey || !connection.publicKey) return
+ if (!connection.remotePublicKey || !connection.publicKey) return
const remotePublicKey = encodeHex(connection.remotePublicKey)
if (remotePublicKey === this.identityPublicKey) {
@@ -270,18 +270,22 @@ export class Discovery extends TypedEmitter {
return
}
- const keepNew = !connection.isInitiator || b4a.compare(connection.publicKey, connection.remotePublicKey) > 0
+ const keepNew =
+ !connection.isInitiator ||
+ b4a.compare(connection.publicKey, connection.remotePublicKey) > 0
let existing = this.#peers.get(remotePublicKey)
if (existing && keepNew) {
// to handle close event while still negotiating connection
let closed = false
- let onClose = () => { closed = true }
+ let onClose = () => {
+ closed = true
+ }
connection.on('close', onClose)
connection.on('error', noop)
- try{
- await destroyConnection(existing.connection)
- }catch(e){
+ try {
+ await destroyConnection(existing.connection)
+ } catch (e) {
console.error('error destroying connection', e)
}
if (closed) return
@@ -1484,15 +1488,15 @@ export function decodeHex(str) {
return Buffer.from(str, 'hex')
}
-function noop(){}
+function noop() {}
/**
* @param {NoiseSecretStream} socket
* @returns {Promise}
*/
-async function destroyConnection (socket) {
+async function destroyConnection(socket) {
socket.on('error', noop)
- return new Promise(res => {
+ return new Promise((res) => {
socket.on('close', res)
})
}
diff --git a/src/index-writer/README.md b/src/index-writer/README.md
new file mode 100644
index 00000000..24c500a4
--- /dev/null
+++ b/src/index-writer/README.md
@@ -0,0 +1,38 @@
+# IndexWriter
+
+> Index documents by `docId` in a SQLite database
+
+## Purpose
+
+The `IndexWriter` class resolves the DAG (Directed Acyclic Graph) of versions for each `docId` to create a materialized view in SQLite of the current "head" of each document. A single `IndexWriter` is responsible for decoding hypercore entries and writing them to the appropriate table (one table for each Mapeo data type). Unknown entries / data types are ignored.
+
+## Usage
+
+`IndexWriter` is used by `DataStore` instances to write entries that are read from the cores in the namespace managed by each `DataStore`. It exposes a single method, `batch(entries)`, where an entry is:
+
+```ts
+type Entry = {
+ block: Buffer // raw data entry read from a hypercore
+ key: Buffer // public key of the hypercore where the block was read from
+ index: number // index of the block in the hypercore
+}
+```
+
+A usage example:
+
+```js
+const indexWriter = new IndexWriter({
+ tables, // Array of Drizzle table schema definitions to index
+ sqlite, // better-sqlite3 Database instance
+})
+
+indexWriter.batch(entries)
+```
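+
+For illustration, an entry can be assembled from a hypercore block and passed
+to `batch()` like this (a sketch, assuming `core` is a ready hypercore whose
+block at `index` holds an encoded Mapeo document):
+
+```js
+const index = 0
+// Read the raw block and index it along with its core's public key
+const block = await core.get(index)
+await indexWriter.batch([{ block, key: core.key, index }])
+```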
+
+## API docs
+
+TODO!
+
+## Tests
+
+There are no unit tests for the IndexWriter; test coverage will come from end-to-end / integration tests.
diff --git a/lib/index-writer.js b/src/index-writer/index.js
similarity index 93%
rename from lib/index-writer.js
rename to src/index-writer/index.js
index c550e6b4..893fbf1a 100644
--- a/lib/index-writer.js
+++ b/src/index-writer/index.js
@@ -1,10 +1,10 @@
import { decode } from '@mapeo/schema'
import SqliteIndexer from '@mapeo/sqlite-indexer'
import { getTableConfig } from 'drizzle-orm/sqlite-core'
-import { getBacklinkTableName } from './schema/utils.js'
+import { getBacklinkTableName } from '../schema/utils.js'
/**
- * @typedef {import('./datatype/data-type-new.js').MapeoDocTables} MapeoDocTables
+ * @typedef {import('../datatype/index.js').MapeoDocTables} MapeoDocTables
*/
/**
* @typedef {import('@mapeo/schema').MapeoDoc} MapeoDoc
diff --git a/src/index.js b/src/index.js
new file mode 100644
index 00000000..f06a9614
--- /dev/null
+++ b/src/index.js
@@ -0,0 +1 @@
+export { MapeoProject } from './mapeo-project.js'
diff --git a/lib/mapeo-project.js b/src/mapeo-project.js
similarity index 95%
rename from lib/mapeo-project.js
rename to src/mapeo-project.js
index e79854fc..e344c4a7 100644
--- a/lib/mapeo-project.js
+++ b/src/mapeo-project.js
@@ -3,9 +3,9 @@ import { drizzle } from 'drizzle-orm/better-sqlite3'
import { migrate } from 'drizzle-orm/better-sqlite3/migrator'
import { CoreManager } from './core-manager/index.js'
-import { DataStore } from './datastore/data-store-new.js'
-import { DataType } from './datatype/data-type-new.js'
-import { IndexWriter } from './index-writer.js'
+import { DataStore } from './datastore/index.js'
+import { DataType } from './datatype/index.js'
+import { IndexWriter } from './index-writer/index.js'
import { observationTable } from './schema/project.js'
import RandomAccessFile from 'random-access-file'
import RAM from 'random-access-memory'
diff --git a/lib/rpc/index.js b/src/rpc/index.js
similarity index 92%
rename from lib/rpc/index.js
rename to src/rpc/index.js
index 28868df6..4360c4d1 100644
--- a/lib/rpc/index.js
+++ b/src/rpc/index.js
@@ -15,7 +15,7 @@ const PROTOCOL_NAME = 'mapeo/rpc'
// when we switch to Typescript v5
const MESSAGE_TYPES = /** @type {const} */ ({
Invite: 0,
- InviteResponse: 1
+ InviteResponse: 1,
})
const MESSAGES_MAX_ID = Math.max.apply(null, [...Object.values(MESSAGE_TYPES)])
@@ -43,14 +43,14 @@ class Peer {
* @param {Buffer} options.publicKey
* @param {ReturnType} options.channel
*/
- constructor ({ publicKey, channel }) {
+ constructor({ publicKey, channel }) {
this.#publicKey = publicKey
this.#channel = channel
}
- get info () {
+ get info() {
return {
status: this.#state,
- id: keyToId(this.#publicKey)
+ id: keyToId(this.#publicKey),
}
}
/**
@@ -59,7 +59,7 @@ class Peer {
*
* @param {'connect' | 'disconnect'} type
*/
- action (type) {
+ action(type) {
switch (type) {
case 'connect':
/* c8 ignore next 3 */
@@ -82,20 +82,20 @@ class Peer {
}
}
/** @param {Invite} invite */
- sendInvite (invite) {
+ sendInvite(invite) {
this.#assertConnected()
const buf = Buffer.from(Invite.encode(invite).finish())
const messageType = MESSAGE_TYPES.Invite
this.#channel.messages[messageType].send(buf)
}
/** @param {InviteResponse} response */
- sendInviteResponse (response) {
+ sendInviteResponse(response) {
this.#assertConnected()
const buf = Buffer.from(InviteResponse.encode(response).finish())
const messageType = MESSAGE_TYPES.InviteResponse
this.#channel.messages[messageType].send(buf)
}
- #assertConnected () {
+ #assertConnected() {
if (this.#state === 'connected' && !this.#channel.closed) return
/* c8 ignore next */
throw new PeerDisconnectedError() // TODO: report error - this should not happen
@@ -113,7 +113,7 @@ export class MapeoRPC extends TypedEmitter {
/** @type {Map} */
#peers = new Map()
- constructor () {
+ constructor() {
super()
}
@@ -131,7 +131,7 @@ export class MapeoRPC extends TypedEmitter {
* @param {number} [options.timeout] timeout waiting for invite response before rejecting (default 1 minute)
* @returns {Promise}
*/
- async invite (peerId, { timeout, ...invite }) {
+ async invite(peerId, { timeout, ...invite }) {
const peer = this.#peers.get(peerId)
if (!peer) throw new UnknownPeerError('Unknown peer ' + peerId)
/** @type {Promise} */
@@ -157,12 +157,12 @@ export class MapeoRPC extends TypedEmitter {
peer.sendInvite(invite)
/** @type {typeof origResolve} */
- function resolve (value) {
+ function resolve(value) {
clearTimeout(timeoutId)
origResolve(value)
}
/** @type {typeof origReject} */
- function reject (reason) {
+ function reject(reason) {
clearTimeout(timeoutId)
origReject(reason)
}
@@ -177,7 +177,7 @@ export class MapeoRPC extends TypedEmitter {
* @param {InviteResponse['projectKey']} options.projectKey project key of the invite you are responding to
* @param {InviteResponse['decision']} options.decision response to invite, one of "ACCEPT", "REJECT", or "ALREADY" (already on project)
*/
- inviteResponse (peerId, options) {
+ inviteResponse(peerId, options) {
const peer = this.#peers.get(peerId)
if (!peer) throw new UnknownPeerError('Unknown peer ' + peerId)
peer.sendInviteResponse(options)
@@ -188,7 +188,7 @@ export class MapeoRPC extends TypedEmitter {
*
* @param {NoiseStream | ProtocolStream} stream a NoiseSecretStream from @hyperswarm/secret-stream
*/
- connect (stream) {
+ connect(stream) {
if (!stream.noiseStream) throw new Error('Invalid stream')
const protomux =
stream.userData && Protomux.isProtomux(stream.userData)
@@ -197,7 +197,7 @@ export class MapeoRPC extends TypedEmitter {
// noiseSecretStream.remotePublicKey can be null before the stream has
    // opened, so this helper awaits the open
- openedNoiseSecretStream(stream).then(stream => {
+ openedNoiseSecretStream(stream).then((stream) => {
if (stream.destroyed) return
const { remotePublicKey } = stream
@@ -209,7 +209,7 @@ export class MapeoRPC extends TypedEmitter {
for (const [type, id] of Object.entries(MESSAGE_TYPES)) {
messages[id] = {
encoding: cenc.raw,
- onmessage: this.#handleMessage.bind(this, remotePublicKey, type)
+ onmessage: this.#handleMessage.bind(this, remotePublicKey, type),
}
}
@@ -218,7 +218,7 @@ export class MapeoRPC extends TypedEmitter {
protocol: PROTOCOL_NAME,
messages,
onopen: this.#openPeer.bind(this, remotePublicKey),
- onclose: this.#closePeer.bind(this, remotePublicKey)
+ onclose: this.#closePeer.bind(this, remotePublicKey),
})
channel.open()
@@ -237,7 +237,7 @@ export class MapeoRPC extends TypedEmitter {
}
/** @param {Buffer} publicKey */
- #openPeer (publicKey) {
+ #openPeer(publicKey) {
const peerId = keyToId(publicKey)
const peer = this.#peers.get(peerId)
/* c8 ignore next */
@@ -250,7 +250,7 @@ export class MapeoRPC extends TypedEmitter {
}
/** @param {Buffer} publicKey */
- #closePeer (publicKey) {
+ #closePeer(publicKey) {
const peerId = publicKey.toString('hex')
const peer = this.#peers.get(peerId)
/* c8 ignore next */
@@ -263,17 +263,17 @@ export class MapeoRPC extends TypedEmitter {
this.#emitPeers()
}
- get peers () {
+ get peers() {
return /** @type {PeerInfo[]} */ (
[...this.#peers.values()]
- .map(peer => peer.info)
+ .map((peer) => peer.info)
// A peer is only 'connecting' for a single tick, so to avoid complex
// async code around sending messages we don't expose 'connecting' peers
- .filter(peerInfo => peerInfo.status !== 'connecting')
+ .filter((peerInfo) => peerInfo.status !== 'connecting')
)
}
- #emitPeers () {
+ #emitPeers() {
this.emit('peers', this.peers)
}
@@ -283,7 +283,7 @@ export class MapeoRPC extends TypedEmitter {
* @param {keyof typeof MESSAGE_TYPES} type
* @param {Buffer} value
*/
- #handleMessage (peerPublicKey, type, value) {
+ #handleMessage(peerPublicKey, type, value) {
const peerId = keyToId(peerPublicKey)
const peer = this.#peers.get(peerId)
/* c8 ignore next */
@@ -310,14 +310,14 @@ export class MapeoRPC extends TypedEmitter {
}
/* c8 ignore next 2 */
default:
- // TODO: report unhandled message error
+ // TODO: report unhandled message error
}
}
}
export class TimeoutError extends Error {
/** @param {string} [message] */
- constructor (message) {
+ constructor(message) {
super(message)
this.name = 'TimeoutError'
}
@@ -325,7 +325,7 @@ export class TimeoutError extends Error {
export class UnknownPeerError extends Error {
/** @param {string} [message] */
- constructor (message) {
+ constructor(message) {
super(message)
this.name = 'UnknownPeerError'
}
@@ -333,7 +333,7 @@ export class UnknownPeerError extends Error {
export class PeerDisconnectedError extends Error {
/** @param {string} [message] */
- constructor (message) {
+ constructor(message) {
super(message)
this.name = 'PeerDisconnectedError'
}
diff --git a/src/rpc/messages.d.ts b/src/rpc/messages.d.ts
new file mode 100644
index 00000000..3f2e391c
--- /dev/null
+++ b/src/rpc/messages.d.ts
@@ -0,0 +1,41 @@
+///
+import _m0 from 'protobufjs/minimal.js'
+export interface Invite {
+ projectKey: Buffer
+ encryptionKeys?: Invite_EncryptionKeys | undefined
+ projectConfig?: Buffer | undefined
+}
+export interface Invite_EncryptionKeys {
+ auth?: Buffer | undefined
+ data?: Buffer | undefined
+ blobIndex?: Buffer | undefined
+ blob?: Buffer | undefined
+}
+export interface InviteResponse {
+ projectKey: Buffer
+ decision: InviteResponse_Decision
+}
+export declare enum InviteResponse_Decision {
+ REJECT = 'REJECT',
+ ACCEPT = 'ACCEPT',
+ ALREADY = 'ALREADY',
+ UNRECOGNIZED = 'UNRECOGNIZED',
+}
+export declare function inviteResponse_DecisionFromJSON(
+ object: any
+): InviteResponse_Decision
+export declare function inviteResponse_DecisionToNumber(
+ object: InviteResponse_Decision
+): number
+export declare const Invite: {
+ encode(message: Invite, writer?: _m0.Writer): _m0.Writer
+ decode(input: _m0.Reader | Uint8Array, length?: number): Invite
+}
+export declare const Invite_EncryptionKeys: {
+ encode(message: Invite_EncryptionKeys, writer?: _m0.Writer): _m0.Writer
+ decode(input: _m0.Reader | Uint8Array, length?: number): Invite_EncryptionKeys
+}
+export declare const InviteResponse: {
+ encode(message: InviteResponse, writer?: _m0.Writer): _m0.Writer
+ decode(input: _m0.Reader | Uint8Array, length?: number): InviteResponse
+}
diff --git a/src/rpc/messages.js b/src/rpc/messages.js
new file mode 100644
index 00000000..76f2d2af
--- /dev/null
+++ b/src/rpc/messages.js
@@ -0,0 +1,177 @@
+/* eslint-disable */
+import _m0 from 'protobufjs/minimal.js'
+export var InviteResponse_Decision
+;(function (InviteResponse_Decision) {
+ InviteResponse_Decision['REJECT'] = 'REJECT'
+ InviteResponse_Decision['ACCEPT'] = 'ACCEPT'
+ InviteResponse_Decision['ALREADY'] = 'ALREADY'
+ InviteResponse_Decision['UNRECOGNIZED'] = 'UNRECOGNIZED'
+})(InviteResponse_Decision || (InviteResponse_Decision = {}))
+export function inviteResponse_DecisionFromJSON(object) {
+ switch (object) {
+ case 0:
+ case 'REJECT':
+ return InviteResponse_Decision.REJECT
+ case 1:
+ case 'ACCEPT':
+ return InviteResponse_Decision.ACCEPT
+ case 2:
+ case 'ALREADY':
+ return InviteResponse_Decision.ALREADY
+ case -1:
+ case 'UNRECOGNIZED':
+ default:
+ return InviteResponse_Decision.UNRECOGNIZED
+ }
+}
+export function inviteResponse_DecisionToNumber(object) {
+ switch (object) {
+ case InviteResponse_Decision.REJECT:
+ return 0
+ case InviteResponse_Decision.ACCEPT:
+ return 1
+ case InviteResponse_Decision.ALREADY:
+ return 2
+ case InviteResponse_Decision.UNRECOGNIZED:
+ default:
+ return -1
+ }
+}
+function createBaseInvite() {
+ return { projectKey: Buffer.alloc(0) }
+}
+export var Invite = {
+ encode: function (message, writer) {
+ if (writer === void 0) {
+ writer = _m0.Writer.create()
+ }
+ if (message.projectKey.length !== 0) {
+ writer.uint32(10).bytes(message.projectKey)
+ }
+ if (message.encryptionKeys !== undefined) {
+ Invite_EncryptionKeys.encode(
+ message.encryptionKeys,
+ writer.uint32(18).fork()
+ ).ldelim()
+ }
+ if (message.projectConfig !== undefined) {
+ writer.uint32(26).bytes(message.projectConfig)
+ }
+ return writer
+ },
+ decode: function (input, length) {
+ var reader = input instanceof _m0.Reader ? input : new _m0.Reader(input)
+ var end = length === undefined ? reader.len : reader.pos + length
+ var message = createBaseInvite()
+ while (reader.pos < end) {
+ var tag = reader.uint32()
+ switch (tag >>> 3) {
+ case 1:
+ message.projectKey = reader.bytes()
+ break
+ case 2:
+ message.encryptionKeys = Invite_EncryptionKeys.decode(
+ reader,
+ reader.uint32()
+ )
+ break
+ case 3:
+ message.projectConfig = reader.bytes()
+ break
+ default:
+ reader.skipType(tag & 7)
+ break
+ }
+ }
+ return message
+ },
+}
+function createBaseInvite_EncryptionKeys() {
+ return {}
+}
+export var Invite_EncryptionKeys = {
+ encode: function (message, writer) {
+ if (writer === void 0) {
+ writer = _m0.Writer.create()
+ }
+ if (message.auth !== undefined) {
+ writer.uint32(10).bytes(message.auth)
+ }
+ if (message.data !== undefined) {
+ writer.uint32(18).bytes(message.data)
+ }
+ if (message.blobIndex !== undefined) {
+ writer.uint32(26).bytes(message.blobIndex)
+ }
+ if (message.blob !== undefined) {
+ writer.uint32(34).bytes(message.blob)
+ }
+ return writer
+ },
+ decode: function (input, length) {
+ var reader = input instanceof _m0.Reader ? input : new _m0.Reader(input)
+ var end = length === undefined ? reader.len : reader.pos + length
+ var message = createBaseInvite_EncryptionKeys()
+ while (reader.pos < end) {
+ var tag = reader.uint32()
+ switch (tag >>> 3) {
+ case 1:
+ message.auth = reader.bytes()
+ break
+ case 2:
+ message.data = reader.bytes()
+ break
+ case 3:
+ message.blobIndex = reader.bytes()
+ break
+ case 4:
+ message.blob = reader.bytes()
+ break
+ default:
+ reader.skipType(tag & 7)
+ break
+ }
+ }
+ return message
+ },
+}
+function createBaseInviteResponse() {
+ return {
+ projectKey: Buffer.alloc(0),
+ decision: InviteResponse_Decision.REJECT,
+ }
+}
+export var InviteResponse = {
+ encode: function (message, writer) {
+ if (writer === void 0) {
+ writer = _m0.Writer.create()
+ }
+ if (message.projectKey.length !== 0) {
+ writer.uint32(10).bytes(message.projectKey)
+ }
+ if (message.decision !== InviteResponse_Decision.REJECT) {
+ writer.uint32(16).int32(inviteResponse_DecisionToNumber(message.decision))
+ }
+ return writer
+ },
+ decode: function (input, length) {
+ var reader = input instanceof _m0.Reader ? input : new _m0.Reader(input)
+ var end = length === undefined ? reader.len : reader.pos + length
+ var message = createBaseInviteResponse()
+ while (reader.pos < end) {
+ var tag = reader.uint32()
+ switch (tag >>> 3) {
+ case 1:
+ message.projectKey = reader.bytes()
+ break
+ case 2:
+ message.decision = inviteResponse_DecisionFromJSON(reader.int32())
+ break
+ default:
+ reader.skipType(tag & 7)
+ break
+ }
+ }
+ return message
+ },
+}
diff --git a/lib/schema/client.js b/src/schema/client.js
similarity index 100%
rename from lib/schema/client.js
rename to src/schema/client.js
diff --git a/lib/schema/project.js b/src/schema/project.js
similarity index 100%
rename from lib/schema/project.js
rename to src/schema/project.js
diff --git a/lib/schema/schema-to-drizzle.js b/src/schema/schema-to-drizzle.js
similarity index 100%
rename from lib/schema/schema-to-drizzle.js
rename to src/schema/schema-to-drizzle.js
diff --git a/lib/schema/utils.js b/src/schema/utils.js
similarity index 100%
rename from lib/schema/utils.js
rename to src/schema/utils.js
diff --git a/lib/sqlite.js b/src/sqlite.js
similarity index 100%
rename from lib/sqlite.js
rename to src/sqlite.js
diff --git a/lib/sync/replication-state.js b/src/sync/replication-state.js
similarity index 90%
rename from lib/sync/replication-state.js
rename to src/sync/replication-state.js
index b5498f43..945597c5 100644
--- a/lib/sync/replication-state.js
+++ b/src/sync/replication-state.js
@@ -24,10 +24,10 @@ export class ReplicationState extends TypedEmitter {
})
}
- get state () {
+ get state() {
/** @type {PeerState} */
const cores = {}
- for (const [ corePublicId, state ] of this.#coreStates.entries()) {
+ for (const [corePublicId, state] of this.#coreStates.entries()) {
cores[corePublicId] = state.state
}
@@ -36,23 +36,23 @@ export class ReplicationState extends TypedEmitter {
return { cores, synced }
}
- get peers () {
+ get peers() {
const peers = new Set()
for (const state of this.#coreStates.values()) {
- for (const peer of state.peers) {
- peers.add(peer)
- }
+ for (const peer of state.peers) {
+ peers.add(peer)
+ }
}
return [...peers]
}
- isSynced () {
+ isSynced() {
for (const state of this.#coreStates.values()) {
- if (!state.isSynced()) {
- return false
- }
+ if (!state.isSynced()) {
+ return false
+ }
}
return true
@@ -104,7 +104,6 @@ export class ReplicationState extends TypedEmitter {
* @private
*/
-
export class CoreReplicationState extends TypedEmitter {
/** @type {import('hypercore')} */
#core
@@ -201,7 +200,7 @@ export class CoreReplicationState extends TypedEmitter {
get peers() {
return [...this.#state.keys()].filter((peerId) => {
- return peerId !== this.coreId
+ return peerId !== this.coreId
})
}
@@ -251,7 +250,10 @@ export class CoreReplicationState extends TypedEmitter {
}
#getPeerState(peerPublicKey) {
- return Object.assign({}, this.#state.get(keyToId(peerPublicKey)) || createState({ remote: true }))
+ return Object.assign(
+ {},
+ this.#state.get(keyToId(peerPublicKey)) || createState({ remote: true })
+ )
}
#setLocalState(state) {
@@ -284,20 +286,20 @@ export class CoreReplicationState extends TypedEmitter {
let unavailable = 0
for (let i = 0; i < length; i++) {
- const local = localBitfield.get(i)
- const remote = remoteBitfield.get(i)
+ const local = localBitfield.get(i)
+ const remote = remoteBitfield.get(i)
- if (local) {
- haveLocal++
- }
+ if (local) {
+ haveLocal++
+ }
- if (remote) {
- haveRemote++
- }
+ if (remote) {
+ haveRemote++
+ }
- if (!remote && !local) {
- unavailable++
- }
+ if (!remote && !local) {
+ unavailable++
+ }
}
localState.have = haveLocal
@@ -336,7 +338,7 @@ function createState(options = { remote: false }) {
have: 0,
want: 0,
unavailable: 0,
- length: 0
+ length: 0,
}
}
@@ -356,7 +358,7 @@ function diffState(oldState, newState) {
return diff
}
-function getMaxLength (core) {
+function getMaxLength(core) {
const max = core.peers.reduce((max, peer) => {
return Math.max(max, peer.remoteLength)
}, core.length)
diff --git a/lib/types.d.ts b/src/types.d.ts
similarity index 100%
rename from lib/types.d.ts
rename to src/types.d.ts
diff --git a/lib/types.js b/src/types.js
similarity index 100%
rename from lib/types.js
rename to src/types.js
diff --git a/lib/utils.js b/src/utils.js
similarity index 100%
rename from lib/utils.js
rename to src/utils.js
diff --git a/test-e2e/project-crud.js b/test-e2e/project-crud.js
index 411325de..d5048ed9 100644
--- a/test-e2e/project-crud.js
+++ b/test-e2e/project-crud.js
@@ -1,7 +1,7 @@
import { test } from 'brittle'
import { randomBytes } from 'crypto'
import { KeyManager } from '@mapeo/crypto'
-import { MapeoProject } from '../lib/mapeo-project.js'
+import { MapeoProject } from '../src/mapeo-project.js'
/** @type {import('@mapeo/schema').ObservationValue} */
const obsValue = {
diff --git a/tests/authstore.js b/tests/authstore.js
index 8d20b62f..2c53c16c 100644
--- a/tests/authstore.js
+++ b/tests/authstore.js
@@ -2,7 +2,9 @@ import test from 'brittle'
import { createAuthStores } from './helpers/authstore.js'
import { waitForIndexing } from './helpers/index.js'
-test('authstore - core ownership, project creator', async (t) => {
+// Skipping tests until migrated to new DataStore & DataType API
+
+test.skip('authstore - core ownership, project creator', async (t) => {
t.plan(7)
const [peer1, peer2] = await createAuthStores(2)
@@ -49,7 +51,7 @@ test('authstore - core ownership, project creator', async (t) => {
)
})
-test('authstore - device add, remove, restore, set role', async (t) => {
+test.skip('authstore - device add, remove, restore, set role', async (t) => {
t.plan(10)
const [peer1, peer2] = await createAuthStores(2)
diff --git a/tests/blob-server.js b/tests/blob-server.js
index b99bf307..1a1cb33f 100644
--- a/tests/blob-server.js
+++ b/tests/blob-server.js
@@ -4,9 +4,9 @@ import { readdirSync } from 'fs'
import { readFile } from 'fs/promises'
import path from 'path'
import { createCoreManager } from './helpers/core-manager.js'
-import { BlobStore } from '../lib/blob-store/index.js'
-import { createBlobServer } from '../lib/blob-server/index.js'
-import BlobServerPlugin from '../lib/blob-server/fastify-plugin.js'
+import { BlobStore } from '../src/blob-store/index.js'
+import { createBlobServer } from '../src/blob-server/index.js'
+import BlobServerPlugin from '../src/blob-server/fastify-plugin.js'
import fastify from 'fastify'
import { replicateBlobs } from './helpers/blob-store.js'
@@ -238,10 +238,10 @@ const IMAGE_FIXTURES_PATH = new URL('./fixtures/images', import.meta.url)
const IMAGE_FIXTURES = readdirSync(IMAGE_FIXTURES_PATH)
/**
- * @param {import('../lib/blob-store').BlobStore} blobStore
+ * @param {import('../src/blob-store').BlobStore} blobStore
*/
async function populateStore(blobStore) {
- /** @type {{blobId: import('../lib/types').BlobId, image: {data: Buffer, ext: string}}[]} */
+ /** @type {{blobId: import('../src/types').BlobId, image: {data: Buffer, ext: string}}[]} */
const data = []
for (const fixture of IMAGE_FIXTURES) {
diff --git a/tests/blob-store/blob-store.js b/tests/blob-store/blob-store.js
index 9bebe87b..136c75f5 100644
--- a/tests/blob-store/blob-store.js
+++ b/tests/blob-store/blob-store.js
@@ -6,61 +6,70 @@ import { randomBytes } from 'node:crypto'
import fs from 'fs'
import { readFile } from 'fs/promises'
import { createCoreManager } from '../helpers/core-manager.js'
-import { BlobStore } from '../../lib/blob-store/index.js'
+import { BlobStore } from '../../src/blob-store/index.js'
import { setTimeout } from 'node:timers/promises'
import { replicateBlobs, concat } from '../helpers/blob-store.js'
// Test with buffers that are 3 times the default blockSize for hyperblobs
const TEST_BUF_SIZE = 3 * 64 * 1024
-test('blobStore.put(blobId, buf) and blobStore.get(blobId)', async t => {
+test('blobStore.put(blobId, buf) and blobStore.get(blobId)', async (t) => {
const { blobStore } = await testenv()
const diskbuf = await readFile(new URL(import.meta.url))
const blobId = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'test-file'
+ name: 'test-file',
})
const driveId = await blobStore.put(blobId, diskbuf)
const bndlbuf = await blobStore.get({ ...blobId, driveId })
t.alike(bndlbuf, diskbuf, 'should be equal')
})
-test('get(), driveId not found', async t => {
+test('get(), driveId not found', async (t) => {
const { blobStore } = await testenv()
- await t.exception(async () => await blobStore.get({
- type: 'photo',
- variant : 'original',
- name: 'test-file',
- driveId: randomBytes(32).toString('hex')
- }))
+ await t.exception(
+ async () =>
+ await blobStore.get({
+ type: 'photo',
+ variant: 'original',
+ name: 'test-file',
+ driveId: randomBytes(32).toString('hex'),
+ })
+ )
})
-test('get(), valid driveId, missing file', async t => {
+test('get(), valid driveId, missing file', async (t) => {
const { blobStore, coreManager } = await testenv()
const driveId = coreManager.getWriterCore('blobIndex').key.toString('hex')
- await t.exception(async () => await blobStore.get({
- type: 'photo',
- variant : 'original',
- name: 'test-file',
- driveId
- }))
+ await t.exception(
+ async () =>
+ await blobStore.get({
+ type: 'photo',
+ variant: 'original',
+ name: 'test-file',
+ driveId,
+ })
+ )
})
-test('get(), uninitialized drive', async t => {
+test('get(), uninitialized drive', async (t) => {
const { blobStore, coreManager } = await testenv()
const driveKey = randomBytes(32)
const driveId = driveKey.toString('hex')
coreManager.addCore(driveKey, 'blobIndex')
- await t.exception(async () => await blobStore.get({
- type: 'photo',
- variant : 'original',
- name: 'test-file',
- driveId
- }))
+ await t.exception(
+ async () =>
+ await blobStore.get({
+ type: 'photo',
+ variant: 'original',
+ name: 'test-file',
+ driveId,
+ })
+ )
})
-test('get(), initialized but unreplicated drive', async t => {
+test('get(), initialized but unreplicated drive', async (t) => {
const projectKey = randomBytes(32)
const { blobStore: bs1, coreManager: cm1 } = await testenv({ projectKey })
const { blobStore: bs2, coreManager: cm2 } = await testenv({ projectKey })
@@ -69,7 +78,7 @@ test('get(), initialized but unreplicated drive', async t => {
const blob1Id = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'blob1'
+ name: 'blob1',
})
const driveId = await bs1.put(blob1Id, blob1)
@@ -80,13 +89,16 @@ test('get(), initialized but unreplicated drive', async t => {
await destroy()
t.is(replicatedCore.contiguousLength, 0, 'data is not downloaded')
t.ok(replicatedCore.length > 0, 'proof of length has updated')
- await t.exception(async () => await bs2.get({
- ...blob1Id,
- driveId
- }))
+ await t.exception(
+ async () =>
+ await bs2.get({
+ ...blob1Id,
+ driveId,
+ })
+ )
})
-test('get(), replicated blobIndex, but blobs not replicated', async t => {
+test('get(), replicated blobIndex, but blobs not replicated', async (t) => {
const projectKey = randomBytes(32)
const { blobStore: bs1, coreManager: cm1 } = await testenv({ projectKey })
const { blobStore: bs2, coreManager: cm2 } = await testenv({ projectKey })
@@ -95,7 +107,7 @@ test('get(), replicated blobIndex, but blobs not replicated', async t => {
const blob1Id = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'blob1'
+ name: 'blob1',
})
const driveId = await bs1.put(blob1Id, blob1)
@@ -106,26 +118,35 @@ test('get(), replicated blobIndex, but blobs not replicated', async t => {
await replicatedCore.download({ end: replicatedCore.length }).done()
await destroy()
- t.is(replicatedCore.contiguousLength, replicatedCore.length, 'blobIndex has downloaded')
+ t.is(
+ replicatedCore.contiguousLength,
+ replicatedCore.length,
+ 'blobIndex has downloaded'
+ )
t.ok(replicatedCore.length > 0)
- await t.exception(async () => await bs2.get({
- ...blob1Id,
- driveId
- }))
+ await t.exception(
+ async () =>
+ await bs2.get({
+ ...blob1Id,
+ driveId,
+ })
+ )
})
-test('blobStore.createWriteStream(blobId) and blobStore.createReadStream(blobId)', async t => {
+test('blobStore.createWriteStream(blobId) and blobStore.createReadStream(blobId)', async (t) => {
const { blobStore } = await testenv()
const diskbuf = await readFile(new URL(import.meta.url))
const blobId = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'test-file'
+ name: 'test-file',
})
const ws = blobStore.createWriteStream(blobId)
const { driveId } = ws
await pipeline(fs.createReadStream(new URL(import.meta.url)), ws)
- const bndlbuf = await concat(blobStore.createReadStream({ ...blobId, driveId }))
+ const bndlbuf = await concat(
+ blobStore.createReadStream({ ...blobId, driveId })
+ )
t.alike(bndlbuf, diskbuf, 'should be equal')
})
@@ -142,13 +163,13 @@ test('live download', async function (t) {
const blob1Id = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'blob1'
+ name: 'blob1',
})
const blob2 = randomBytes(TEST_BUF_SIZE)
const blob2Id = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'blob2'
+ name: 'blob2',
})
// STEP 1: Write a blob to CM1
@@ -190,19 +211,19 @@ test('sparse live download', async function (t) {
const blob1Id = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'blob1'
+ name: 'blob1',
})
const blob2 = randomBytes(TEST_BUF_SIZE)
const blob2Id = /** @type {const} */ ({
type: 'photo',
variant: 'preview',
- name: 'blob2'
+ name: 'blob2',
})
const blob3 = randomBytes(TEST_BUF_SIZE)
const blob3Id = /** @type {const} */ ({
type: 'photo',
variant: 'thumbnail',
- name: 'blob3'
+ name: 'blob3',
})
const driveId = await bs1.put(blob1Id, blob1)
@@ -234,13 +255,13 @@ test('cancelled live download', async function (t) {
const blob1Id = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'blob1'
+ name: 'blob1',
})
const blob2 = randomBytes(TEST_BUF_SIZE)
const blob2Id = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'blob2'
+ name: 'blob2',
})
// STEP 1: Write a blob to CM1
@@ -275,13 +296,13 @@ test('cancelled live download', async function (t) {
)
})
-test('blobStore.getEntryBlob(driveId, entry)', async t => {
+test('blobStore.getEntryBlob(driveId, entry)', async (t) => {
const { blobStore } = await testenv()
const diskbuf = await readFile(new URL(import.meta.url))
const blobId = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'test-file'
+ name: 'test-file',
})
const driveId = await blobStore.put(blobId, diskbuf)
const entry = await blobStore.entry({ ...blobId, driveId })
@@ -291,13 +312,13 @@ test('blobStore.getEntryBlob(driveId, entry)', async t => {
t.alike(buf, diskbuf, 'should be equal')
})
-test('blobStore.getEntryReadStream(driveId, entry)', async t => {
+test('blobStore.getEntryReadStream(driveId, entry)', async (t) => {
const { blobStore } = await testenv()
const diskbuf = await readFile(new URL(import.meta.url))
const blobId = /** @type {const} */ ({
type: 'photo',
variant: 'original',
- name: 'test-file'
+ name: 'test-file',
})
const driveId = await blobStore.put(blobId, diskbuf)
const entry = await blobStore.entry({ ...blobId, driveId })
@@ -309,7 +330,7 @@ test('blobStore.getEntryReadStream(driveId, entry)', async t => {
t.alike(buf, diskbuf, 'should be equal')
})
-async function testenv (opts) {
+async function testenv(opts) {
const coreManager = createCoreManager(opts)
const blobStore = new BlobStore({ coreManager })
return { blobStore, coreManager }
@@ -321,9 +342,9 @@ async function testenv (opts) {
* @param {ReturnType} liveDownload
* @returns {Promise}
*/
-async function downloaded (liveDownload) {
- return new Promise(res => {
- liveDownload.on('state', function onState (state) {
+async function downloaded(liveDownload) {
+ return new Promise((res) => {
+ liveDownload.on('state', function onState(state) {
if (state.status !== 'downloaded') return
liveDownload.off('state', onState)
res()
diff --git a/tests/blob-store/combine-states.js b/tests/blob-store/combine-states.js
index 192a7121..cd16e8b0 100644
--- a/tests/blob-store/combine-states.js
+++ b/tests/blob-store/combine-states.js
@@ -1,4 +1,4 @@
-import { combineStates } from '../../lib/blob-store/live-download.js'
+import { combineStates } from '../../src/blob-store/live-download.js'
import test from 'brittle'
const partial = {
@@ -6,35 +6,35 @@ const partial = {
haveBytes: 0,
wantCount: 0,
wantBytes: 0,
- error: null
+ error: null,
}
const fixtures = [
{
statuses: ['checking', 'downloading', 'downloaded'],
- expected: 'checking'
+ expected: 'checking',
},
{
statuses: ['checking', 'downloading', 'downloading'],
- expected: 'checking'
+ expected: 'checking',
},
{
statuses: ['downloading', 'downloading', 'downloaded'],
- expected: 'downloading'
+ expected: 'downloading',
},
{
statuses: ['downloaded', 'downloaded', 'downloaded'],
- expected: 'downloaded'
+ expected: 'downloaded',
},
{
statuses: ['checking', 'checking', 'checking'],
- expected: 'checking'
- }
+ expected: 'checking',
+ },
]
-test('expected combined state, no error or abort', t => {
+test('expected combined state, no error or abort', (t) => {
for (const { statuses, expected } of fixtures) {
- const inputs = statuses.map(status => ({ state: { ...partial, status } }))
+ const inputs = statuses.map((status) => ({ state: { ...partial, status } }))
const expectedState = { ...partial, status: expected }
for (const permuted of permute(inputs)) {
t.alike(combineStates(permuted), expectedState)
@@ -42,9 +42,9 @@ test('expected combined state, no error or abort', t => {
}
})
-test('expected combined state, with error', t => {
+test('expected combined state, with error', (t) => {
for (const { statuses } of fixtures) {
- const inputs = statuses.map(status => ({ state: { ...partial, status } }))
+ const inputs = statuses.map((status) => ({ state: { ...partial, status } }))
inputs.push({ state: { ...partial, error: new Error(), status: 'error' } })
const expectedState = { ...partial, error: new Error(), status: 'error' }
for (const permuted of permute(inputs)) {
@@ -53,12 +53,12 @@ test('expected combined state, with error', t => {
}
})
-test('expected combined state, with abort', t => {
+test('expected combined state, with abort', (t) => {
const controller = new AbortController()
controller.abort()
const { signal } = controller
for (const { statuses } of fixtures) {
- const inputs = statuses.map(status => ({ state: { ...partial, status } }))
+ const inputs = statuses.map((status) => ({ state: { ...partial, status } }))
const expectedState = { ...partial, status: 'aborted' }
for (const permuted of permute(inputs)) {
t.alike(combineStates(permuted, { signal }), expectedState)
@@ -66,11 +66,11 @@ test('expected combined state, with abort', t => {
}
})
-test('arithmetic test', t => {
+test('arithmetic test', (t) => {
const counts = [
- [1,2,3,4],
- [1,2,3,4],
- [1,2,3,4]
+ [1, 2, 3, 4],
+ [1, 2, 3, 4],
+ [1, 2, 3, 4],
]
const expected = {
haveCount: 3,
@@ -78,11 +78,18 @@ test('arithmetic test', t => {
wantCount: 9,
wantBytes: 12,
error: null,
- status: 'downloaded'
+ status: 'downloaded',
}
const inputs = counts.map(([haveCount, haveBytes, wantCount, wantBytes]) => {
return {
- state: { haveCount, haveBytes, wantCount, wantBytes, error: null, status: 'downloaded'}
+ state: {
+ haveCount,
+ haveBytes,
+ wantCount,
+ wantBytes,
+ error: null,
+ status: 'downloaded',
+ },
}
})
t.alike(combineStates(inputs), expected)
@@ -95,7 +102,7 @@ test('arithmetic test', t => {
* @param {Array} arr
 * @returns {IterableIterator<Array>}
*/
-export function* permute (arr) {
+export function* permute(arr) {
var length = arr.length,
c = Array(length).fill(0),
i = 1,
diff --git a/tests/blob-store/live-download.js b/tests/blob-store/live-download.js
index 54132b21..5ea20c41 100644
--- a/tests/blob-store/live-download.js
+++ b/tests/blob-store/live-download.js
@@ -1,5 +1,5 @@
// @ts-check
-import { DriveLiveDownload } from '../../lib/blob-store/live-download.js'
+import { DriveLiveDownload } from '../../src/blob-store/live-download.js'
import Hyperdrive from 'hyperdrive'
import Corestore from 'corestore'
import RAM from 'random-access-memory'
@@ -11,12 +11,12 @@ import { randomBytes } from 'node:crypto'
// Test with buffers that are 3 times the default blockSize for hyperblobs
const TEST_BUF_SIZE = 3 * 64 * 1024
-test('live download', async t => {
+test('live download', async (t) => {
const { drive1, drive2, replicate } = await testEnv()
await drive1.put('/foo', randomBytes(TEST_BUF_SIZE))
const {
- value: { blob: blob1 }
+ value: { blob: blob1 },
} = await drive1.entry('/foo')
const stream = replicate()
@@ -45,7 +45,7 @@ test('live download', async t => {
t.alike(await drive2.get('/bar'), expected, 'Second blob is downloaded')
})
-test('sparse live download', async t => {
+test('sparse live download', async (t) => {
const { drive1, drive2, replicate } = await testEnv()
const buf1 = randomBytes(TEST_BUF_SIZE)
@@ -57,7 +57,7 @@ test('sparse live download', async t => {
const stream = replicate()
const download = new DriveLiveDownload(drive2, {
- filter: { photo: ['original'] }
+ filter: { photo: ['original'] },
})
await waitForState(download, 'downloaded')
@@ -83,7 +83,7 @@ test('sparse live download', async t => {
)
})
-test('Abort download (same tick)', async t => {
+test('Abort download (same tick)', async (t) => {
const { drive1, drive2, replicate } = await testEnv()
await drive1.put('/foo', randomBytes(TEST_BUF_SIZE))
const stream = replicate()
@@ -98,12 +98,12 @@ test('Abort download (same tick)', async t => {
wantCount: 0,
wantBytes: 0,
error: null,
- status: 'aborted'
+ status: 'aborted',
})
t.is(await drive2.get('/foo'), null, 'nothing downloaded')
})
-test('Abort download (next event loop)', async t => {
+test('Abort download (next event loop)', async (t) => {
const { drive1, drive2, replicate } = await testEnv()
await drive1.put('/one', randomBytes(TEST_BUF_SIZE))
const stream = replicate()
@@ -120,7 +120,7 @@ test('Abort download (next event loop)', async t => {
wantCount: 0,
wantBytes: 0,
error: null,
- status: 'aborted'
+ status: 'aborted',
})
await t.exception(
drive2.get('/foo', { wait: false }),
@@ -129,7 +129,7 @@ test('Abort download (next event loop)', async t => {
)
})
-test('Abort download (after initial download)', async t => {
+test('Abort download (after initial download)', async (t) => {
const { drive1, drive2, replicate } = await testEnv()
const buf1 = randomBytes(TEST_BUF_SIZE)
@@ -158,7 +158,7 @@ test('Abort download (after initial download)', async t => {
)
})
-test('Live download when data is already downloaded', async t => {
+test('Live download when data is already downloaded', async (t) => {
const { drive1, drive2, replicate } = await testEnv()
const buf1 = randomBytes(20)
@@ -184,7 +184,7 @@ test('Live download when data is already downloaded', async t => {
wantCount: 0,
wantBytes: 0,
error: null,
- status: 'downloaded'
+ status: 'downloaded',
},
'Blob already downloaded is included in state'
)
@@ -199,7 +199,7 @@ test('Live download when data is already downloaded', async t => {
t.alike(await drive2.get('/two'), buf2, 'Second blob is downloaded')
})
-test('Live download continues across disconnection and reconnect', async t => {
+test('Live download continues across disconnection and reconnect', async (t) => {
const { drive1, drive2, replicate } = await testEnv()
const buf1 = randomBytes(TEST_BUF_SIZE)
@@ -227,14 +227,14 @@ test('Live download continues across disconnection and reconnect', async t => {
t.alike(await drive2.get('/two'), buf2, 'Second blob is downloaded')
})
-test('Initial status', async t => {
+test('Initial status', async (t) => {
const { drive1 } = await testEnv()
const download = new DriveLiveDownload(drive1)
t.is(download.state.status, 'checking', "initial status is 'checking'")
})
-test('Unitialized drive with no data', async t => {
+test('Uninitialized drive with no data', async (t) => {
// This test is important because it catches an edge case where a drive might
  // have been added by its key but never replicated, so it has no data and
// the content feed will never be read from the header, which might result in
@@ -250,12 +250,12 @@ test('Unitialized drive with no data', async t => {
)
})
-test('live download started before initial replication', async t => {
+test('live download started before initial replication', async (t) => {
const { drive1, drive2, replicate } = await testEnv()
await drive1.put('/foo', randomBytes(TEST_BUF_SIZE))
const {
- value: { blob: blob1 }
+ value: { blob: blob1 },
} = await drive1.entry('/foo')
const download = new DriveLiveDownload(drive2)
@@ -289,9 +289,9 @@ test('live download started before initial replication', async t => {
})
/** @returns {Promise} */
-async function waitForState (download, status) {
- return new Promise(res => {
- download.on('state', function onState (state) {
+async function waitForState(download, status) {
+ return new Promise((res) => {
+ download.on('state', function onState(state) {
// console.log('download state', state)
if (state.status !== status) return
download.off('state', onState)
@@ -300,7 +300,7 @@ async function waitForState (download, status) {
})
}
-async function testEnv () {
+async function testEnv() {
const store1 = new Corestore(RAM)
const store2 = new Corestore(RAM)
const drive1 = new Hyperdrive(store1)
@@ -308,7 +308,7 @@ async function testEnv () {
const drive2 = new Hyperdrive(store2, drive1.key)
await drive2.ready()
- function replicate () {
+ function replicate() {
const s = store1.replicate(true)
s.pipe(store2.replicate(false)).pipe(s)
return s
@@ -317,6 +317,6 @@ async function testEnv () {
return {
drive1,
drive2,
- replicate
+ replicate,
}
}
diff --git a/tests/core-manager.js b/tests/core-manager.js
index 9a1dd2c6..76e2fa40 100644
--- a/tests/core-manager.js
+++ b/tests/core-manager.js
@@ -6,15 +6,15 @@ import { createCoreManager, replicate } from './helpers/core-manager.js'
import { randomBytes } from 'crypto'
import Sqlite from 'better-sqlite3'
import { KeyManager } from '@mapeo/crypto'
-import { CoreManager } from '../lib/core-manager/index.js'
+import { CoreManager } from '../src/core-manager/index.js'
import assert from 'assert'
import { temporaryDirectoryTask } from 'tempy'
import { exec } from 'child_process'
-import { RandomAccessFilePool } from '../lib/core-manager/random-access-file-pool.js'
+import { RandomAccessFilePool } from '../src/core-manager/random-access-file-pool.js'
import RandomAccessFile from 'random-access-file'
import path from 'path'
-async function createCore (...args) {
+async function createCore(...args) {
const core = new Hypercore(RAM, ...args)
await core.ready()
return core
@@ -29,7 +29,7 @@ test('shares auth cores', async function (t) {
await Promise.all([
waitForCores(cm1, getKeys(cm2, 'auth')),
- waitForCores(cm2, getKeys(cm1, 'auth'))
+ waitForCores(cm2, getKeys(cm1, 'auth')),
])
const cm1Keys = getKeys(cm1, 'auth').sort(Buffer.compare)
@@ -39,22 +39,22 @@ test('shares auth cores', async function (t) {
})
test('project creator auth core has project key', async function (t) {
- const db = new Sqlite(':memory:')
+ const sqlite = new Sqlite(':memory:')
const keyManager = new KeyManager(randomBytes(16))
const { publicKey: projectKey, secretKey: projectSecretKey } =
keyManager.getHypercoreKeypair('auth', randomBytes(32))
const cm = new CoreManager({
- db,
+ sqlite,
keyManager,
storage: RAM,
projectKey,
- projectSecretKey
+ projectSecretKey,
})
const { key: authCoreKey } = cm.getWriterCore('auth')
t.ok(authCoreKey.equals(projectKey))
})
-test('getCreatorCore()', async t => {
+test('getCreatorCore()', async (t) => {
const projectKey = randomBytes(32)
const cm = createCoreManager({ projectKey })
await cm.creatorCore.ready()
@@ -88,7 +88,7 @@ test('eagerly updates remote bitfields', async function (t) {
t.ok(cm2Core, 'writer core has replicated')
// Need to wait for now, since no event for when a remote bitfield is updated
- await new Promise(res => setTimeout(res, 200))
+ await new Promise((res) => setTimeout(res, 200))
t.is(cm2Core.length, cm1Core.length)
@@ -117,7 +117,7 @@ test('eagerly updates remote bitfields', async function (t) {
// direction, e.g. from the non-writer to the writer
const { destroy } = replicate(cm1, cm2)
// Need to wait for now, since no event for when a remote bitfield is updated
- await new Promise(res => setTimeout(res, 200))
+ await new Promise((res) => setTimeout(res, 200))
t.ok(
bitfieldEquals(
cm1Core.peers[0].remoteBitfield,
@@ -136,7 +136,7 @@ test('eagerly updates remote bitfields', async function (t) {
replicate(cm1, cm2)
replicate(cm2, cm3)
- await new Promise(res => setTimeout(res, 200))
+ await new Promise((res) => setTimeout(res, 200))
const cm3Core = cm3.getCoreByKey(cm1Core.key)
t.alike(cm3Core.length, cm1Core.length)
@@ -154,7 +154,7 @@ test('eagerly updates remote bitfields', async function (t) {
await cm1Core.append(['k', 'l', 'm', 'o', 'p'])
await cm2Core.download({ start: 9, end: 12 }).done()
- await new Promise(res => setTimeout(res, 200))
+ await new Promise((res) => setTimeout(res, 200))
t.alike(cm3Core.length, cm1Core.length)
t.ok(
@@ -185,7 +185,7 @@ test('works with an existing protocol stream for replications', async function (
await Promise.all([
waitForCores(cm1, getKeys(cm2, 'auth')),
- waitForCores(cm2, getKeys(cm1, 'auth'))
+ waitForCores(cm2, getKeys(cm1, 'auth')),
])
const cm1Keys = getKeys(cm1, 'auth').sort(Buffer.compare)
@@ -216,7 +216,7 @@ test.skip('can mux other project replications over same stream', async function
await Promise.all([
waitForCores(cm1, getKeys(cm2, 'auth')),
- waitForCores(cm2, getKeys(cm1, 'auth'))
+ waitForCores(cm2, getKeys(cm1, 'auth')),
])
cm1.replicate(n1)
@@ -258,7 +258,7 @@ test('multiplexing waits for cores to be added', async function (t) {
t.alike(await b2.get(0), Buffer.from('ho'))
})
-test('close()', async t => {
+test('close()', async (t) => {
const cm = createCoreManager()
for (const namespace of CoreManager.namespaces) {
cm.addCore(randomBytes(32), namespace)
@@ -274,15 +274,15 @@ test('close()', async t => {
t.exception(() => cm.replicate(ns), /closed/)
})
-test('Added cores are persisted', async t => {
- const db = new Sqlite(':memory:')
+test('Added cores are persisted', async (t) => {
+ const sqlite = new Sqlite(':memory:')
const keyManager = new KeyManager(randomBytes(16))
const projectKey = randomBytes(32)
const cm1 = new CoreManager({
- db,
+ sqlite,
keyManager,
storage: RAM,
- projectKey
+ projectKey,
})
const key = randomBytes(32)
cm1.addCore(key, 'auth')
@@ -290,10 +290,10 @@ test('Added cores are persisted', async t => {
await cm1.close()
const cm2 = new CoreManager({
- db,
+ sqlite,
keyManager,
storage: RAM,
- projectKey
+ projectKey,
})
t.ok(cm2.getCoreByKey(key), 'Added core is persisted')
@@ -335,15 +335,15 @@ test('poolSize limits number of open file descriptors', async function (t) {
keyManager.getHypercoreKeypair('auth', randomBytes(32))
const CORE_COUNT = 500
- await temporaryDirectoryTask(async tempPath => {
- const db = new Sqlite(':memory:')
- const storage = name => new RandomAccessFile(path.join(tempPath, name))
+ await temporaryDirectoryTask(async (tempPath) => {
+ const sqlite = new Sqlite(':memory:')
+ const storage = (name) => new RandomAccessFile(path.join(tempPath, name))
const cm = new CoreManager({
- db,
+ sqlite,
keyManager,
storage,
projectKey,
- projectSecretKey
+ projectSecretKey,
})
// -1 because CoreManager creates a writer core already
for (let i = 0; i < CORE_COUNT - 1; i++) {
@@ -357,18 +357,18 @@ test('poolSize limits number of open file descriptors', async function (t) {
t.ok(fdCount > CORE_COUNT, 'without pool, at least one fd per core')
})
- await temporaryDirectoryTask(async tempPath => {
+ await temporaryDirectoryTask(async (tempPath) => {
const POOL_SIZE = 100
- const db = new Sqlite(':memory:')
+ const sqlite = new Sqlite(':memory:')
const pool = new RandomAccessFilePool(POOL_SIZE)
- const storage = name =>
+ const storage = (name) =>
new RandomAccessFile(path.join(tempPath, name), { pool })
const cm = new CoreManager({
- db,
+ sqlite,
keyManager,
storage,
projectKey,
- projectSecretKey
+ projectSecretKey,
})
    // -1 because CoreManager creates a writer core already
for (let i = 0; i < CORE_COUNT - 1; i++) {
@@ -386,11 +386,11 @@ test('poolSize limits number of open file descriptors', async function (t) {
})
})
-async function waitForCores (coreManager, keys) {
+async function waitForCores(coreManager, keys) {
const allKeys = getAllKeys(coreManager)
if (hasKeys(keys, allKeys)) return
- return new Promise(res => {
- coreManager.on('add-core', function onAddCore ({ key }) {
+ return new Promise((res) => {
+ coreManager.on('add-core', function onAddCore({ key }) {
allKeys.push(key)
if (hasKeys(keys, allKeys)) {
coreManager.off('add-core', onAddCore)
@@ -400,7 +400,7 @@ async function waitForCores (coreManager, keys) {
})
}
-function getAllKeys (coreManager) {
+function getAllKeys(coreManager) {
const keys = []
for (const namespace of CoreManager.namespaces) {
keys.push.apply(keys, getKeys(coreManager, namespace))
@@ -408,13 +408,13 @@ function getAllKeys (coreManager) {
return keys
}
-function getKeys (coreManager, namespace) {
+function getKeys(coreManager, namespace) {
return coreManager.getCores(namespace).map(({ key }) => key)
}
-function hasKeys (someKeys, allKeys) {
+function hasKeys(someKeys, allKeys) {
for (const key of someKeys) {
- if (!allKeys.find(k => k.equals(key))) return false
+ if (!allKeys.find((k) => k.equals(key))) return false
}
return true
}
@@ -423,7 +423,7 @@ const DEBUG = process.env.DEBUG
// Compare two bitfields (instance of core.core.bitfield or peer.remoteBitfield)
// Need to pass len, since bitfields don't know their own length
-function bitfieldEquals (actual, expected, len) {
+function bitfieldEquals(actual, expected, len) {
assert(typeof len === 'number')
let actualStr = ''
let expectedStr = ''
@@ -457,7 +457,7 @@ function bitfieldEquals (actual, expected, len) {
* @param {string} dir folder for counting open file descriptors
* @returns {Promise}
*/
-async function countOpenFileDescriptors (dir) {
+async function countOpenFileDescriptors(dir) {
return new Promise((res, rej) => {
exec(`lsof +D '${dir}' | wc -l`, (error, stdout) => {
if (error) return rej(error)
diff --git a/tests/datastore-new.js b/tests/datastore-new.js
deleted file mode 100644
index 8c0d010d..00000000
--- a/tests/datastore-new.js
+++ /dev/null
@@ -1,98 +0,0 @@
-// @ts-check
-import test from 'brittle'
-import { DataStore } from '../lib/datastore/data-store-new.js'
-import { createCoreManager } from './helpers/core-manager.js'
-import { getVersionId } from '@mapeo/schema'
-import { once } from 'events'
-import RAM from 'random-access-memory'
-
-/** @type {Omit} */
-const obs = {
- docId: 'abc',
- links: [],
- createdAt: new Date().toISOString(),
- updatedAt: new Date().toISOString(),
- schemaName: 'observation',
- refs: [],
- tags: {},
- attachments: [],
- metadata: {},
-}
-
-test('read and write', async (t) => {
- const cm = createCoreManager()
- const writerCore = cm.getWriterCore('data').core
- await writerCore.ready()
- const indexedVersionIds = []
- const dataStore = new DataStore({
- coreManager: cm,
- namespace: 'data',
- indexWriter: /** @type {any} faking IndexWriter for unit test */ ({
- async batch(entries) {
- for (const { index, key } of entries) {
- const versionId = getVersionId({ coreKey: key, index })
- indexedVersionIds.push(versionId)
- }
- },
- }),
- storage: () => new RAM(),
- })
- const written = await dataStore.write(obs)
- const expectedVersionId = getVersionId({ coreKey: writerCore.key, index: 0 })
- t.is(
- written.versionId,
- expectedVersionId,
- 'versionId is set to expected value'
- )
- const read = await dataStore.read(written.versionId)
- t.alike(
- read,
- written,
- 'data returned from write matches data returned from read'
- )
- t.alike(
- indexedVersionIds,
- [written.versionId],
- 'The indexEntries function is called with all data that is added'
- )
-})
-
-test('index events', async (t) => {
- const cm = createCoreManager()
- const writerCore = cm.getWriterCore('data').core
- await writerCore.ready()
- const indexStates = []
- const dataStore = new DataStore({
- coreManager: cm,
- namespace: 'data',
- indexWriter: /** @type {any} faking IndexWriter for unit test */ ({
- async batch() {
- await new Promise((res) => setTimeout(res, 10))
- },
- }),
- storage: () => new RAM(),
- })
- dataStore.on('index-state', (state) => {
- // eslint-disable-next-line no-unused-vars
- const { entriesPerSecond, ...rest } = state
- indexStates.push(rest)
- })
- const idlePromise = once(dataStore, 'idle')
- await dataStore.write(obs)
- await idlePromise
- const expectedStates = [
- {
- current: 'idle',
- remaining: 0,
- },
- {
- current: 'indexing',
- remaining: 1,
- },
- {
- current: 'idle',
- remaining: 0,
- },
- ]
- t.alike(indexStates, expectedStates, 'expected index states emitted')
-})
diff --git a/tests/datastore.js b/tests/datastore.js
index 3c5235ad..22721adc 100644
--- a/tests/datastore.js
+++ b/tests/datastore.js
@@ -1,150 +1,98 @@
+// @ts-check
import test from 'brittle'
-
-import ram from 'random-access-memory'
-import Corestore from 'corestore'
-import b4a from 'b4a'
-
-import { DataStore } from '../lib/datastore/index.js'
-import { Sqlite } from '../lib/sqlite.js'
-
-import { createIdentityKeys } from './helpers/index.js'
-import { getBlockPrefix } from '../lib/utils.js'
-
-test('datastore - create, update, query two datatypes', async (t) => {
- t.plan(13)
-
- const { identityKeyPair, keyManager } = createIdentityKeys()
- const keyPair = keyManager.getHypercoreKeypair('data', b4a.alloc(32))
- const corestore = new Corestore(ram)
- const sqlite = new Sqlite(':memory:')
-
- const example1 = {
- name: 'example1',
- blockPrefix: '0',
- schema: {
- type: 'object',
- properties: {
- id: { type: 'string' },
- version: { type: 'string' },
- value: { type: 'string' },
- created: { type: 'number' },
- updated: { type: 'number' },
- timestamp: { type: 'number' },
- links: { type: 'array' },
- forks: { type: 'array' },
- authorId: { type: 'string' },
- },
- additionalProperties: false,
- },
- extraColumns: `
- value TEXT,
- created INTEGER,
- updated INTEGER,
- timestamp INTEGER,
- authorId TEXT
- `,
- }
-
- const example2 = {
- name: 'example2',
- blockPrefix: '1',
- schema: {
- type: 'object',
- properties: {
- id: { type: 'string' },
- version: { type: 'string' },
- value: { type: 'string' },
- created: { type: 'number' },
- updated: { type: 'number' },
- timestamp: { type: 'number' },
- links: { type: 'array' },
- forks: { type: 'array' },
- authorId: { type: 'string' },
+import { DataStore } from '../src/datastore/index.js'
+import { createCoreManager } from './helpers/core-manager.js'
+import { getVersionId } from '@mapeo/schema'
+import { once } from 'events'
+import RAM from 'random-access-memory'
+
+/** @type {Omit<import('@mapeo/schema').Observation, 'versionId'>} */
+const obs = {
+ docId: 'abc',
+ links: [],
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ schemaName: 'observation',
+ refs: [],
+ tags: {},
+ attachments: [],
+ metadata: {},
+}
+
+test('read and write', async (t) => {
+ const cm = createCoreManager()
+ const writerCore = cm.getWriterCore('data').core
+ await writerCore.ready()
+ const indexedVersionIds = []
+ const dataStore = new DataStore({
+ coreManager: cm,
+ namespace: 'data',
+ indexWriter: /** @type {any} faking IndexWriter for unit test */ ({
+ async batch(entries) {
+ for (const { index, key } of entries) {
+ const versionId = getVersionId({ coreKey: key, index })
+ indexedVersionIds.push(versionId)
+ }
},
- additionalProperties: false,
- },
- extraColumns: `
- value TEXT,
- created INTEGER,
- updated INTEGER,
- timestamp INTEGER,
- authorId TEXT
- `,
- }
-
- const datastore = new DataStore({
- corestore,
- sqlite,
- keyPair,
- identityPublicKey: identityKeyPair.publicKey,
- dataTypes: [example1, example2],
+ }),
+ storage: () => new RAM(),
})
-
- await datastore.ready()
- t.ok(datastore, 'datastore created')
-
- // example1 create doc
- const doc = await datastore.create('example1', { value: 'example1' })
- t.is(doc.value, 'example1', 'doc created')
-
- const [gotDoc] = datastore.query(
- `select * from example1 where id = '${doc.id}'`
+ const written = await dataStore.write(obs)
+ const expectedVersionId = getVersionId({ coreKey: writerCore.key, index: 0 })
+ t.is(
+ written.versionId,
+ expectedVersionId,
+ 'versionId is set to expected value'
)
- t.is(gotDoc.value, 'example1', 'doc queried')
-
- // example1 update doc
- const updatedDocVersion = Object.assign({}, doc, {
- value: 'updated',
- links: [doc.version],
- })
-
- const updatedDoc = await datastore.update('example1', updatedDocVersion)
- t.is(updatedDoc.value, 'updated', 'doc updated')
-
- const [gotUpdatedDoc] = datastore.query(
- `select * from example1 where id = '${doc.id}'`
+ const read = await dataStore.read(written.versionId)
+ t.alike(
+ read,
+ written,
+ 'data returned from write matches data returned from read'
)
- t.is(gotUpdatedDoc.value, 'updated', 'updated doc queried')
-
- // example2 create doc
- const example2Doc = await datastore.create('example2', { value: 'example2' })
- const [gotDoc2] = datastore.query(
- `select * from example2 where id = '${example2Doc.id}'`
+ t.alike(
+ indexedVersionIds,
+ [written.versionId],
+    'The indexWriter.batch function is called with all data that is added'
)
- t.is(gotDoc2.value, 'example2', 'example2 doc queried')
+})
- // example2 update doc
- const updatedDocVersion2 = Object.assign({}, example2Doc, {
- value: 'updated2',
- links: [example2Doc.version],
+test('index events', async (t) => {
+ const cm = createCoreManager()
+ const writerCore = cm.getWriterCore('data').core
+ await writerCore.ready()
+ const indexStates = []
+ const dataStore = new DataStore({
+ coreManager: cm,
+ namespace: 'data',
+ indexWriter: /** @type {any} faking IndexWriter for unit test */ ({
+ async batch() {
+ await new Promise((res) => setTimeout(res, 10))
+ },
+ }),
+ storage: () => new RAM(),
})
-
- const updatedDoc2 = await datastore.update('example2', updatedDocVersion2)
- t.is(updatedDoc2.value, 'updated2', 'doc updated')
-
- const [gotUpdatedDoc2] = datastore.query(
- `select * from example2 where id = '${example2Doc.id}'`
- )
- t.is(gotUpdatedDoc2.value, 'updated2', 'updated doc queried')
-
- // check hypercore block count
- const counts = { example1: 0, example2: 0 }
- for await (const data of datastore.createReadStream()) {
- const blockPrefix = getBlockPrefix(data)
- if (blockPrefix === example1.blockPrefix) {
- counts.example1++
- } else if (blockPrefix === example2.blockPrefix) {
- counts.example2++
- }
- }
-
- t.is(counts.example1, 2, 'example1 has 2 blocks')
- t.is(counts.example2, 2, 'example2 has 2 blocks')
- t.is(datastore.dataTypes.length, 2, 'datastore has 2 dataTypes')
- t.is(datastore.cores.length, 1, 'datastore has 1 core')
-
- const core = corestore.get({ key: keyPair.publicKey })
- await core.ready()
-
- t.is(core.length, 4, 'datastore core has 4 blocks')
+ dataStore.on('index-state', (state) => {
+ // eslint-disable-next-line no-unused-vars
+ const { entriesPerSecond, ...rest } = state
+ indexStates.push(rest)
+ })
+ const idlePromise = once(dataStore, 'idle')
+ await dataStore.write(obs)
+ await idlePromise
+ const expectedStates = [
+ {
+ current: 'idle',
+ remaining: 0,
+ },
+ {
+ current: 'indexing',
+ remaining: 1,
+ },
+ {
+ current: 'idle',
+ remaining: 0,
+ },
+ ]
+ t.alike(indexStates, expectedStates, 'expected index states emitted')
})
diff --git a/tests/datatype.js b/tests/datatype.js
deleted file mode 100644
index c182e7b9..00000000
--- a/tests/datatype.js
+++ /dev/null
@@ -1,38 +0,0 @@
-import test from 'brittle'
-import { createDataType } from './helpers/datatype.js'
-
-test('datatype - create, encode, decode', async (t) => {
- t.plan(2)
-
- const { dataType } = await createDataType({
- name: 'test',
- schema: {
- type: 'object',
- properties: {
- title: { type: 'string' },
- content: { type: 'string' },
- },
- },
- blockPrefix: '0',
- extraColumns: 'title TEXT, content TEXT, timestamp INTEGER',
- })
-
- const created = await dataType.create({
- title: 'Hello World',
- content: 'This is a test',
- })
-
- const updated = await dataType.update(
- Object.assign({}, created, { title: 'hi', links: [created.version] })
- )
- t.is(updated.title, 'hi', 'updated title')
-
- const notUpdated = dataType.update(
- Object.assign({}, created, { title: 'hi', links: [] })
- )
-
- t.exception(
- notUpdated,
- 'should throw error if previous version not provided as a link'
- )
-})
diff --git a/tests/discovery.js b/tests/discovery.js
index 6dfebbd6..8b2c361b 100644
--- a/tests/discovery.js
+++ b/tests/discovery.js
@@ -5,7 +5,7 @@ import ram from 'random-access-memory'
import createTestnet from '@hyperswarm/testnet'
import { createCoreKeyPair, createIdentityKeys } from './helpers/index.js'
-import { Discovery } from '../lib/discovery/index.js'
+import { Discovery } from '../src/discovery/index.js'
test('discovery - dht/hyperswarm', async (t) => {
t.plan(2)
diff --git a/tests/helpers/authstore.js b/tests/helpers/authstore.js
index 526679eb..2794d267 100644
--- a/tests/helpers/authstore.js
+++ b/tests/helpers/authstore.js
@@ -2,10 +2,10 @@ import { randomBytes } from 'crypto'
import Corestore from 'corestore'
import ram from 'random-access-memory'
-import { Sqlite } from '../../lib/sqlite.js'
-import { AuthStore } from '../../lib/authstore/index.js'
+import { Sqlite } from '../../src/sqlite.js'
+import { AuthStore } from '../../src/authstore/index.js'
import { addCores, replicate, createIdentityKeys } from './index.js'
-import { keyToId } from '../../lib/utils.js'
+import { keyToId } from '../../src/utils.js'
export async function createAuthStore({
corestore,
@@ -72,12 +72,14 @@ export async function createAuthStores(count, options) {
}
await addCores(peers)
- replicate(peers.map((peer) => {
- return {
- id: peer.identityId,
- core: peer.authstore,
- }
- }))
+ replicate(
+ peers.map((peer) => {
+ return {
+ id: peer.identityId,
+ core: peer.authstore,
+ }
+ })
+ )
return peers
}
diff --git a/tests/helpers/blob-store.js b/tests/helpers/blob-store.js
index 320d1e01..c9fb57d3 100644
--- a/tests/helpers/blob-store.js
+++ b/tests/helpers/blob-store.js
@@ -1,25 +1,26 @@
-import { replicate } from './core-manager.js';
+import { replicate } from './core-manager.js'
import { pipelinePromise as pipeline, Writable } from 'streamx'
/**
*
- * @param {import('../../lib/core-manager/index.js').CoreManager} cm1
- * @param {import('../../lib/core-manager/index.js').CoreManager} cm2
+ * @param {import('../../src/core-manager/index.js').CoreManager} cm1
+ * @param {import('../../src/core-manager/index.js').CoreManager} cm2
*/
export function replicateBlobs(cm1, cm2) {
- cm1.addCore(cm2.getWriterCore('blobIndex').key, 'blobIndex');
- cm2.addCore(cm1.getWriterCore('blobIndex').key, 'blobIndex');
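+  // Exchange blobIndex writer core keys so each side knows which cores to replicate.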
+ cm1.addCore(cm2.getWriterCore('blobIndex').key, 'blobIndex')
+ cm2.addCore(cm1.getWriterCore('blobIndex').key, 'blobIndex')
const {
- rsm: [rsm1, rsm2], destroy
- } = replicate(cm1, cm2);
- rsm1.enableNamespace('blobIndex');
- rsm1.enableNamespace('blob');
- rsm2.enableNamespace('blobIndex');
- rsm2.enableNamespace('blob');
+ rsm: [rsm1, rsm2],
+ destroy,
+ } = replicate(cm1, cm2)
+ rsm1.enableNamespace('blobIndex')
+ rsm1.enableNamespace('blob')
+ rsm2.enableNamespace('blobIndex')
+ rsm2.enableNamespace('blob')
return {
rsm: /** @type {const} */ ([rsm1, rsm2]),
- destroy
- };
+ destroy,
+ }
}
export async function concat(rs) {
@@ -27,11 +28,11 @@ export async function concat(rs) {
await pipeline(
rs,
new Writable({
- write (data, cb) {
+ write(data, cb) {
if (buf) buf = data.concat(buf)
else buf = data
return cb(null)
- }
+ },
})
)
return buf
diff --git a/tests/helpers/core-manager.js b/tests/helpers/core-manager.js
index 86732d92..377155ee 100644
--- a/tests/helpers/core-manager.js
+++ b/tests/helpers/core-manager.js
@@ -1,23 +1,23 @@
-import { CoreManager } from '../../lib/core-manager/index.js'
+import { CoreManager } from '../../src/core-manager/index.js'
import Sqlite from 'better-sqlite3'
import { randomBytes } from 'crypto'
import { KeyManager } from '@mapeo/crypto'
import RAM from 'random-access-memory'
import NoiseSecretStream from '@hyperswarm/secret-stream'
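+// Creates an in-memory CoreManager for tests: RAM core storage and an in-memory sqlite database.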
-export function createCoreManager ({
+export function createCoreManager({
rootKey = randomBytes(16),
projectKey = randomBytes(32),
...opts
} = {}) {
- const db = new Sqlite(':memory:')
+ const sqlite = new Sqlite(':memory:')
const keyManager = new KeyManager(rootKey)
return new CoreManager({
- db,
+ sqlite,
keyManager,
storage: RAM,
projectKey,
- ...opts
+ ...opts,
})
}
@@ -27,7 +27,7 @@ export function createCoreManager ({
* @param {CoreManager} cm2
* @returns
*/
-export function replicate (cm1, cm2) {
+export function replicate(cm1, cm2) {
const n1 = new NoiseSecretStream(true)
const n2 = new NoiseSecretStream(false)
n1.rawStream.pipe(n2.rawStream).pipe(n1.rawStream)
@@ -35,30 +35,30 @@ export function replicate (cm1, cm2) {
const rsm1 = cm1.replicate(n1)
const rsm2 = cm2.replicate(n2)
- async function destroy () {
+ async function destroy() {
await Promise.all([
- new Promise(res => {
+ new Promise((res) => {
n1.on('close', res)
n1.destroy()
}),
- new Promise(res => {
+ new Promise((res) => {
n2.on('close', res)
n2.destroy()
- })
+ }),
])
}
return {
rsm: [rsm1, rsm2],
- destroy
+ destroy,
}
}
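+// Resolves once the coreManager has added (and readied) all of the given core keys.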
-export async function waitForCores (coreManager, keys) {
+export async function waitForCores(coreManager, keys) {
const allKeys = getAllKeys(coreManager)
if (hasKeys(keys, allKeys)) return
- return new Promise(res => {
- coreManager.on('add-core', async function onAddCore ({ key, core }) {
+ return new Promise((res) => {
+ coreManager.on('add-core', async function onAddCore({ key, core }) {
await core.ready()
allKeys.push(key)
if (hasKeys(keys, allKeys)) {
@@ -69,7 +69,7 @@ export async function waitForCores (coreManager, keys) {
})
}
-export function getAllKeys (coreManager) {
+export function getAllKeys(coreManager) {
const keys = []
for (const namespace of CoreManager.namespaces) {
keys.push.apply(keys, getKeys(coreManager, namespace))
@@ -77,13 +77,13 @@ export function getAllKeys (coreManager) {
return keys
}
-export function getKeys (coreManager, namespace) {
+export function getKeys(coreManager, namespace) {
return coreManager.getCores(namespace).map(({ key }) => key)
}
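+// True if every key in someKeys is present in allKeys (compared with Buffer#equals).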
-export function hasKeys (someKeys, allKeys) {
+export function hasKeys(someKeys, allKeys) {
for (const key of someKeys) {
- if (!allKeys.find(k => k.equals(key))) return false
+ if (!allKeys.find((k) => k.equals(key))) return false
}
return true
}
diff --git a/tests/helpers/datastore.js b/tests/helpers/datastore.js
deleted file mode 100644
index 07486cb8..00000000
--- a/tests/helpers/datastore.js
+++ /dev/null
@@ -1,27 +0,0 @@
-import ram from 'random-access-memory'
-import Corestore from 'corestore'
-import b4a from 'b4a'
-
-import { DataStore } from '../../lib/datastore/index.js'
-import { Sqlite } from '../../lib/sqlite.js'
-
-import { createIdentityKeys } from './index.js'
-
-export async function createDataStore (options) {
- const { dataTypes } = options
- const { identityKeyPair, keyManager } = createIdentityKeys()
- const keyPair = keyManager.getHypercoreKeypair('data', b4a.alloc(32))
- const corestore = new Corestore(ram)
- const sqlite = new Sqlite(':memory:')
-
- const datastore = new DataStore({
- corestore,
- sqlite,
- keyPair,
- identityPublicKey: identityKeyPair.publicKey,
- dataTypes
- })
-
- await datastore.ready()
- return datastore
-}
diff --git a/tests/helpers/datatype.js b/tests/helpers/datatype.js
deleted file mode 100644
index 1e8fe679..00000000
--- a/tests/helpers/datatype.js
+++ /dev/null
@@ -1,63 +0,0 @@
-import { randomBytes } from 'crypto'
-import Corestore from 'corestore'
-import ram from 'random-access-memory'
-import MultiCoreIndexer from 'multi-core-indexer'
-
-import { DataType } from '../../lib/datatype/index.js'
-import { Sqlite } from '../../lib/sqlite.js'
-
-import { createIdentityKeys } from './index.js'
-
-export async function createDataType(options) {
- const { name, schema, extraColumns, blockPrefix } = options
- let { corestore, sqlite } = options
- const { identityKeyPair, keyManager } = createIdentityKeys()
- const identityId = identityKeyPair.publicKey.toString('hex')
- const keyPair = keyManager.getHypercoreKeypair(name, randomBytes(32))
-
- if (!corestore) {
- corestore = new Corestore(ram)
- }
-
- if (!sqlite) {
- sqlite = new Sqlite(':memory:')
- }
-
- const core = corestore.get({
- keyPair,
- })
-
- const dataType = new DataType({
- name,
- core,
- schema,
- blockPrefix,
- identityPublicKey: identityKeyPair.publicKey,
- corestore,
- keyPair,
- sqlite,
- extraColumns,
- })
-
- await dataType.ready()
-
- let indexer
- if (options.indexer !== false) {
- const cores = [...corestore.cores.values()]
- indexer = new MultiCoreIndexer(cores, {
- storage: (key) => {
- return new ram(key)
- },
- batch: (entries) => {
- dataType.index(entries.map((entry) => entry.block))
- },
- })
- }
-
- return {
- name,
- identityId,
- dataType,
- indexer,
- }
-}
diff --git a/tests/helpers/index.js b/tests/helpers/index.js
index dd3492bd..a26980c1 100644
--- a/tests/helpers/index.js
+++ b/tests/helpers/index.js
@@ -2,7 +2,7 @@ import { KeyManager } from '@mapeo/crypto'
import Hypercore from 'hypercore'
import RAM from 'random-access-memory'
-export async function createCore (...args) {
+export async function createCore(...args) {
const core = new Hypercore(RAM, ...args)
await core.ready()
return core
diff --git a/tests/helpers/replication-state.js b/tests/helpers/replication-state.js
index 4cda0bb7..c71cebe5 100644
--- a/tests/helpers/replication-state.js
+++ b/tests/helpers/replication-state.js
@@ -1,15 +1,21 @@
import NoiseSecretStream from '@hyperswarm/secret-stream'
-import { truncateId } from '../../lib/utils.js'
+import { truncateId } from '../../src/utils.js'
import { getKeys } from './core-manager.js'
export function logState(syncState, name) {
- let message = `${name ? name + ' ' : ''}${syncState.synced ? 'synced' : 'not synced'}\n`
+ let message = `${name ? name + ' ' : ''}${
+ syncState.synced ? 'synced' : 'not synced'
+ }\n`
for (const [coreId, state] of Object.entries(syncState.cores)) {
message += `${truncateId(coreId)}`
for (const [peerId, peerState] of Object.entries(state)) {
- message += `\n${truncateId(peerId)} (${peerState.remote ? 'remote' : 'local'}) l: ${peerState.length} h: ${peerState.have} w: ${peerState.want} u: ${peerState.unavailable} `
+ message += `\n${truncateId(peerId)} (${
+ peerState.remote ? 'remote' : 'local'
+ }) l: ${peerState.length} h: ${peerState.have} w: ${peerState.want} u: ${
+ peerState.unavailable
+ } `
}
message += '\n'
}
@@ -18,14 +24,18 @@ export function logState(syncState, name) {
}
/**
- *
- * @param {CoreManager} coreManager
- * @param {import('../../lib/core-manager/core-index.js').Namespace} namespace
+ *
+ * @param {CoreManager} coreManager
+ * @param {import('../../src/core-manager/core-index.js').Namespace} namespace
* @param {Object} [options]
* @param {number} [options.start=0]
* @param {number} [options.end=-1]
*/
-export async function download(coreManager, namespace, { start = 0, end = -1 } = {}) {
+export async function download(
+ coreManager,
+ namespace,
+ { start = 0, end = -1 } = {}
+) {
const writer = coreManager.getWriterCore(namespace)
const keys = getKeys(coreManager, namespace)
@@ -36,7 +46,10 @@ export async function download(coreManager, namespace, { start = 0, end = -1 } =
}
}
-export async function downloadCore(coreManager, { key, start = 0, end = -1 } = {}) {
+export async function downloadCore(
+ coreManager,
+ { key, start = 0, end = -1 } = {}
+) {
const core = coreManager.getCoreByKey(key)
await core.download({ start, end, ifAvailable: true }).done()
}
diff --git a/tests/replication-state.js b/tests/replication-state.js
index 53ca4c64..587c904d 100644
--- a/tests/replication-state.js
+++ b/tests/replication-state.js
@@ -2,66 +2,106 @@
import test from 'brittle'
import crypto from 'hypercore-crypto'
-import { ReplicationState, CoreReplicationState } from '../lib/sync/replication-state.js'
-import { createCoreManager, waitForCores, getKeys } from './helpers/core-manager.js'
-import { download, downloadCore, replicate } from './helpers/replication-state.js'
+import {
+ ReplicationState,
+ CoreReplicationState,
+} from '../src/sync/replication-state.js'
+import {
+ createCoreManager,
+ waitForCores,
+ getKeys,
+} from './helpers/core-manager.js'
+import {
+ download,
+ downloadCore,
+ replicate,
+} from './helpers/replication-state.js'
import { createCore } from './helpers/index.js'
-import { keyToId } from '../lib/utils.js'
test('sync cores in a namespace', async function (t) {
t.plan(2)
const projectKeyPair = crypto.keyPair()
- const cm1 = createCoreManager({ projectKey: projectKeyPair.publicKey, projectSecretKey: projectKeyPair.secretKey })
+ const cm1 = createCoreManager({
+ projectKey: projectKeyPair.publicKey,
+ projectSecretKey: projectKeyPair.secretKey,
+ })
const cm2 = createCoreManager({ projectKey: projectKeyPair.publicKey })
replicate(cm1, cm2)
await Promise.all([
- waitForCores(cm1, getKeys(cm2, 'auth')),
- waitForCores(cm2, getKeys(cm1, 'auth'))
+ waitForCores(cm1, getKeys(cm2, 'auth')),
+ waitForCores(cm2, getKeys(cm1, 'auth')),
])
const rep1 = new ReplicationState({
- coreManager: cm1,
- namespace: 'auth'
+ coreManager: cm1,
+ namespace: 'auth',
})
const rep2 = new ReplicationState({
- coreManager: cm2,
- namespace: 'auth'
+ coreManager: cm2,
+ namespace: 'auth',
})
const cm1Keys = getKeys(cm1, 'auth')
const cm2Keys = getKeys(cm2, 'auth')
const writer1 = cm1.getWriterCore('auth')
- await writer1.core.append(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'])
+ await writer1.core.append([
+ 'a',
+ 'b',
+ 'c',
+ 'd',
+ 'e',
+ 'f',
+ 'g',
+ 'h',
+ 'i',
+ 'j',
+ 'k',
+ 'l',
+ 'm',
+ 'n',
+ 'o',
+ 'p',
+ 'q',
+ 'r',
+ 's',
+ 't',
+ 'u',
+ 'v',
+ 'w',
+ 'x',
+ 'y',
+ 'z',
+ ])
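+  // clear() removes these blocks from local storage, making them unavailable to peers that sync from this writer.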
await writer1.core.clear(0, 10)
const writer2 = cm2.getWriterCore('auth')
await writer2.core.append(['1', '2', '3', '4', '5', '6', '7', '8', '9', '0'])
for (const key of cm1Keys) {
- if (key.equals(writer1.core.key)) continue
- const core = cm1.getCoreByKey(key)
- core.download({ start: 0, end: -1 })
+ if (key.equals(writer1.core.key)) continue
+ const core = cm1.getCoreByKey(key)
+ core.download({ start: 0, end: -1 })
}
for (const key of cm2Keys) {
- if (key.equals(writer2.core.key)) continue
- const core = cm2.getCoreByKey(key)
- core.download({ start: 0, end: -1 })
+ if (key.equals(writer2.core.key)) continue
+ const core = cm2.getCoreByKey(key)
+ core.download({ start: 0, end: -1 })
}
- rep1.on('state', function rep1Handler (state) {
+ rep1.on('state', function rep1Handler(state) {
if (state.synced) {
t.ok(state.synced, 'rep1 is synced')
rep1.off('state', rep1Handler)
}
})
- rep2.on('state', function rep2Handler (state) {
+ rep2.on('state', function rep2Handler(state) {
if (state.synced) {
t.ok(state.synced, 'rep2 is synced')
rep2.off('state', rep2Handler)
@@ -74,19 +114,22 @@ test('access peer state', async function (t) {
const projectKeyPair = crypto.keyPair()
- const cm1 = createCoreManager({ projectKey: projectKeyPair.publicKey, projectSecretKey: projectKeyPair.secretKey })
+ const cm1 = createCoreManager({
+ projectKey: projectKeyPair.publicKey,
+ projectSecretKey: projectKeyPair.secretKey,
+ })
const cm2 = createCoreManager({ projectKey: projectKeyPair.publicKey })
replicate(cm1, cm2)
await Promise.all([
- waitForCores(cm1, getKeys(cm2, 'auth')),
- waitForCores(cm2, getKeys(cm1, 'auth'))
+ waitForCores(cm1, getKeys(cm2, 'auth')),
+ waitForCores(cm2, getKeys(cm1, 'auth')),
])
const rep = new ReplicationState({
- coreManager: cm1,
- namespace: 'auth'
+ coreManager: cm1,
+ namespace: 'auth',
})
const writer = cm1.getWriterCore('auth')
@@ -98,7 +141,7 @@ test('access peer state', async function (t) {
const reader = cm2.getCoreByKey(writer.core.key)
- rep.on('state', async function handler () {
+ rep.on('state', async function handler() {
if (!rep.peers.length) return
if (reader.length === 1) {
@@ -120,7 +163,10 @@ test('replicate with updating data', async function (t) {
const projectKeyPair = crypto.keyPair()
- const cm1 = createCoreManager({ projectKey: projectKeyPair.publicKey, projectSecretKey: projectKeyPair.secretKey })
+ const cm1 = createCoreManager({
+ projectKey: projectKeyPair.publicKey,
+ projectSecretKey: projectKeyPair.secretKey,
+ })
const cm2 = createCoreManager({ projectKey: projectKeyPair.publicKey })
const writer1 = cm1.getWriterCore('auth')
@@ -138,36 +184,36 @@ test('replicate with updating data', async function (t) {
replicate(cm1, cm2)
await Promise.all([
- waitForCores(cm1, getKeys(cm2, 'auth')),
- waitForCores(cm2, getKeys(cm1, 'auth'))
+ waitForCores(cm1, getKeys(cm2, 'auth')),
+ waitForCores(cm2, getKeys(cm1, 'auth')),
])
const rep1 = new ReplicationState({
- coreManager: cm1,
- namespace: 'auth'
+ coreManager: cm1,
+ namespace: 'auth',
})
const rep2 = new ReplicationState({
- coreManager: cm2,
- namespace: 'auth'
+ coreManager: cm2,
+ namespace: 'auth',
})
const cm1Keys = getKeys(cm1, 'auth')
const cm2Keys = getKeys(cm2, 'auth')
for (const key of cm1Keys) {
- if (key.equals(writer1.core.key)) continue
- const core = cm1.getCoreByKey(key)
- core.download({ live: true, start: 0, end: -1 })
+ if (key.equals(writer1.core.key)) continue
+ const core = cm1.getCoreByKey(key)
+ core.download({ live: true, start: 0, end: -1 })
}
for (const key of cm2Keys) {
- if (key.equals(writer2.core.key)) continue
- const core = cm2.getCoreByKey(key)
- core.download({ live: true, start: 0, end: -1 })
+ if (key.equals(writer2.core.key)) continue
+ const core = cm2.getCoreByKey(key)
+ core.download({ live: true, start: 0, end: -1 })
}
- rep1.on('state', function rep1Handler (state) {
+ rep1.on('state', function rep1Handler(state) {
const synced = rep1.isSynced()
if (synced) {
@@ -176,7 +222,7 @@ test('replicate with updating data', async function (t) {
}
})
- rep2.on('state', function rep2Handler (state) {
+ rep2.on('state', function rep2Handler(state) {
const synced = rep2.isSynced()
if (synced) {
t.ok(synced, 'rep2 is synced')
@@ -190,7 +236,10 @@ test('add peer during replication', async function (t) {
const projectKeyPair = crypto.keyPair()
- const cm1 = createCoreManager({ projectKey: projectKeyPair.publicKey, projectSecretKey: projectKeyPair.secretKey })
+ const cm1 = createCoreManager({
+ projectKey: projectKeyPair.publicKey,
+ projectSecretKey: projectKeyPair.secretKey,
+ })
const cm2 = createCoreManager({ projectKey: projectKeyPair.publicKey })
const writer1 = cm1.getWriterCore('auth')
@@ -205,10 +254,12 @@ test('add peer during replication', async function (t) {
writer2.core.append(blocks)
}
- async function addCoreManager (existingCoreManagers) {
+ async function addCoreManager(existingCoreManagers) {
const connectedCoreManager = existingCoreManagers[0]
- const coreManager = createCoreManager({ projectKey: projectKeyPair.publicKey })
+ const coreManager = createCoreManager({
+ projectKey: projectKeyPair.publicKey,
+ })
const writer = coreManager.getWriterCore('auth')
await writer.core.ready()
for (let i = 0; i < 3000; i = i + 100) {
@@ -221,7 +272,7 @@ test('add peer during replication', async function (t) {
}
await Promise.all([
- waitForCores(coreManager, getKeys(connectedCoreManager, 'auth'))
+ waitForCores(coreManager, getKeys(connectedCoreManager, 'auth')),
])
download(coreManager, 'auth')
@@ -236,7 +287,7 @@ test('add peer during replication', async function (t) {
namespace: 'auth',
})
- rep.on('state', function rep3Handler (state) {
+ rep.on('state', function rep3Handler(state) {
if (state.synced) {
t.ok(state.synced, 'rep3 is synced')
rep.off('state', rep3Handler)
@@ -249,24 +300,24 @@ test('add peer during replication', async function (t) {
replicate(cm1, cm2)
await Promise.all([
- waitForCores(cm1, getKeys(cm2, 'auth')),
- waitForCores(cm2, getKeys(cm1, 'auth'))
+ waitForCores(cm1, getKeys(cm2, 'auth')),
+ waitForCores(cm2, getKeys(cm1, 'auth')),
])
const rep1 = new ReplicationState({
- coreManager: cm1,
- namespace: 'auth',
+ coreManager: cm1,
+ namespace: 'auth',
})
const rep2 = new ReplicationState({
- coreManager: cm2,
- namespace: 'auth',
+ coreManager: cm2,
+ namespace: 'auth',
})
download(cm1, 'auth')
download(cm2, 'auth')
- rep1.on('state', function rep1Handler (state) {
+ rep1.on('state', function rep1Handler(state) {
// logState(state)
if (state.synced) {
// logState(state)
@@ -276,7 +327,7 @@ test('add peer during replication', async function (t) {
})
let added = false
- rep2.on('state', async function rep2Handler (state) {
+ rep2.on('state', async function rep2Handler(state) {
// add another core manager after replication has started between the others
if (!state.synced && !added) {
added = true
@@ -293,7 +344,10 @@ test('peer leaves during replication, third peer arrives, sync all later', async
t.plan(5)
const projectKeyPair = crypto.keyPair()
- const cm1 = createCoreManager({ projectKey: projectKeyPair.publicKey, projectSecretKey: projectKeyPair.secretKey })
+ const cm1 = createCoreManager({
+ projectKey: projectKeyPair.publicKey,
+ projectSecretKey: projectKeyPair.secretKey,
+ })
const cm2 = createCoreManager({ projectKey: projectKeyPair.publicKey })
const cm3 = createCoreManager({ projectKey: projectKeyPair.publicKey })
@@ -321,41 +375,38 @@ test('peer leaves during replication, third peer arrives, sync all later', async
const cm2Keys = getKeys(cm2, 'auth')
const cm3Keys = getKeys(cm3, 'auth')
- await Promise.all([
- waitForCores(cm1, cm2Keys),
- waitForCores(cm2, cm1Keys)
- ])
+ await Promise.all([waitForCores(cm1, cm2Keys), waitForCores(cm2, cm1Keys)])
const rep1 = new ReplicationState({
- coreManager: cm1,
- namespace: 'auth'
+ coreManager: cm1,
+ namespace: 'auth',
})
const rep2 = new ReplicationState({
- coreManager: cm2,
- namespace: 'auth'
+ coreManager: cm2,
+ namespace: 'auth',
})
const rep3 = new ReplicationState({
- coreManager: cm3,
- namespace: 'auth'
+ coreManager: cm3,
+ namespace: 'auth',
})
- rep1.on('state', function rep1Handler (state) {
+ rep1.on('state', function rep1Handler(state) {
if (state.synced) {
t.ok(state.synced, 'rep1 is synced')
rep1.off('state', rep1Handler)
}
})
- rep2.on('state', function rep2Handler (state) {
+ rep2.on('state', function rep2Handler(state) {
if (state.synced) {
t.ok(state.synced, 'rep2 is synced')
rep2.off('state', rep2Handler)
}
})
- rep3.on('state', function rep3Handler (state) {
+ rep3.on('state', function rep3Handler(state) {
if (state.synced) {
t.ok(state.synced, 'rep3 is synced')
rep3.off('state', rep3Handler)
@@ -380,10 +431,7 @@ test('peer leaves during replication, third peer arrives, sync all later', async
// restart replication between peer1 and peer2
replicate(cm1, cm2)
- await Promise.all([
- waitForCores(cm1, cm2Keys),
- waitForCores(cm2, cm1Keys)
- ])
+ await Promise.all([waitForCores(cm1, cm2Keys), waitForCores(cm2, cm1Keys)])
// sync all data between peer1 and peer2
await download(cm1, 'auth')
@@ -420,12 +468,16 @@ test('replicate core with unavailable blocks', async (t) => {
length: 7,
have: 6,
want: 1,
- unavailable: 1
+ unavailable: 1,
}
rs.on('synced', (state) => {
for (const { length, have, want, unavailable } of state) {
- t.alike({ length, have, want, unavailable }, expected, 'peer state is correct')
+ t.alike(
+ { length, have, want, unavailable },
+ expected,
+ 'peer state is correct'
+ )
}
})
@@ -457,14 +509,14 @@ test('replicate 3 cores with unavailable blocks', async (t) => {
have: 7,
want: 0,
unavailable: 0,
- length: 7
+ length: 7,
}
const expectedPartialSync = {
have: 4,
want: 3,
unavailable: 3,
- length: 7
+ length: 7,
}
rs.on('synced', async (state) => {
diff --git a/tests/rpc.js b/tests/rpc.js
index 2615b474..9c8c0a35 100644
--- a/tests/rpc.js
+++ b/tests/rpc.js
@@ -5,20 +5,20 @@ import {
MapeoRPC,
PeerDisconnectedError,
TimeoutError,
- UnknownPeerError
-} from '../lib/rpc/index.js'
+ UnknownPeerError,
+} from '../src/rpc/index.js'
import FakeTimers from '@sinonjs/fake-timers'
import { once } from 'events'
import { Duplex } from 'streamx'
-test('Send invite and accept', async t => {
+test('Send invite and accept', async (t) => {
t.plan(3)
const r1 = new MapeoRPC()
const r2 = new MapeoRPC()
const projectKey = Buffer.allocUnsafe(32).fill(0)
- r1.on('peers', async peers => {
+ r1.on('peers', async (peers) => {
t.is(peers.length, 1)
const response = await r1.invite(peers[0].id, { projectKey })
t.is(response, MapeoRPC.InviteResponse.ACCEPT)
@@ -28,21 +28,21 @@ test('Send invite and accept', async t => {
t.ok(invite.projectKey.equals(projectKey), 'invite project key correct')
r2.inviteResponse(peerId, {
projectKey: invite.projectKey,
- decision: MapeoRPC.InviteResponse.ACCEPT
+ decision: MapeoRPC.InviteResponse.ACCEPT,
})
})
replicate(r1, r2)
})
-test('Send invite and reject', async t => {
+test('Send invite and reject', async (t) => {
t.plan(3)
const r1 = new MapeoRPC()
const r2 = new MapeoRPC()
const projectKey = Buffer.allocUnsafe(32).fill(0)
- r1.on('peers', async peers => {
+ r1.on('peers', async (peers) => {
t.is(peers.length, 1)
const response = await r1.invite(peers[0].id, { projectKey })
t.is(response, MapeoRPC.InviteResponse.REJECT)
@@ -52,14 +52,14 @@ test('Send invite and reject', async t => {
t.ok(invite.projectKey.equals(projectKey), 'invite project key correct')
r2.inviteResponse(peerId, {
projectKey: invite.projectKey,
- decision: MapeoRPC.InviteResponse.REJECT
+ decision: MapeoRPC.InviteResponse.REJECT,
})
})
replicate(r1, r2)
})
-test('Invite to unknown peer', async t => {
+test('Invite to unknown peer', async (t) => {
const r1 = new MapeoRPC()
const r2 = new MapeoRPC()
@@ -73,20 +73,20 @@ test('Invite to unknown peer', async t => {
() =>
r2.inviteResponse(unknownPeerId, {
projectKey,
- decision: MapeoRPC.InviteResponse.ACCEPT
+ decision: MapeoRPC.InviteResponse.ACCEPT,
}),
UnknownPeerError
)
})
-test('Send invite and already on project', async t => {
+test('Send invite and already on project', async (t) => {
t.plan(3)
const r1 = new MapeoRPC()
const r2 = new MapeoRPC()
const projectKey = Buffer.allocUnsafe(32).fill(0)
- r1.on('peers', async peers => {
+ r1.on('peers', async (peers) => {
t.is(peers.length, 1)
const response = await r1.invite(peers[0].id, { projectKey })
t.is(response, MapeoRPC.InviteResponse.ALREADY)
@@ -96,14 +96,14 @@ test('Send invite and already on project', async t => {
t.ok(invite.projectKey.equals(projectKey), 'invite project key correct')
r2.inviteResponse(peerId, {
projectKey: invite.projectKey,
- decision: MapeoRPC.InviteResponse.ALREADY
+ decision: MapeoRPC.InviteResponse.ALREADY,
})
})
replicate(r1, r2)
})
-test('Send invite with encryption key', async t => {
+test('Send invite with encryption key', async (t) => {
t.plan(4)
const r1 = new MapeoRPC()
const r2 = new MapeoRPC()
@@ -111,14 +111,14 @@ test('Send invite with encryption key', async t => {
const projectKey = Buffer.allocUnsafe(32).fill(0)
const encryptionKeys = {
auth: Buffer.allocUnsafe(32).fill(1),
- data: Buffer.allocUnsafe(32).fill(2)
+ data: Buffer.allocUnsafe(32).fill(2),
}
- r1.on('peers', async peers => {
+ r1.on('peers', async (peers) => {
t.is(peers.length, 1)
const response = await r1.invite(peers[0].id, {
projectKey,
- encryptionKeys
+ encryptionKeys,
})
t.is(response, MapeoRPC.InviteResponse.ACCEPT)
})
@@ -132,14 +132,14 @@ test('Send invite with encryption key', async t => {
)
r2.inviteResponse(peerId, {
projectKey: invite.projectKey,
- decision: MapeoRPC.InviteResponse.ACCEPT
+ decision: MapeoRPC.InviteResponse.ACCEPT,
})
})
replicate(r1, r2)
})
-test('Send invite with project config', async t => {
+test('Send invite with project config', async (t) => {
t.plan(4)
const r1 = new MapeoRPC()
const r2 = new MapeoRPC()
@@ -147,11 +147,11 @@ test('Send invite with project config', async t => {
const projectKey = Buffer.allocUnsafe(32).fill(0)
const projectConfig = Buffer.allocUnsafe(1024).fill(1)
- r1.on('peers', async peers => {
+ r1.on('peers', async (peers) => {
t.is(peers.length, 1)
const response = await r1.invite(peers[0].id, {
projectKey,
- projectConfig
+ projectConfig,
})
t.is(response, MapeoRPC.InviteResponse.ACCEPT)
})
@@ -165,20 +165,20 @@ test('Send invite with project config', async t => {
)
r2.inviteResponse(peerId, {
projectKey: invite.projectKey,
- decision: MapeoRPC.InviteResponse.ACCEPT
+ decision: MapeoRPC.InviteResponse.ACCEPT,
})
})
replicate(r1, r2)
})
-test('Disconnected peer shows in state', async t => {
+test('Disconnected peer shows in state', async (t) => {
t.plan(6)
const r1 = new MapeoRPC()
const r2 = new MapeoRPC()
let peerStateUpdates = 0
- r1.on('peers', async peers => {
+ r1.on('peers', async (peers) => {
t.is(peers.length, 1, 'one peer in state')
if (peers[0].status === 'connected') {
t.pass('peer appeared as connected')
@@ -193,14 +193,14 @@ test('Disconnected peer shows in state', async t => {
const destroy = replicate(r1, r2)
})
-test('Disconnect results in rejected invite', async t => {
+test('Disconnect results in rejected invite', async (t) => {
t.plan(2)
const r1 = new MapeoRPC()
const r2 = new MapeoRPC()
const projectKey = Buffer.allocUnsafe(32).fill(0)
- r1.on('peers', async peers => {
+ r1.on('peers', async (peers) => {
if (peers[0].status === 'connected') {
const invite = r1.invite(peers[0].id, { projectKey })
await t.exception(
@@ -220,7 +220,7 @@ test('Disconnect results in rejected invite', async t => {
const destroy = replicate(r1, r2)
})
-test('Invite to multiple peers', async t => {
+test('Invite to multiple peers', async (t) => {
// This is catches not tracking invites by peer
t.plan(2)
const r1 = new MapeoRPC()
@@ -229,11 +229,11 @@ test('Invite to multiple peers', async t => {
const projectKey = Buffer.allocUnsafe(32).fill(0)
- r1.on('peers', async peers => {
+ r1.on('peers', async (peers) => {
if (peers.length < 2) return
t.pass('connected to two peers')
const responses = await Promise.all(
- peers.map(peer => r1.invite(peer.id, { projectKey }))
+ peers.map((peer) => r1.invite(peer.id, { projectKey }))
)
t.alike(
responses.sort(),
@@ -245,14 +245,14 @@ test('Invite to multiple peers', async t => {
r2.on('invite', (peerId, invite) => {
r2.inviteResponse(peerId, {
projectKey: invite.projectKey,
- decision: MapeoRPC.InviteResponse.ACCEPT
+ decision: MapeoRPC.InviteResponse.ACCEPT,
})
})
r3.on('invite', (peerId, invite) => {
r3.inviteResponse(peerId, {
projectKey: invite.projectKey,
- decision: MapeoRPC.InviteResponse.REJECT
+ decision: MapeoRPC.InviteResponse.REJECT,
})
})
@@ -261,7 +261,7 @@ test('Invite to multiple peers', async t => {
replicate(r3, r1)
})
-test('Multiple invites to a peer, only one response', async t => {
+test('Multiple invites to a peer, only one response', async (t) => {
t.plan(2)
let count = 0
const r1 = new MapeoRPC()
@@ -269,11 +269,11 @@ test('Multiple invites to a peer, only one response', async t => {
const projectKey = Buffer.allocUnsafe(32).fill(0)
- r1.on('peers', async peers => {
+ r1.on('peers', async (peers) => {
const responses = await Promise.all([
r1.invite(peers[0].id, { projectKey }),
r1.invite(peers[0].id, { projectKey }),
- r1.invite(peers[0].id, { projectKey })
+ r1.invite(peers[0].id, { projectKey }),
])
const expected = Array(3).fill(MapeoRPC.InviteResponse.ACCEPT)
t.alike(responses, expected)
@@ -285,14 +285,14 @@ test('Multiple invites to a peer, only one response', async t => {
t.is(count, 3)
r2.inviteResponse(peerId, {
projectKey: invite.projectKey,
- decision: MapeoRPC.InviteResponse.ACCEPT
+ decision: MapeoRPC.InviteResponse.ACCEPT,
})
})
replicate(r1, r2)
})
-test('Default: invites do not timeout', async t => {
+test('Default: invites do not timeout', async (t) => {
const clock = FakeTimers.install({ shouldAdvanceTime: true })
t.teardown(() => clock.uninstall())
t.plan(1)
@@ -302,7 +302,7 @@ test('Default: invites do not timeout', async t => {
const projectKey = Buffer.allocUnsafe(32).fill(0)
- r1.once('peers', async peers => {
+ r1.once('peers', async (peers) => {
r1.invite(peers[0].id, { projectKey }).then(
() => t.fail('invite promise should not resolve'),
() => t.fail('invite promise should not reject')
@@ -314,7 +314,7 @@ test('Default: invites do not timeout', async t => {
replicate(r1, r2)
})
-test('Invite timeout', async t => {
+test('Invite timeout', async (t) => {
const clock = FakeTimers.install({ shouldAdvanceTime: true })
t.teardown(() => clock.uninstall())
t.plan(1)
@@ -324,7 +324,7 @@ test('Invite timeout', async t => {
const projectKey = Buffer.allocUnsafe(32).fill(0)
- r1.once('peers', async peers => {
+ r1.once('peers', async (peers) => {
t.exception(
r1.invite(peers[0].id, { projectKey, timeout: 5000 }),
TimeoutError
@@ -335,7 +335,7 @@ test('Invite timeout', async t => {
replicate(r1, r2)
})
-test('Reconnect peer and send invite', async t => {
+test('Reconnect peer and send invite', async (t) => {
const r1 = new MapeoRPC()
const r2 = new MapeoRPC()
@@ -352,7 +352,7 @@ test('Reconnect peer and send invite', async t => {
t.ok(invite.projectKey.equals(projectKey), 'invite project key correct')
r2.inviteResponse(peerId, {
projectKey: invite.projectKey,
- decision: MapeoRPC.InviteResponse.ACCEPT
+ decision: MapeoRPC.InviteResponse.ACCEPT,
})
})
@@ -364,37 +364,41 @@ test('Reconnect peer and send invite', async t => {
t.is(response, MapeoRPC.InviteResponse.ACCEPT)
})
-test('invalid stream', t => {
+test('invalid stream', (t) => {
const r1 = new MapeoRPC()
const regularStream = new Duplex()
t.exception(() => r1.connect(regularStream), 'Invalid stream')
})
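+// Test helper: connects two MapeoRPC instances over an in-memory NoiseSecretStream pair.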
-function replicate (rpc1, rpc2) {
+function replicate(rpc1, rpc2) {
const n1 = new NoiseSecretStream(true, undefined, {
// Keep keypairs deterministic for tests, since we use peer.publicKey as an identifier.
- keyPair: NoiseSecretStream.keyPair(Buffer.allocUnsafe(32).fill(0))
+ keyPair: NoiseSecretStream.keyPair(Buffer.allocUnsafe(32).fill(0)),
})
const n2 = new NoiseSecretStream(false, undefined, {
- keyPair: NoiseSecretStream.keyPair(Buffer.allocUnsafe(32).fill(1))
+ keyPair: NoiseSecretStream.keyPair(Buffer.allocUnsafe(32).fill(1)),
})
n1.rawStream.pipe(n2.rawStream).pipe(n1.rawStream)
rpc1.connect(n1)
rpc2.connect(n2)
- return async function destroy () {
+ return async function destroy() {
return Promise.all([
      /** @type {Promise<void>} */
- (new Promise(res => {
- n1.on('close', res)
- n1.destroy()
- })),
+ (
+ new Promise((res) => {
+ n1.on('close', res)
+ n1.destroy()
+ })
+ ),
      /** @type {Promise<void>} */
- (new Promise(res => {
- n2.on('close', res)
- n2.destroy()
- }))
+ (
+ new Promise((res) => {
+ n2.on('close', res)
+ n2.destroy()
+ })
+ ),
])
}
}
diff --git a/tests/schema.js b/tests/schema.js
index f88c8334..53ad2599 100644
--- a/tests/schema.js
+++ b/tests/schema.js
@@ -1,15 +1,15 @@
/* eslint-disable no-unused-vars */
import { test } from 'brittle'
import { getTableConfig } from 'drizzle-orm/sqlite-core'
-import * as clientTableSchemas from '../lib/schema/client.js'
-import * as projectTableSchemas from '../lib/schema/project.js'
+import * as clientTableSchemas from '../src/schema/client.js'
+import * as projectTableSchemas from '../src/schema/project.js'
import { dereferencedDocSchemas as jsonSchemas } from '@mapeo/schema'
import { drizzle } from 'drizzle-orm/better-sqlite3'
import Database from 'better-sqlite3'
import {
BACKLINK_TABLE_POSTFIX,
getBacklinkTableName,
-} from '../lib/schema/utils.js'
+} from '../src/schema/utils.js'
test('Expected table config', (t) => {
const allTableSchemas = [
@@ -52,7 +52,7 @@ test('Expected table config', (t) => {
/**
* @template {object} T
- * @typedef {import('../lib/schema/schema-to-drizzle.js').OptionalToNull<T>} OptionalToNull
+ * @typedef {import('../src/schema/schema-to-drizzle.js').OptionalToNull<T>} OptionalToNull
*/
/**
* @typedef {import('@mapeo/schema').MapeoDoc} MapeoDoc
diff --git a/tsconfig.json b/tsconfig.json
index 5da87202..cd77d0c8 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -25,7 +25,7 @@
]
}
},
- "files": ["tests/schema.js", "test-e2e/crud.js"],
- "include": ["**/*"],
+ "files": ["tests/schema.js", "test-e2e/project-crud.js"],
+ "include": ["src/**/*"],
"exclude": ["node_modules", "tmp", "tests", "examples"]
}
diff --git a/typedoc.json b/typedoc.json
index bfdd9009..a1bd45a0 100644
--- a/typedoc.json
+++ b/typedoc.json
@@ -1,5 +1,5 @@
{
- "entryPoints": ["./index.js", "./lib/**/*.js"],
+ "entryPoints": ["./index.js", "./src/**/*.js"],
"allReflectionsHaveOwnDocument": true,
"cleanOutputDir": false,
"readme": "none",