From b2af823471d6b70554080dc31c8a05fce4371bd3 Mon Sep 17 00:00:00 2001 From: RangerMauve Date: Fri, 7 Feb 2020 15:25:22 -0500 Subject: [PATCH] Dat-2 support, nodejs-only? --- index.js | 229 +++++++++++-------- package.json | 16 +- promise.js | 623 --------------------------------------------------- test.js | 126 ++++++----- 4 files changed, 205 insertions(+), 789 deletions(-) delete mode 100644 promise.js diff --git a/index.js b/index.js index 89f332a..d3b4fde 100644 --- a/index.js +++ b/index.js @@ -3,18 +3,18 @@ const path = require('path') // This is a dirty hack for browserify to work. 😅 if (!path.posix) path.posix = path -const discovery = require('hyperdiscovery') -const datStorage = require('universal-dat-storage') +const SwarmNetworker = require('corestore-swarm-networking') +const RAA = require('random-access-application') const DatEncoding = require('dat-encoding') const crypto = require('hypercore-crypto') const RAM = require('random-access-memory') -const fs = require('fs') +const HypercoreProtocol = require('hypercore-protocol') const datDNS = require('dat-dns') -const hyperdrive = require('hyperdrive') -const hypercore = require('hypercore') +const makeHyperdrive = require('hyperdrive') +const Corestore = require('corestore') +const makeHypercore = require('hypercore') -const DEFAULT_STORAGE_OPTS = {} const DEFAULT_SWARM_OPTS = { extensions: [] } @@ -27,25 +27,63 @@ const DEFAULT_CORE_OPTS = { persist: true } const DEFAULT_DNS_OPTS = {} +const DEFAULT_CORESTORE_OPTS = { + sparse: true +} + +const DEFAULT_APPLICATION_NAME = 'dat-sdk' module.exports = SDK -function SDK ({ storageOpts, swarmOpts, driveOpts, coreOpts, dnsOpts } = {}) { - const storage = datStorage(Object.assign({}, DEFAULT_STORAGE_OPTS, storageOpts)) - const swarm = discovery(Object.assign({}, DEFAULT_SWARM_OPTS, swarmOpts)) +// TODO: Set up Promise API based on Beaker https://github.com/beakerbrowser/beaker/blob/blue-hyperdrive10/app/bg/web-apis/fg/hyperdrive.js + +async function SDK ({ 
+ storage, + corestore, + applicationName = DEFAULT_APPLICATION_NAME, + swarmOpts, + driveOpts, + coreOpts, + dnsOpts, + corestoreOpts +} = {} +) { + // Derive storage if it isn't provided + // Don't derive if corestore was provided + if (!storage && !corestore) storage = RAA(applicationName) + + if (!corestore) { + corestore = new Corestore( + storage, + Object.assign({}, DEFAULT_CORESTORE_OPTS, corestoreOpts) + ) + } + + const swarm = new SwarmNetworker(corestore, Object.assign({}, DEFAULT_SWARM_OPTS, swarmOpts)) const dns = datDNS(Object.assign({}, DEFAULT_DNS_OPTS, dnsOpts)) // Track list of hyperdrives const drives = new Map() const cores = new Map() - function addExtensions (extensions) { - if (!extensions || !extensions.length) return - // TODO: This has code smell - const currentExtensions = swarm._opts.extensions || [] - const finalSet = new Set([...currentExtensions, ...extensions]) + await corestore.ready() + + // I think this is used to create a persisted identity? + const noiseSeed = corestore._deriveSecret(applicationName, 'replication-keypair') + swarm.listen({ + keyPair: HypercoreProtocol.keyPair(noiseSeed) + }) - swarm._opts.extensions = [...finalSet] + return { + Hyperdrive, + Hypercore, + resolveName, + deleteStorage, + destroy, + _storage: storage, + _corestore: corestore, + _swarm: swarm, + _dns: dns } function destroy (cb) { @@ -68,137 +106,134 @@ function SDK ({ storageOpts, swarmOpts, driveOpts, coreOpts, dnsOpts } = {}) { storage.delete(key, cb) } - function Hyperdrive (location, opts) { - opts = Object.assign({}, DEFAULT_DRIVE_OPTS, driveOpts, opts) + function Hyperdrive (nameOrKey, opts) { + if (!nameOrKey) throw new Error('Must give a name or key in the constructor') - addExtensions(opts.extensions) + opts = Object.assign({}, DEFAULT_DRIVE_OPTS, driveOpts, opts) let key = null - if (!location) { - const { publicKey, secretKey } = crypto.keyPair() - key = publicKey - location = DatEncoding.encode(publicKey) - opts.secretKey = secretKey - 
} - try { - key = DatEncoding.decode(location) + key = DatEncoding.decode(nameOrKey) + // Normalize keys to be hex strings of the key instead of dat URLs + nameOrKey = key.toString('hex') } catch (e) { - // Location must be relative path + // Probably isn't a `dat://` URL, so it must be a name } - const stringKey = key.toString('hex') + if (drives.has(nameOrKey)) return drives.get(nameOrKey) - if (drives.has(stringKey)) return drives.get(stringKey) + opts.namespace = nameOrKey const { persist } = opts - let driveStorage = null - try { - if (!persist) { - driveStorage = RAM - } else if (opts.storage) { - driveStorage = opts.storage(location) - } else { - driveStorage = storage.getDrive(location) - } - } catch (e) { - if (e.message !== 'Unable to create storage') throw e - - // If the folder isn't a dat archive. Turn it into one. - const { publicKey, secretKey } = crypto.keyPair() - fs.writeFileSync(path.join(location, '.dat'), publicKey) - key = publicKey - location = DatEncoding.encode(publicKey) - opts.secretKey = secretKey - - if (opts.storage) { - driveStorage = opts.storage(location) - } else { - driveStorage = storage.getDrive(location) - } + let driveStorage = corestore + if (!persist) { + driveStorage = RAM + } else if (opts.storage) { + driveStorage = opts.storage(key) + } else { + driveStorage = corestore } - const drive = hyperdrive(driveStorage, key, opts) + const drive = makeHyperdrive(driveStorage, key, opts) - drives.set(stringKey, drive) + drives.set(nameOrKey, drive) + if (!key) { + drive.ready(() => { + const key = drive.key + const stringKey = key.toString('hex') + drives.set(stringKey, drive) + }) + } drive.ready(() => { - swarm.add(drive) + swarm.seed(drive.discoveryKey) }) drive.once('close', () => { - const discoveryKey = DatEncoding.encode(drive.discoveryKey) - swarm.leave(discoveryKey) - swarm._replicatingFeeds.delete(discoveryKey) + swarm.unseed(drive.discoveryKey) + + const key = drive.key + const stringKey = key.toString('hex') + 
drives.delete(stringKey) + drives.delete(nameOrKey) }) return drive } - function Hypercore (location, opts) { - opts = Object.assign({}, DEFAULT_CORE_OPTS, coreOpts, opts) + function Hypercore (nameOrKey, opts) { + if (!nameOrKey) throw new Error('Must give a name or key in the constructor') - addExtensions(opts.extensions) + opts = Object.assign({}, DEFAULT_CORE_OPTS, coreOpts, opts) let key = null - if (!location) { - const { publicKey, secretKey } = crypto.keyPair() - key = publicKey - location = DatEncoding.encode(publicKey) - opts.secretKey = secretKey - } - try { - key = DatEncoding.decode(location) + key = DatEncoding.decode(nameOrKey) + // Normalize keys to be hex strings of the key instead of dat URLs + nameOrKey = key.toString('hex') } catch (e) { - // Location must be relative path + // Probably isn't a `dat://` URL, so it must be a name } - const stringKey = location.toString('hex') - - if (cores.has(stringKey)) return cores.get(stringKey) + if (cores.has(nameOrKey)) return cores.get(nameOrKey) const { persist } = opts let coreStorage = null - try { - if (!persist) { - coreStorage = RAM - } else if (opts.storage) { - coreStorage = opts.storage(location) + + if (!persist) { + coreStorage = RAM + } else if (opts.storage) { + coreStorage = opts.storage(key) + } + + let core = null + + // If storage was passed in the opts, use it. 
Else use the corestore + if (coreStorage) { + // We only want to generate keys if we have a custom storage + // Else the corestore does fancy key storage for us + if (!key) { + const { publicKey, secretKey } = crypto.keyPair() + key = publicKey + opts.secretKey = secretKey + } + core = makeHypercore(coreStorage, key, opts) + } else { + if (key) { + // If a dat key was provided, get it from the corestore + core = corestore.get({ key, ...opts }) } else { - coreStorage = storage.getCore(location) + // If no dat key was provided, but a name was given, use it as a namespace + core = corestore.namespace(nameOrKey).default(opts) } - } catch (e) { - if (e.message !== 'Unable to create storage') throw e } - const core = hypercore(coreStorage, key, opts) - - cores.set(stringKey, core) + cores.set(nameOrKey, core) + if (!key) { + core.ready(() => { + const key = core.key + const stringKey = key.toString('hex') + cores.set(stringKey, core) + }) + } core.ready(() => { - swarm.add(core) + swarm.seed(core.discoveryKey) }) core.once('close', () => { - const discoveryKey = DatEncoding.encode(core.discoveryKey) - swarm.leave(discoveryKey) - swarm._replicatingFeeds.delete(discoveryKey) + const key = core.key + const stringKey = key.toString('hex') + + swarm.unseed(core.discoveryKey) cores.delete(stringKey) + cores.delete(nameOrKey) }) return core } - - return { - Hyperdrive, - Hypercore, - resolveName, - deleteStorage, - destroy - } } diff --git a/package.json b/package.json index 81fc495..445fe94 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "The official Dat SDK", "main": "index.js", "scripts": { - "test": "node test && node test-promise", + "test": "node test", "build-test": "browserify -d test.js > test-bundle.js", "build-test-promise": "browserify -d test-promise.js > test-bundle.js" }, @@ -26,25 +26,27 @@ }, "browser": { "dat-dns": "./dns-web.js", + "hyperswarm": "hyperswarm-web", "./localstorage": "./localstorage-web.js" }, "homepage": 
"https://github.com/datproject/sdk#readme", "dependencies": { "concat-stream": "^2.0.0", + "corestore": "^5.0.8", + "corestore-swarm-networking": "^4.0.3", "dat-dns": "^4.0.0", "dat-encoding": "^5.0.1", "dom-event-target": "^1.0.0", "global": "^4.4.0", "hex-to-32": "^2.0.0", - "hypercore": "^7.5.0", + "hypercore": "^8.7.0", "hypercore-crypto": "^1.0.0", - "hyperdiscovery": "^10.2.0", - "hyperdrive": "github:mafintosh/hyperdrive#v9", - "node-dat-archive": "^2.2.0", + "hypercore-protocol": "^7.10.0", + "hyperdrive": "^10.8.10", + "hyperswarm-web": "^1.0.1", "node-localstorage": "^2.1.5", - "pauls-dat-api": "^8.1.0", + "random-access-application": "^1.0.0", "random-access-memory": "^3.1.1", - "universal-dat-storage": "^1.3.1", "universal-localstorage": "^1.0.2", "universal-prompt": "^1.0.0", "url-parse": "^1.4.7" diff --git a/promise.js b/promise.js deleted file mode 100644 index 08118eb..0000000 --- a/promise.js +++ /dev/null @@ -1,623 +0,0 @@ -const path = require('path') -const pda = require('pauls-dat-api') -const parseURL = require('url-parse') -const concat = require('concat-stream') -const EventTarget = require('dom-event-target') -const { - DAT_MANIFEST_FILENAME, - DAT_VALID_PATH_REGEX -} = require('node-dat-archive/lib/const') -const { - ArchiveNotWritableError, - ProtectedFileNotWritableError, - InvalidPathError, - TimeoutError -} = require('beaker-error-constants') -const hexTo32 = require('hex-to-32') -const getLocalstorage = require('./localstorage') - -const prompt = require('universal-prompt') - -const SDKcb = require('./') - -// Gateways are hella slow so we'll have a crazy long timeout -const API_TIMEOUT = 15 * 1000 - -// How long to wait to get peers / sync with them -const READY_DELAY = 3000 - -const BASE_32_KEY_LENGTH = 52 - -const DAT_KEY_URL_REGEX = /^dat:\/\/[\dabcdef]{64}\/?$/i - -module.exports = function SDK (opts) { - const { Hyperdrive, resolveName, deleteStorage, destroy } = SDKcb(opts) - - const localStorage = getLocalstorage((opts || 
{}).storageOpts) - - function isLocal (key) { - try { - const current = listLocal() - return current.includes(key) - } catch (e) { - return false - } - } - - function listLocal () { - try { - return JSON.parse(localStorage.getItem('dats')) - } catch (e) { - return [] - } - } - - function addLocal (key) { - try { - const current = listLocal() - saveLocal(current.concat(key)) - } catch (e) { - saveLocal([key]) - } - } - - function saveLocal (list) { - localStorage.setItem('dats', JSON.stringify(list)) - } - - async function reallyReady (archive) { - if (archive.writable) return - - const files = new Promise((resolve, reject) => { - archive.readdir('/', (err, files) => err ? resolve([]) : resolve(files)) - }) - - if (files.length) return - - return new Promise((resolve, reject) => { - function cb (err, result) { - // Ignore errors saying we're up to date - if (err && err.message !== 'No update available from peers') reject(err) - else resolve(result) - } - if (archive.metadata.peers.length) { - archive.metadata.update({ ifAvailable: true }, cb) - } else { - const timeout = setTimeout(cb, READY_DELAY) - archive.metadata.once('peer-add', () => { - clearTimeout(timeout) - archive.metadata.update({ ifAvailable: true }, cb) - }) - } - }) - } - - async function getURLData (url) { - let key = null - let version = null - - if (url) { - if (!url.startsWith('dat://') && !url.startsWith('http://') && !url.startsWith('https://')) { - url = `dat://${url}` - } - const parsed = parseURL(url) - let hostname = null - const isDat = parsed.protocol.indexOf('dat') === 0 - const isUndefined = parsed.protocol.indexOf('undefined') === 0 - if (isDat || isUndefined) { - const hostnameParts = parsed.hostname.split('+') - hostname = hostnameParts[0] - version = hostnameParts[1] || null - } else { - const hostnameParts = parsed.hostname.split('.') - const subdomain = hostnameParts[0] - if (subdomain.length === BASE_32_KEY_LENGTH) { - hostname = hexTo32.decode(subdomain) - } else { - hostname = 
parsed.hostname - } - } - - key = await DatArchive.resolveName(`dat://${hostname}`) - } - - return { - key, - version - } - } - - class DatArchive extends EventTarget { - constructor (url, options = {}) { - super() - this.url = url - - this._loadPromise = Promise.resolve().then(async () => { - const { key, version } = await getURLData(url) - - let archive = null - - const localOptions = { - persist: false - } - - if (key) { - if (isLocal(key)) { - localOptions.persist = true - } - const finalOptions = Object.assign(localOptions, options) - archive = Hyperdrive(key, finalOptions) - } else { - localOptions.persist = true - const finalOptions = Object.assign(localOptions, options) - - archive = Hyperdrive(null, finalOptions) - const keyHex = archive.metadata.key.toString('hex') - addLocal(`dat://${keyHex}`) - } - - this._archive = archive - - await waitReady(archive) - - await reallyReady(archive) - - this._archive.once('close', () => { - this.send('close', { target: this }) - }) - - this._checkout = version ? 
archive.checkout(version) : archive - this.url = this.url || `dat://${archive.key.toString('hex')}` - this._loadPromise = null - - var s = toEventTarget(pda.createNetworkActivityStream(this._archive)) - - s.addEventListener('network-changed', detail => - this.send('network-changed', { target: this, ...detail }) - ) - - s.addEventListener('download', detail => - this.send('download', { target: this, ...detail }) - ) - - s.addEventListener('upload', detail => - this.send('upload', { target: this, ...detail }) - ) - - s.addEventListener('sync', detail => - this.send('sync', { target: this, ...detail }) - ) - }) - } - - async getSecretKey () { - await this._loadPromise - return this._archive.secretKey - } - - async getInfo (opts = {}) { - return timer(to(opts), async () => { - await this._loadPromise - - // read manifest - var manifest - try { - manifest = await pda.readManifest(this._checkout) - } catch (e) { - manifest = {} - } - - // return - return { - key: this._archive.key.toString('hex'), - url: this.url, - isOwner: this._archive.writable, - - // state - version: this._checkout.version, - peers: this._archive.metadata.peers.length, - mtime: 0, - size: 0, - - // manifest - title: manifest.title, - description: manifest.description, - type: manifest.type, - author: manifest.author - } - }) - } - - async configure (settings) { - await this._loadPromise - if (!settings || typeof settings !== 'object') throw new Error('Invalid argument') - if ('title' in settings || 'description' in settings || 'type' in settings || 'author' in settings) { - await pda.updateManifest(this._archive, settings) - } - } - - async diff () { - // noop - return [] - } - - async commit () { - // noop - return [] - } - - async revert () { - // noop - return [] - } - - async history (opts = {}) { - return timer(to(opts), async () => { - await this._loadPromise - var reverse = opts.reverse === true - var { start, end } = opts - - // if reversing the output, modify start/end - start = start || 0 
- end = end || this._checkout.metadata.length - if (reverse) { - // swap values - const t = start - start = end - end = t - // start from the end - start = this._checkout.metadata.length - start - end = this._checkout.metadata.length - end - } - - return new Promise((resolve, reject) => { - var stream = this._checkout.history({ live: false, start, end }) - stream.pipe(concat({ encoding: 'object' }, values => { - values = values.map(massageHistoryObj) - if (reverse) values.reverse() - resolve(values) - })) - stream.on('error', reject) - }) - }) - } - - async stat (filepath, opts = {}) { - filepath = massageFilepath(filepath) - return timer(to(opts), async () => { - await this._loadPromise - return pda.stat(this._checkout, filepath) - }) - } - - async readFile (filepath, opts = {}) { - filepath = massageFilepath(filepath) - return timer(to(opts), async () => { - await this._loadPromise - return pda.readFile(this._checkout, filepath, opts) - }) - } - - watch (pathPattern, onInvalidated) { - if (typeof pathPattern === 'function') { - onInvalidated = pathPattern - pathPattern = null - } - - if (this._loadPromise) { - var proxy = new EventTarget() - this._loadPromise.then(() => { - var evts = this.watch(pathPattern, onInvalidated) - evts.addEventListener('invalidated', (e) => { - proxy.send('invalidated', e) - }) - evts.addEventListener('changed', (e) => { - proxy.send('changed', e) - }) - }) - return proxy - } - - var evts = toEventTarget(pda.watch(this._archive, pathPattern)) - if (onInvalidated) evts.addEventListener('invalidated', onInvalidated) - return evts - } - - async writeFile (filepath, data, opts = {}) { - filepath = massageFilepath(filepath) - return timer(to(opts), async () => { - await this._loadPromise - if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version') - await assertWritePermission(this._archive) - await assertValidFilePath(filepath) - await assertUnprotectedFilePath(filepath) - return pda.writeFile(this._archive, 
filepath, data, opts) - }) - } - - async unlink (filepath) { - filepath = massageFilepath(filepath) - return timer(to(), async () => { - await this._loadPromise - if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version') - await assertWritePermission(this._archive) - await assertUnprotectedFilePath(filepath) - return pda.unlink(this._archive, filepath) - }) - } - - async download (filepath, opts = {}) { - filepath = massageFilepath(filepath) - return timer(to(opts), async (checkin) => { - await this._loadPromise - if (this._version) throw new Error('Not yet supported: can\'t download() old versions yet. Sorry!') // TODO - if (this._archive.writable) { - return // no need to download - } - return pda.download(this._archive, filepath) - }) - } - - async readdir (filepath, opts = {}) { - filepath = massageFilepath(filepath) - return timer(to(opts), async () => { - await this._loadPromise - var names = await pda.readdir(this._checkout, filepath, opts) - if (opts.stat) { - for (let i = 0; i < names.length; i++) { - names[i] = { - name: names[i], - stat: await pda.stat(this._checkout, path.join(filepath, names[i])) - } - } - } - return names - }) - } - - async mkdir (filepath) { - filepath = massageFilepath(filepath) - return timer(to(), async () => { - await this._loadPromise - if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version') - await assertWritePermission(this._archive) - await assertValidPath(filepath) - await assertUnprotectedFilePath(filepath) - return pda.mkdir(this._archive, filepath) - }) - } - - async rmdir (filepath, opts = {}) { - return timer(to(opts), async () => { - filepath = massageFilepath(filepath) - await this._loadPromise - if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version') - await assertUnprotectedFilePath(filepath) - return pda.rmdir(this._archive, filepath, opts) - }) - } - - async copy (path, dstPath, opts) { - path = massageFilepath(path) - 
dstPath = massageFilepath(dstPath) - return timer(to(opts), async () => { - await this._loadPromise - await pda.copy(this._archive, path, dstPath) - }) - } - - async rename (filepath, dstpath, opts) { - filepath = massageFilepath(filepath) - dstpath = massageFilepath(dstpath) - return timer(to(opts), async () => { - await this._loadPromise - await pda.rename(this._archive, filepath, dstpath) - }) - } - - createFileActivityStream (pathPattern) { - return toEventTarget(pda.watch(this._archive, pathPattern)) - } - - createNetworkActivityStream () { - return toEventTarget(pda.createNetworkActivityStream(this._archive)) - } - - async close () { - await this._loadPromise - return new Promise((resolve) => { - this._archive.close(resolve) - }) - } - - static async resolveName (name) { - // If it's already a valid dat URL, don't bother resolving it - // Avoids the case where you can't load an archive while offline - if (name.match(DAT_KEY_URL_REGEX)) { - return name - } - return new Promise((resolve, reject) => { - resolveName(name, (err, resolved) => { - if (err) reject(err) - else resolve(resolved) - }) - }) - } - - static async fork (url, opts) { - const srcDat = new DatArchive(url) - - const destDat = await DatArchive.create(opts) - - await srcDat._loadPromise - - await pda.exportArchiveToArchive({ - srcArchive: srcDat._archive, - dstArchive: destDat._archive - }) - - return destDat - } - - static async selectArchive (options) { - const urls = listLocal() - const archives = urls.map((url) => new DatArchive(url)) - - const info = await Promise.all(archives.map((archive) => archive.getInfo())) - - const message = ` - Please choose a Dat Archive: - ${info.map(({ url, title }, index) => `${index}. 
${title || 'Untitled'}: ${url}`).join('\n')} - ` - - const selection = prompt(message, 0) - - const archive = archives[selection] - - if (!archive) throw new Error('Archive Not Found', selection) - - return archive - } - - static async create (options = {}) { - const { title, description, type, author } = options - const archive = new DatArchive(null, options) - - await archive._loadPromise - - await pda.writeManifest(archive._archive, { url: archive.url, title, description, type, author }) - - return archive - } - - static async load (url, options) { - const archive = new DatArchive(url, options) - - await archive._loadPromise - - return archive - } - - static async unlink (url) { - const key = await DatArchive.resolveName(url) - - return new Promise((resolve, reject) => { - deleteStorage(key, (err) => { - if (err) reject(err) - else resolve() - }) - }) - } - } - - function destroyPromise (cb) { - return new Promise((resolve, reject) => { - destroy((err) => { - if (cb) cb(err) - if (err) reject(err) - else resolve() - }) - }) - } - - return { - DatArchive, - destroy: destroyPromise - } -} - -// helper to check if filepath refers to a file that userland is not allowed to edit directly -function assertUnprotectedFilePath (filepath) { - if (filepath === '/' + DAT_MANIFEST_FILENAME) { - throw new ProtectedFileNotWritableError() - } -} - -async function assertWritePermission (archive) { - // ensure we have the archive's private key - if (!archive.writable) { - throw new ArchiveNotWritableError() - } - return true -} - -async function assertValidFilePath (filepath) { - if (filepath.slice(-1) === '/') { - throw new InvalidPathError('Files can not have a trailing slash') - } - await assertValidPath(filepath) -} - -async function assertValidPath (fileOrFolderPath) { - if (!DAT_VALID_PATH_REGEX.test(fileOrFolderPath)) { - throw new InvalidPathError('Path contains invalid characters') - } -} - -function massageHistoryObj ({ name, version, type }) { - return { path: name, 
version, type } -} - -function massageFilepath (filepath) { - filepath = filepath || '' - filepath = decodeURIComponent(filepath) - if (!filepath.startsWith('/')) { - filepath = '/' + filepath - } - return filepath -} - -function waitReady (archive) { - return new Promise((resolve, reject) => { - archive.ready((err) => { - if (err) reject(err) - else resolve(archive) - }) - }) -} - -const to = (opts) => - (opts && typeof opts.timeout !== 'undefined') - ? opts.timeout - : API_TIMEOUT - -function timer (ms, fn) { - var currentAction - var isTimedOut = false - - // no timeout? - if (!ms) return fn(() => false) - - return new Promise((resolve, reject) => { - // start the timer - const timer = setTimeout(() => { - isTimedOut = true - reject(new TimeoutError(currentAction ? `Timed out while ${currentAction}` : undefined)) - }, ms) - - // call the fn to get the promise - var promise = fn(action => { - if (action) currentAction = action - return isTimedOut - }) - - // wrap the promise - promise.then( - val => { - clearTimeout(timer) - resolve(val) - }, - err => { - clearTimeout(timer) - reject(err) - } - ) - }) -} - -function toEventTarget (es) { - var target = new EventTarget() - es.on('data', ([event, args]) => target.send(event, args)) - target.close = es.destroy.bind(es) - return target -} diff --git a/test.js b/test.js index 0e935d2..61b7557 100644 --- a/test.js +++ b/test.js @@ -6,106 +6,108 @@ const storageLocation = isBrowser ? 
'/' : require('tmp').dirSync({ prefix: 'universal-dat-storage-' }).name -const { Hyperdrive, Hypercore, resolveName, destroy } = SDK({ - storageOpts: { - storageLocation - } -}) +run() -const DATPROJECT_KEY = 'dat://60c525b5589a5099aa3610a8ee550dcd454c3e118f7ac93b7d41b6b850272330' -const TEST_TIMEOUT = 10 * 1000 - -test.onFinish(destroy) - -test('Hyperdrive - load drive', (t) => { - t.timeoutAfter(TEST_TIMEOUT) +async function run () { + const { Hyperdrive, Hypercore, resolveName, destroy } = await SDK({ + storageOpts: { + storageLocation + } + }) - const drive = Hyperdrive(DATPROJECT_KEY) + const EXAMPLE_KEY = 'dat://f1b83ec1836550a480bdd92ec3b34bf0bf7b00c2810e2c50c463305955ac751a' + const TEST_TIMEOUT = 10 * 1000 - drive.readFile('/dat.json', 'utf8', (err, data) => { - t.notOk(err, 'loaded file without error') + const EXAMPLE_DNS_URL = `dat://dat.foundation` + const EXAMPLE_DNS_RESOLUTION = `60c525b5589a5099aa3610a8ee550dcd454c3e118f7ac93b7d41b6b850272330` - t.end() - }) -}) + test.onFinish(destroy) -test('Hyperdrive - create drive', (t) => { - t.timeoutAfter(TEST_TIMEOUT) + test('Hyperdrive - load drive', (t) => { + t.timeoutAfter(TEST_TIMEOUT) - const drive = Hyperdrive() + const drive = Hyperdrive(EXAMPLE_KEY) - drive.writeFile('/example.txt', 'Hello World!', (err) => { - t.notOk(err, 'Able to write to hyperdrive') + drive.readFile('/index.html', 'utf8', (err, data) => { + t.notOk(err, 'loaded file without error') - t.end() + t.end() + }) }) -}) -test('Hyperdrive - get existing drive', (t) => { - const drive = Hyperdrive() + test('Hyperdrive - create drive', (t) => { + t.timeoutAfter(TEST_TIMEOUT) - drive.ready(() => { - const existing = Hyperdrive(drive.key) + const drive = Hyperdrive('Example drive 1') - t.equal(existing, drive, 'Got existing drive by reference') + drive.writeFile('/example.txt', 'Hello World!', (err) => { + t.notOk(err, 'Able to write to hyperdrive') - t.end() + t.end() + }) }) -}) -test('Hyperdrive - new drive created after close', (t) => 
{ - const drive = Hyperdrive() + test('Hyperdrive - get existing drive', (t) => { + const drive = Hyperdrive('Example drive 2') - drive.ready(() => { - drive.once('close', () => { + drive.ready(() => { const existing = Hyperdrive(drive.key) - t.notEqual(existing, drive, 'Got new drive by reference') + t.equal(existing, drive, 'Got existing drive by reference') t.end() }) - drive.close() }) -}) -test('resolveName - resolve and load archive', (t) => { - t.timeoutAfter(TEST_TIMEOUT) + test('Hyperdrive - new drive created after close', (t) => { + const drive = Hyperdrive('Example drive 3') - resolveName('dat://dat.foundation', (err, resolved) => { - t.notOk(err, 'Resolved successfully') + drive.ready(() => { + drive.once('close', () => { + const existing = Hyperdrive(drive.key) - const drive = Hyperdrive(resolved) + t.notEqual(existing, drive, 'Got new drive by reference') - drive.readFile('/dat.json', 'utf8', (err2) => { - t.notOk(err2, 'loaded file without error') + t.end() + }) + drive.close() + }) + }) + + test('resolveName - resolve and load archive', (t) => { + t.timeoutAfter(TEST_TIMEOUT) + + resolveName(EXAMPLE_DNS_URL, (err, resolved) => { + t.notOk(err, 'Resolved successfully') + t.equal(resolved, EXAMPLE_DNS_RESOLUTION) t.end() }) }) -}) -test('Hypercore - create', (t) => { - t.timeoutAfter(TEST_TIMEOUT) + test('Hypercore - create', (t) => { + t.timeoutAfter(TEST_TIMEOUT) - const core = Hypercore() + const core = Hypercore('Example hypercore 1') - core.append('Hello World', (err) => { - t.notOk(err, 'able to write to hypercore') + core.append('Hello World', (err) => { + t.notOk(err, 'able to write to hypercore') - t.end() + t.end() + }) }) -}) -test('Hypercore - load', (t) => { - t.timeoutAfter(TEST_TIMEOUT) + test('Hypercore - load', (t) => { + t.timeoutAfter(TEST_TIMEOUT) - const key = '60c525b5589a5099aa3610a8ee550dcd454c3e118f7ac93b7d41b6b850272330' + const key = '60c525b5589a5099aa3610a8ee550dcd454c3e118f7ac93b7d41b6b850272330' - const core = 
Hypercore(key) + const core = Hypercore(key) - core.ready(() => { - t.equal(core.key.toString('hex'), key, 'loaded key') + core.ready(() => { + t.equal(core.key.toString('hex'), key, 'loaded key') - t.end() + t.end() + }) }) -}) +}