fix: updates ipld-dag-pb dep to version without .cid properties
Follows on from ipld/js-ipld-dag-pb#99 and updates this module to not
rely on DAGNodes having knowledge of their CIDs.

Bonus: also fixes #24 by removing the use of js-ipfs from this module,
breaking another circular dependency in the project.

License: MIT
Signed-off-by: achingbrain <[email protected]>
achingbrain committed Nov 8, 2018
1 parent 42ae46e commit 4339a7f
Showing 32 changed files with 295 additions and 316 deletions.
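
The substance of the change is the same in every file below: a DAGNode no longer knows its own CID, so the CID has to travel alongside the node explicitly. The sketch below illustrates the shift; persistNode is a hypothetical helper (not part of this module) and the ipld.put call shape is an assumption about the callback-style js-ipld API of the era, not a verified signature.

'use strict'

// Hypothetical helper standing in for "write a dag-pb node through IPLD".
// Before this commit callers could read the address back off the node
// itself (node.multihash / node.cid); now the CID has to be returned
// explicitly and passed around next to the node.
function persistNode (ipld, node, callback) {
  // assumed callback-era js-ipld call shape: put(node, options, cb) -> CID
  ipld.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }, (err, cid) => {
    if (err) return callback(err)

    // the node and its CID now travel together as a pair
    callback(null, { node, cid })
  })
}

// old (roughly): write the node, then read node.multihash off the result
// new: the CID comes back next to the node and is used directly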
package.json (11 changes: 4 additions & 7 deletions)
@@ -42,20 +42,20 @@
"detect-node": "^2.0.4",
"detect-webworker": "^1.0.0",
"dirty-chai": "^2.0.1",
"ipfs": "~0.33.0",
"ipld": "ipld/js-ipld#depend-on-dag-pb-without-cid",
"pull-buffer-stream": "^1.0.0",
"tmp": "~0.0.33"
"pull-traverse": "^1.0.3"
},
"dependencies": {
"async": "^2.6.1",
"blob": "~0.0.5",
"cids": "~0.5.5",
"debug": "^4.1.0",
"file-api": "~0.10.4",
"filereader-stream": "^2.0.0",
"interface-datastore": "~0.6.0",
"ipfs-multipart": "~0.1.0",
"ipfs-unixfs": "~0.1.16",
"ipfs-unixfs-engine": "~0.33.0",
"ipld-dag-pb": "ipld/js-ipld-dag-pb#remove-cid-property-from-dagnodes",
"is-pull-stream": "~0.0.0",
"is-stream": "^1.1.0",
"joi": "^14.0.4",
@@ -65,11 +65,8 @@
"promisify-es6": "^1.0.3",
"pull-cat": "^1.1.11",
"pull-defer": "~0.2.3",
"pull-paramap": "^1.2.2",
"pull-pushable": "^2.2.0",
"pull-stream": "^3.6.9",
"pull-stream-to-stream": "^1.3.4",
"pull-traverse": "^1.0.3",
"stream-to-pull-stream": "^1.7.2"
},
"contributors": [
src/core/cp.js (64 changes: 31 additions & 33 deletions)
@@ -21,7 +21,7 @@ const defaultOptions = {
hashAlg: 'sha2-256'
}

module.exports = (ipfs) => {
module.exports = (context) => {
return function mfsCp () {
const args = Array.from(arguments)
const {
@@ -41,74 +41,73 @@ module.exports = (ipfs) => {

options.parents = options.p || options.parents

traverseTo(ipfs, destination.path, {}, (error, result) => {
traverseTo(context, destination.path, {}, (error, result) => {
if (error) {
if (sources.length === 1) {
log('Only one source, copying to a file')
return copyToFile(ipfs, sources.pop(), destination, options, callback)
return copyToFile(context, sources.pop(), destination, options, callback)
} else {
log('Multiple sources, copying to a directory')
return copyToDirectory(ipfs, sources, destination, options, callback)
return copyToDirectory(context, sources, destination, options, callback)
}
}

const meta = UnixFs.unmarshal(result.node.data)

if (meta.type === 'directory') {
return copyToDirectory(ipfs, sources, destination, options, callback)
return copyToDirectory(context, sources, destination, options, callback)
}

callback(new Error('directory already has entry by that name'))
})
}
}

const copyToFile = (ipfs, source, destination, options, callback) => {
const copyToFile = (context, source, destination, options, callback) => {
waterfall([
(cb) => {
parallel([
(next) => stat(ipfs)(source.path, options, next),
(next) => stat(ipfs)(destination.path, options, (error) => {
(next) => stat(context)(source.path, options, next),
(next) => stat(context)(destination.path, options, (error) => {
if (!error) {
return next(new Error('directory already has entry by that name'))
}

next()
}),
(next) => traverseTo(ipfs, destination.dir, options, next)
(next) => traverseTo(context, destination.dir, options, next)
], cb)
},
([sourceStats, _, dest], cb) => {
waterfall([
(next) => addLink(ipfs, {
(next) => addLink(context, {
parent: dest.node,
child: {
size: sourceStats.cumulativeSize,
hash: sourceStats.hash
},
size: sourceStats.cumulativeSize,
cid: sourceStats.hash,
name: destination.name
}, next),
(newParent, next) => {
dest.node = newParent
updateTree(ipfs, dest, next)
({ node, cid }, next) => {
dest.node = node
dest.cid = cid
updateTree(context, dest, next)
},
(newRoot, cb) => updateMfsRoot(ipfs, newRoot.node.multihash, cb)
({ node, cid }, cb) => updateMfsRoot(context, cid, cb)
], cb)
}
], (error) => callback(error))
}

const copyToDirectory = (ipfs, sources, destination, options, callback) => {
const copyToDirectory = (context, sources, destination, options, callback) => {
waterfall([
(cb) => {
series([
// stat in parallel
(done) => parallel(
sources.map(source => (next) => stat(ipfs)(source.path, options, next)),
sources.map(source => (next) => stat(context)(source.path, options, next)),
done
),
// this could end up changing the root mfs node so do it after parallel
(done) => traverseTo(ipfs, destination.path, Object.assign({}, options, {
(done) => traverseTo(context, destination.path, Object.assign({}, options, {
createLastComponent: true
}), done)
], cb)
@@ -123,7 +122,7 @@ const copyToDirectory = (ipfs, sources, destination, options, callback) => {
parallel(
sources.map(source => {
return (cb) => {
stat(ipfs)(`${destination.path}/${source.name}`, options, (error) => {
stat(context)(`${destination.path}/${source.name}`, options, (error) => {
if (!error) {
return cb(new Error('directory already has entry by that name'))
}
@@ -138,30 +137,29 @@
// add links to target directory
(next) => {
waterfall([
(done) => done(null, dest.node)
(done) => done(null, dest)
].concat(
sourceStats.map((sourceStat, index) => {
return (dest, done) => {
return addLink(ipfs, {
parent: dest,
child: {
size: sourceStat.cumulativeSize,
hash: sourceStat.hash
},
return addLink(context, {
parent: dest.node,
size: sourceStat.cumulativeSize,
cid: sourceStat.hash,
name: sources[index].name
}, done)
}
})
), next)
},
// update mfs tree
(newParent, next) => {
dest.node = newParent
({ node, cid }, next) => {
dest.node = node
dest.cid = cid

updateTree(ipfs, dest, next)
updateTree(context, dest, next)
},
// save new root CID
(newRoot, cb) => updateMfsRoot(ipfs, newRoot.node.multihash, cb)
(newRoot, cb) => updateMfsRoot(context, newRoot.cid, cb)
], cb)
}
], (error) => callback(error))
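
The new copyToFile above shows the shape most of the other changes follow: addLink now takes the child's cid and size directly (instead of a child node whose hash is read) and yields a { node, cid } pair, which is stored on the destination before updateTree and updateMfsRoot run. Below is a condensed sketch of that flow; the require('./utils') path and the exact helper signatures are assumptions inferred from the diff, not a verbatim excerpt.

'use strict'

const waterfall = require('async/waterfall')

// assumed location of the helpers used in the diff above
const {
  addLink,
  updateTree,
  updateMfsRoot
} = require('./utils')

// Condensed copyToFile flow: link the copied entry under the destination
// directory, bubble the change up the MFS tree, then point the MFS root
// at the new root CID.
const linkIntoParent = (context, dest, sourceStats, name, callback) => {
  waterfall([
    (cb) => addLink(context, {
      parent: dest.node,
      size: sourceStats.cumulativeSize,
      cid: sourceStats.hash,
      name
    }, cb),
    // addLink hands back the updated parent node together with its CID
    ({ node, cid }, cb) => {
      dest.node = node
      dest.cid = cid
      updateTree(context, dest, cb)
    },
    // the new root's CID becomes the MFS root (no more newRoot.node.multihash)
    ({ cid }, cb) => updateMfsRoot(context, cid, cb)
  ], callback)
}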
src/core/flush.js (4 changes: 2 additions & 2 deletions)
@@ -8,7 +8,7 @@ const {

const defaultOptions = {}

module.exports = (ipfs) => {
module.exports = (context) => {
return function mfsFlush (path, options, callback) {
if (typeof options === 'function') {
callback = options
@@ -28,7 +28,7 @@ module.exports = (ipfs) => {
options = Object.assign({}, defaultOptions, options)

waterfall([
(cb) => traverseTo(ipfs, path, {}, cb),
(cb) => traverseTo(context, path, {}, cb),
(root, cb) => {
cb()
}
src/core/index.js (22 changes: 14 additions & 8 deletions)
@@ -1,5 +1,6 @@
'use strict'

const assert = require('assert')
const promisify = require('promisify-es6')
const {
createLock
@@ -33,22 +34,27 @@ const unwrappedSynchronousOperations = {
}

const wrap = ({
ipfs, mfs, operations, lock
options, mfs, operations, lock
}) => {
Object.keys(operations).forEach(key => {
mfs[key] = promisify(lock(operations[key](ipfs)))
mfs[key] = promisify(lock(operations[key](options)))
})
}

const defaultOptions = {
repoOwner: true
repoOwner: true,
ipld: null,
datastore: null
}

module.exports = (ipfs, options) => {
module.exports = (options) => {
const {
repoOwner
} = Object.assign({}, defaultOptions || {}, options)

assert(options.ipld, 'MFS requires an IPLD instance')
assert(options.datastore, 'MFS requires an interface-datastore instance')

const lock = createLock(repoOwner)

const readLock = (operation) => {
@@ -62,18 +68,18 @@ module.exports = (ipfs, options) => {
const mfs = {}

wrap({
ipfs, mfs, operations: readOperations, lock: readLock
options, mfs, operations: readOperations, lock: readLock
})
wrap({
ipfs, mfs, operations: writeOperations, lock: writeLock
options, mfs, operations: writeOperations, lock: writeLock
})

Object.keys(unwrappedOperations).forEach(key => {
mfs[key] = promisify(unwrappedOperations[key](ipfs))
mfs[key] = promisify(unwrappedOperations[key](options))
})

Object.keys(unwrappedSynchronousOperations).forEach(key => {
mfs[key] = unwrappedSynchronousOperations[key](ipfs)
mfs[key] = unwrappedSynchronousOperations[key](options)
})

return mfs
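
Dropping the ipfs argument changes how the core factory is constructed: instead of a js-ipfs instance it now needs an IPLD instance and a datastore, as the two new asserts make explicit. A rough usage sketch follows, assuming src/core is the entry point and that ipld and datastore are supplied by the embedding application; the variable names are illustrative.

'use strict'

// assumed entry point for the core factory shown in the diff
const createMfs = require('./src/core')

// `ipld` and `datastore` are assumed to come from the embedding
// application (an IPLD instance and an interface-datastore instance);
// both are mandatory now that js-ipfs itself is no longer pulled in.
const mfs = createMfs({
  ipld,
  datastore,
  repoOwner: true // same default as before
})

// the wrapped operations are promisified, so they can be chained or awaited
mfs.mkdir('/docs', { parents: true })
  .then(() => mfs.ls('/', { long: true }))
  .then((files) => console.log(files))
  .catch((err) => console.error(err))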
src/core/ls.js (12 changes: 6 additions & 6 deletions)
@@ -17,7 +17,7 @@ const defaultOptions = {
unsorted: false
}

module.exports = (ipfs) => {
module.exports = (context) => {
return function mfsLs (path, options, callback) {
if (typeof path === 'function') {
callback = path
@@ -35,21 +35,21 @@ module.exports = (ipfs) => {
options.long = options.l || options.long

waterfall([
(cb) => traverseTo(ipfs, path, {}, cb),
(cb) => traverseTo(context, path, {}, cb),
(result, cb) => {
const meta = UnixFs.unmarshal(result.node.data)

if (meta.type === 'directory') {
map(result.node.links, (link, next) => {
waterfall([
(done) => loadNode(ipfs, link, done),
(node, done) => {
(done) => loadNode(context, link, done),
({ node, cid }, done) => {
const meta = UnixFs.unmarshal(node.data)

done(null, {
name: link.name,
type: meta.type,
hash: formatCid(node.multihash, options.cidBase),
hash: formatCid(cid, options.cidBase),
size: meta.fileSize() || 0
})
}
@@ -59,7 +59,7 @@
cb(null, [{
name: result.name,
type: meta.type,
hash: formatCid(result.node.multihash, options.cidBase),
hash: formatCid(result.cid, options.cidBase),
size: meta.fileSize() || 0
}])
}
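
The ls change is the same pattern in miniature: loadNode used to yield a bare node whose multihash was formatted for display, and now yields a { node, cid } pair whose cid is formatted directly. A before/after sketch of just that callback, reusing the names from the diff above (the surrounding map over result.node.links is elided):

// before (roughly): the hash came off the node itself
//   loadNode(ipfs, link, (err, node) => {
//     ... formatCid(node.multihash, options.cidBase) ...
//   })

// after: the CID arrives next to the node and is formatted directly
loadNode(context, link, (err, result) => {
  if (err) return next(err)

  const { node, cid } = result
  const meta = UnixFs.unmarshal(node.data)

  next(null, {
    name: link.name,
    type: meta.type,
    hash: formatCid(cid, options.cidBase),
    size: meta.fileSize() || 0
  })
})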
src/core/mkdir.js (10 changes: 5 additions & 5 deletions)
@@ -15,7 +15,7 @@ const defaultOptions = {
cidVersion: 0
}

module.exports = (ipfs) => {
module.exports = (context) => {
return function mfsMkdir (path, options, callback) {
if (typeof options === 'function') {
callback = options
@@ -41,7 +41,7 @@ module.exports = (ipfs) => {

waterfall([
(cb) => {
traverseTo(ipfs, path, {
traverseTo(context, path, {
parents: false,
createLastComponent: false
}, (error) => {
@@ -58,13 +58,13 @@ module.exports = (ipfs) => {
return cb(error)
})
},
(cb) => traverseTo(ipfs, path, {
(cb) => traverseTo(context, path, {
parents: options.parents,
flush: options.flush,
createLastComponent: true
}, cb),
(result, cb) => updateTree(ipfs, result, cb),
(newRoot, next) => updateMfsRoot(ipfs, newRoot.node.multihash, next)
(result, cb) => updateTree(context, result, cb),
(newRoot, next) => updateMfsRoot(context, newRoot.cid, next)
], (error) => {
if (error && error.message.includes('file already exists') && options.parents) {
// when the directory already exists and we are creating intermediate
src/core/mv.js (6 changes: 3 additions & 3 deletions)
@@ -15,7 +15,7 @@ const defaultOptions = {
hashAlg: 'sha2-256'
}

module.exports = (ipfs) => {
module.exports = (context) => {
return function mfsMv () {
let args = Array.from(arguments)

@@ -42,8 +42,8 @@
}))

series([
(cb) => cp(ipfs).apply(null, cpArgs.concat(cb)),
(cb) => rm(ipfs).apply(null, rmArgs.concat(cb))
(cb) => cp(context).apply(null, cpArgs.concat(cb)),
(cb) => rm(context).apply(null, rmArgs.concat(cb))
], callback)
}
}