This repository has been archived by the owner on Aug 12, 2020. It is now read-only.

fix: updates ipld-dag-pb dep to version without .cid properties
Follows on from ipld/js-ipld-dag-pb#99 and updates this module to not rely on DAGNodes having knowledge of their CIDs.

Bonus: also removes the use of js-ipfs from this module, breaking another circular dependency in the project.
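To make the change concrete, here is a minimal sketch of the pattern this commit moves between. The names and shapes are taken from the diffs below; persist is the new helper in src/utils/persist, and its signature here is inferred from its call sites rather than from the helper itself:

// Before: DAGNode.create took a hash algorithm, and the resulting node
// carried its own .multihash, from which callers built a CID by hand.
DAGNode.create(data, links, options.hashAlg, (err, node) => {
  const cid = new CID(options.cidVersion, 'dag-pb', node.multihash)
  ipld.put(node, { cid }, (err) => callback(err, node))
})

// After: nodes no longer know their CIDs. Creation is hash-agnostic and
// persist() computes the CID and stores the block, yielding { cid, node }.
DAGNode.create(data, links, (err, node) => {
  persist(node, ipld, options, (err, { cid, node }) => callback(err, cid))
})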
achingbrain committed Nov 11, 2018
1 parent c5c0720 commit 9cf2223
Showing 16 changed files with 201 additions and 220 deletions.
5 changes: 2 additions & 3 deletions package.json
@@ -40,10 +40,9 @@
   "aegir": "^17.0.0",
   "chai": "^4.2.0",
   "dirty-chai": "^2.0.1",
-  "ipfs": "~0.32.3",
   "ipfs-block-service": "~0.15.1",
   "ipfs-repo": "~0.25.0",
-  "ipld": "~0.19.1",
+  "ipld": "^0.20.0",
   "mkdirp": "~0.5.1",
   "multihashes": "~0.4.14",
   "ncp": "^2.0.0",
@@ -58,7 +57,7 @@
   "cids": "~0.5.5",
   "deep-extend": "~0.6.0",
   "ipfs-unixfs": "~0.1.16",
-  "ipld-dag-pb": "~0.14.11",
+  "ipld-dag-pb": "^0.15.0",
   "left-pad": "^1.3.0",
   "multihashing-async": "~0.5.1",
   "pull-batch": "^1.0.0",
93 changes: 28 additions & 65 deletions src/builder/builder.js
@@ -6,13 +6,11 @@ const pull = require('pull-stream')
 const through = require('pull-through')
 const parallel = require('async/parallel')
 const waterfall = require('async/waterfall')
-const dagPB = require('ipld-dag-pb')
-const CID = require('cids')
-const multihash = require('multihashing-async')
-
+const persist = require('../utils/persist')
 const reduce = require('./reduce')
-
-const DAGNode = dagPB.DAGNode
+const {
+  DAGNode
+} = require('ipld-dag-pb')
 
 const defaultOptions = {
   chunkerOptions: {
@@ -27,12 +25,6 @@ const defaultOptions = {
 
 module.exports = function builder (createChunker, ipld, createReducer, _options) {
   const options = extend({}, defaultOptions, _options)
-  options.cidVersion = options.cidVersion || options.cidVersion
-  options.hashAlg = options.hashAlg || defaultOptions.hashAlg
-
-  if (options.hashAlg !== 'sha2-256') {
-    options.cidVersion = 1
-  }
 
   return function (source) {
     return function (items, cb) {
@@ -71,33 +63,17 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
           const d = new UnixFS('directory')
 
           waterfall([
-            (cb) => DAGNode.create(d.marshal(), [], options.hashAlg, cb),
-            (node, cb) => {
-              if (options.onlyHash) {
-                return cb(null, node)
-              }
-
-              const cid = new CID(options.cidVersion, 'dag-pb', node.multihash)
-
-              node = new DAGNode(
-                node.data,
-                node.links,
-                node.serialized,
-                cid
-              )
-
-              ipld.put(node, {
-                cid
-              }, (err) => cb(err, node))
-            }
-          ], (err, node) => {
+            (cb) => DAGNode.create(d.marshal(), [], cb),
+            (node, cb) => persist(node, ipld, options, cb)
+          ], (err, result) => {
             if (err) {
               return callback(err)
             }
 
             callback(null, {
               path: item.path,
-              multihash: node.multihash,
-              size: node.size
+              multihash: result.cid.buffer,
+              size: result.node.size
             })
           })
         }
@@ -134,55 +110,42 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
        }),
        pull.asyncMap((buffer, callback) => {
          if (options.rawLeaves) {
-            return multihash(buffer, options.hashAlg, (error, hash) => {
-              if (error) {
-                return callback(error)
-              }
-
-              return callback(null, {
-                multihash: hash,
-                size: buffer.length,
-                leafSize: buffer.length,
-                cid: new CID(1, 'raw', hash),
-                data: buffer
-              })
+            return callback(null, {
+              size: buffer.length,
+              leafSize: buffer.length,
+              data: buffer
            })
          }
 
          const file = new UnixFS(options.leafType, buffer)
 
-          DAGNode.create(file.marshal(), [], options.hashAlg, (err, node) => {
+          DAGNode.create(file.marshal(), [], (err, node) => {
            if (err) {
              return callback(err)
            }
 
            callback(null, {
-              multihash: node.multihash,
              size: node.size,
              leafSize: file.fileSize(),
-              cid: new CID(options.cidVersion, 'dag-pb', node.multihash),
              data: node
            })
          })
        }),
        pull.asyncMap((leaf, callback) => {
-          if (options.onlyHash) {
-            return callback(null, leaf)
-          }
+          persist(leaf.data, ipld, options, (error, results) => {
+            if (error) {
+              return callback(error)
+            }
 
-          ipld.put(leaf.data, {
-            cid: leaf.cid
-          }, (error) => callback(error, leaf))
-        }),
-        pull.map((leaf) => {
-          return {
-            path: file.path,
-            multihash: leaf.cid.buffer,
-            size: leaf.size,
-            leafSize: leaf.leafSize,
-            name: '',
-            cid: leaf.cid
-          }
+            callback(null, {
+              size: leaf.size,
+              leafSize: leaf.leafSize,
+              data: results.node,
+              multihash: results.cid.buffer,
+              path: leaf.path,
+              name: ''
+            })
+          })
        }),
through( // mark as single node if only one single node
function onData (data) {
41 changes: 10 additions & 31 deletions src/builder/reduce.js
@@ -3,7 +3,7 @@
 const waterfall = require('async/waterfall')
 const dagPB = require('ipld-dag-pb')
 const UnixFS = require('ipfs-unixfs')
-const CID = require('cids')
+const persist = require('../utils/persist')
 
 const DAGLink = dagPB.DAGLink
 const DAGNode = dagPB.DAGNode
@@ -14,10 +14,10 @@ module.exports = function reduce (file, ipld, options) {
     const leaf = leaves[0]
 
     return callback(null, {
-      path: file.path,
-      multihash: leaf.multihash,
       size: leaf.size,
       leafSize: leaf.leafSize,
+      multihash: leaf.multihash,
+      path: file.path,
       name: leaf.name
     })
   }
@@ -28,44 +28,23 @@ module.exports = function reduce (file, ipld, options) {
     const links = leaves.map((leaf) => {
       f.addBlockSize(leaf.leafSize)
 
-      let cid = leaf.cid
-
-      if (!cid) {
-        // we are an intermediate node
-        cid = new CID(options.cidVersion, 'dag-pb', leaf.multihash)
-      }
-
-      return new DAGLink(leaf.name, leaf.size, cid.buffer)
+      return new DAGLink(leaf.name, leaf.size, leaf.multihash)
     })
 
     waterfall([
-      (cb) => DAGNode.create(f.marshal(), links, options.hashAlg, cb),
-      (node, cb) => {
-        const cid = new CID(options.cidVersion, 'dag-pb', node.multihash)
-
-        if (options.onlyHash) {
-          return cb(null, {
-            node, cid
-          })
-        }
-
-        ipld.put(node, {
-          cid
-        }, (error) => cb(error, {
-          node, cid
-        }))
-      }
+      (cb) => DAGNode.create(f.marshal(), links, cb),
+      (node, cb) => persist(node, ipld, options, cb)
    ], (error, result) => {
      if (error) {
        return callback(error)
      }
 
      callback(null, {
-        name: '',
-        path: file.path,
-        multihash: result.cid.buffer,
        size: result.node.size,
-        leafSize: f.fileSize()
+        leafSize: f.fileSize(),
+        multihash: result.cid.buffer,
+        path: file.path,
+        name: ''
      })
    })
  }
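Both builder.js and reduce.js now delegate to the persist helper in src/utils/persist. That file is one of the changed files not shown in this excerpt, so the following is only a plausible reconstruction, pieced together from the inline code it replaces and from its call sites; treat the ipld@0.20 put() shape and the dag-pb util.cid() call as assumptions:

// Plausible reconstruction of src/utils/persist (not shown in this diff).
// Assumes the ipld@0.20-style put() that computes and returns the CID,
// and ipld-dag-pb@0.15's util.cid() for the onlyHash case.
const {
  util: dagPBUtil
} = require('ipld-dag-pb')

module.exports = function persist (node, ipld, options, callback) {
  let cidVersion = options.cidVersion || 0
  const hashAlg = options.hashAlg || 'sha2-256'

  // Mirrors the option handling deleted from builder.js above:
  // a non-default hash algorithm forces CIDv1
  if (hashAlg !== 'sha2-256') {
    cidVersion = 1
  }

  if (options.onlyHash) {
    // Compute the CID without writing the block, as the removed
    // inline onlyHash branches did
    return dagPBUtil.cid(node, { version: cidVersion, hashAlg }, (err, cid) => {
      callback(err, { cid, node })
    })
  }

  // The CID is now computed by IPLD and handed back, instead of being
  // derived from a node.multihash property
  ipld.put(node, {
    version: cidVersion,
    hashAlg: hashAlg,
    format: 'dag-pb'
  }, (err, cid) => {
    callback(err, { cid, node })
  })
}

Whatever its exact shape, the important property is visible at every call site above: a { cid, node } pair replaces reading node.multihash.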
4 changes: 2 additions & 2 deletions src/exporter/dir-flat.js
@@ -13,7 +13,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
     name: name,
     depth: depth,
     path: path,
-    hash: cid,
+    multihash: cid.buffer,
     size: node.size,
     type: 'dir'
   }
@@ -26,7 +26,7 @@
       size: link.size,
       name: link.name,
       path: path + '/' + link.name,
-      multihash: link.multihash,
+      multihash: link.cid.buffer,
       linkName: link.name,
       pathRest: pathRest.slice(1),
       type: 'dir'
4 changes: 2 additions & 2 deletions src/exporter/dir-hamt-sharded.js
@@ -13,7 +13,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
     name: name,
     depth: depth,
     path: path,
-    hash: cid,
+    multihash: cid.buffer,
     size: node.size,
     type: 'dir'
   }
@@ -36,7 +36,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
       depth: depth + 1,
       name: p,
       path: pp,
-      multihash: link.multihash,
+      multihash: link.cid.buffer,
       pathRest: p ? pathRest.slice(1) : pathRest,
       parent: dir || parent
     }
7 changes: 3 additions & 4 deletions src/exporter/file.js
@@ -2,7 +2,6 @@
 
 const traverse = require('pull-traverse')
 const UnixFS = require('ipfs-unixfs')
-const CID = require('cids')
 const pull = require('pull-stream')
 const paramap = require('pull-paramap')
 const extractDataFromBlock = require('./extract-data-from-block')
@@ -43,7 +42,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
       content: pull.once(Buffer.alloc(0)),
       name: name,
       path: path,
-      hash: cid,
+      multihash: cid.buffer,
       size: fileSize,
       type: 'file'
     })
@@ -64,7 +63,7 @@
     content: content,
     name: name,
     path: path,
-    hash: cid,
+    multihash: cid.buffer,
     size: fileSize,
     type: 'file'
   }])
@@ -142,7 +141,7 @@ function getChildren (dag, offset, end) {
   return pull(
     pull.values(filteredLinks),
     paramap((child, cb) => {
-      dag.get(new CID(child.link.multihash), (error, result) => cb(error, {
+      dag.get(child.link.cid, (error, result) => cb(error, {
        start: child.start,
        end: child.end,
        node: result && result.value,
6 changes: 4 additions & 2 deletions src/exporter/index.js
@@ -54,9 +54,11 @@ module.exports = (path, dag, options) => {
   const pathLengthToCut = join(
     [dPath.base].concat(dPath.rest.slice(0, dPath.rest.length - 1))).length
 
+  const cid = new CID(dPath.base)
+
   return pull(
     pull.values([{
-      multihash: new CID(dPath.base),
+      multihash: cid.buffer,
       name: dPath.base,
       path: dPath.base,
       pathRest: dPath.rest,
@@ -70,7 +72,7 @@
       name: node.name,
       path: finalPathFor(node),
       size: node.size,
-      hash: node.hash || node.multihash,
+      hash: node.multihash,
       content: node.content,
       type: node.type
     }
23 changes: 6 additions & 17 deletions src/importer/dir-flat.js
@@ -2,12 +2,12 @@
 
 const asyncEachSeries = require('async/eachSeries')
 const waterfall = require('async/waterfall')
-const CID = require('cids')
 const dagPB = require('ipld-dag-pb')
 const UnixFS = require('ipfs-unixfs')
 const DAGLink = dagPB.DAGLink
 const DAGNode = dagPB.DAGNode
 const Dir = require('./dir')
+const persist = require('../utils/persist')
 
 class DirFlat extends Dir {
   constructor (props, _options) {
@@ -56,28 +56,17 @@
     })
 
     const dir = new UnixFS('directory')
-    const options = this._options
 
     waterfall(
       [
-        (callback) => DAGNode.create(dir.marshal(), links, options.hashAlg, callback),
-        (node, callback) => {
-          if (options.onlyHash) return callback(null, node)
-
-          let cid = new CID(node.multihash)
-
-          if (options.cidVersion === 1) {
-            cid = cid.toV1()
-          }
-
-          ipld.put(node, { cid }, (err) => callback(err, node))
-        },
-        (node, callback) => {
-          this.multihash = node.multihash
+        (callback) => DAGNode.create(dir.marshal(), links, callback),
+        (node, callback) => persist(node, ipld, this._options, callback),
+        ({cid, node}, callback) => {
+          this.multihash = cid.buffer
          this.size = node.size
          const pushable = {
            path: path,
-            multihash: node.multihash,
+            multihash: cid.buffer,
            size: node.size
          }
          source.push(pushable)