
commit 30fdc0d (1 parent: bb17a2d)
vmx committed Feb 5, 2019

refactor: update to new IPLD API

This is part of the Awesome Endeavour: Async Iterators:
ipfs/js-ipfs#1670

Showing 6 changed files with 89 additions and 95 deletions.
6 changes: 3 additions & 3 deletions package.json
@@ -40,20 +40,20 @@
     "chai": "^4.2.0",
     "detect-node": "^2.0.4",
     "dirty-chai": "^2.0.1",
-    "ipld": "~0.20.2",
+    "ipld": "git+https://github.com/ipld/js-ipld.git#new-api-impl",
     "ipld-dag-pb": "~0.15.2",
     "ipld-in-memory": "^2.0.0",
     "multicodec": "~0.5.0",
     "pull-pushable": "^2.2.0",
     "pull-stream-to-stream": "^1.3.4",
     "pull-zip": "^2.0.1",
     "sinon": "^7.1.0",
     "stream-to-pull-stream": "^1.7.2"
   },
   "dependencies": {
     "async": "^2.6.1",
     "cids": "~0.5.5",
     "ipfs-unixfs": "~0.1.16",
-    "ipfs-unixfs-importer": "~0.38.0",
+    "ipfs-unixfs-importer": "git+https://github.com/ipfs/js-ipfs-unixfs-importer.git#new-ipld-api",
     "pull-cat": "^1.1.11",
     "pull-paramap": "^1.2.2",
     "pull-stream": "^3.6.9",
36 changes: 19 additions & 17 deletions src/file.js
@@ -150,23 +150,25 @@ function getChildren (dag, offset, end) {
 
   return pull(
     once(filteredLinks),
-    paramap((children, cb) => {
-      dag.getMany(children.map(child => child.link.cid), (err, results) => {
-        if (err) {
-          return cb(err)
-        }
-
-        cb(null, results.map((result, index) => {
-          const child = children[index]
-
-          return {
-            start: child.start,
-            end: child.end,
-            node: result,
-            size: child.size
-          }
-        }))
-      })
+    paramap(async (children, cb) => {
+      const results = dag.get(children.map(child => child.link.cid))
+      const final = []
+      for (
+        let index = 0, result = await results.next();
+        !result.done;
+        index++, result = await results.next()
+      ) {
+        const child = children[index]
+        const node = result.value
+
+        final.push({
+          start: child.start,
+          end: child.end,
+          node: node,
+          size: child.size
+        })
+      }
+      cb(null, final)
     }),
     flatten()
   )
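
Note: the manual results.next() loop above can be written more directly with
for await...of. A minimal sketch, assuming the new API's get() returns an
async iterable that yields one node per input CID, in input order (the same
assumption the committed index-pairing relies on); this variant also surfaces
rejections, which the committed loop does not:

    paramap(async (children, cb) => {
      try {
        const final = []
        let index = 0

        // one node is expected back for each CID passed to get()
        for await (const node of dag.get(children.map(child => child.link.cid))) {
          const child = children[index++]

          final.push({
            start: child.start,
            end: child.end,
            node: node,
            size: child.size
          })
        }

        cb(null, final)
      } catch (err) {
        cb(err)
      }
    })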
4 changes: 2 additions & 2 deletions src/resolve.js
@@ -45,8 +45,8 @@ function createResolver (dag, options, depth, parent) {
       const cid = new CID(item.multihash)
 
       waterfall([
-        (done) => dag.get(cid, done),
-        (node, done) => done(null, resolveItem(cid, node.value, item, options))
+        async () => dag.get([cid]).first(),
+        (node, done) => done(null, resolveItem(cid, node, item, options))
       ], cb)
     }),
     flatten(),
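
Note: .first() is assumed here to be a convenience on the async iterable
returned by the new get(): it resolves to the first value the iterator
yields. Conceptually it reduces to something like this sketch (the first()
helper is hypothetical, shown only to make the contract clear):

    async function first (iterable) {
      // take the first yielded value, then stop iterating
      for await (const value of iterable) {
        return value
      }
    }

    // equivalent to dag.get([cid]).first()
    const node = await first(dag.get([cid]))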
33 changes: 17 additions & 16 deletions test/exporter-sharded.spec.js
@@ -20,6 +20,7 @@ const {
   DAGLink,
   DAGNode
 } = require('ipld-dag-pb')
+const multicodec = require('multicodec')
 
 const SHARD_SPLIT_THRESHOLD = 10
 

@@ -88,18 +89,18 @@ describe('exporter sharded', function () {
         }),
         collect(cb)
       ),
-      (imported, cb) => {
+      async (imported) => {
         directory = new CID(imported.pop().multihash)
 
         // store the CIDs, we will validate them later
         imported.forEach(imported => {
           files[imported.path].cid = new CID(imported.multihash)
         })
 
-        ipld.get(directory, cb)
+        return ipld.get([directory]).first()
       },
-      ({ value, cid }, cb) => {
-        const dir = UnixFS.unmarshal(value.data)
+      ({ data }, cb) => {
+        const dir = UnixFS.unmarshal(data)
 
         expect(dir.type).to.equal('hamt-sharded-directory')
 
@@ -374,24 +375,24 @@ describe('exporter sharded', function () {
           new DAGLink('shard', 5, dir)
         ], cb)
       },
-      (node, cb) => {
-        ipld.put(node, {
-          version: 0,
-          format: 'dag-pb',
-          hashAlg: 'sha2-256'
-        }, cb)
+      async (node) => {
+        const result = ipld.put([node], multicodec.DAG_PB, {
+          cidVersion: 0,
+          hashAlg: multicodec.SHA2_256
+        })
+        return result.first()
       },
       (cid, cb) => {
         DAGNode.create(new UnixFS('hamt-sharded-directory').marshal(), [
           new DAGLink('75normal-dir', 5, cid)
         ], cb)
       },
-      (node, cb) => {
-        ipld.put(node, {
-          version: 1,
-          format: 'dag-pb',
-          hashAlg: 'sha2-256'
-        }, cb)
+      async (node) => {
+        const result = ipld.put([node], multicodec.DAG_PB, {
+          cidVersion: 1,
+          hashAlg: multicodec.SHA2_256
+        })
+        return result.first()
      },
       (dir, cb) => {
         pull(
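
Note: the rewritten put() calls follow the new API's shape: an array of
nodes, a multicodec constant in place of the old format string, and
cidVersion in place of version. A minimal sketch of the pattern, under the
assumption that put() returns an async iterable of CIDs with the same
.first() convenience (putDagPbNode is a hypothetical helper):

    const multicodec = require('multicodec')

    async function putDagPbNode (ipld, node, cidVersion) {
      // one node in, one CID out
      const result = ipld.put([node], multicodec.DAG_PB, {
        cidVersion: cidVersion,
        hashAlg: multicodec.SHA2_256
      })

      return result.first()
    }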
5 changes: 4 additions & 1 deletion test/exporter-subtree.spec.js
@@ -11,6 +11,7 @@ const pull = require('pull-stream')
 const randomBytes = require('./helpers/random-bytes')
 const waterfall = require('async/waterfall')
 const importer = require('ipfs-unixfs-importer')
+const multicodec = require('multicodec')
 
 const ONE_MEG = Math.pow(1024, 2)
 

@@ -132,7 +133,9 @@ describe('exporter subtree', () => {
       ),
       (files, cb) => cb(null, files.pop().multihash),
       (buf, cb) => cb(null, new CID(buf)),
-      (cid, cb) => ipld.put({ a: { file: cid } }, { format: 'dag-cbor' }, cb),
+      async (cid) => {
+        return ipld.put([{ a: { file: cid } }], multicodec.DAG_CBOR).first()
+      },
       (cborNodeCid, cb) => pull(
         exporter(`${cborNodeCid.toBaseEncodedString()}/a/file/level-1/200Bytes.txt`, ipld),
         pull.collect(cb)
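
Note: the dag-cbor put() above passes no options, so format-specific
defaults are assumed to apply — presumably CIDv1 and sha2-256 for dag-cbor —
which would make the call roughly equivalent to this sketch:

    async (cid) => {
      return ipld.put([{ a: { file: cid } }], multicodec.DAG_CBOR, {
        cidVersion: 1,            // assumed dag-cbor default
        hashAlg: multicodec.SHA2_256  // assumed dag-cbor default
      }).first()
    }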
100 changes: 44 additions & 56 deletions test/exporter.spec.js
@@ -8,7 +8,6 @@ const IPLD = require('ipld')
 const inMemory = require('ipld-in-memory')
 const UnixFS = require('ipfs-unixfs')
 const pull = require('pull-stream')
-const zip = require('pull-zip')
 const CID = require('cids')
 const doUntil = require('async/doUntil')
 const waterfall = require('async/waterfall')
@@ -25,6 +24,7 @@ const {
 } = require('ipld-dag-pb')
 const isNode = require('detect-node')
 const randomBytes = require('./helpers/random-bytes')
+const multicodec = require('multicodec')
 
 const exporter = require('../src')
 const importer = require('ipfs-unixfs-importer')
@@ -51,13 +51,13 @@ describe('exporter', () => {
     DAGNode.create(file.marshal(), options.links, (err, node) => {
       expect(err).to.not.exist()
 
-      ipld.put(node, {
-        version: 0,
-        hashAlg: 'sha2-256',
-        format: 'dag-pb'
-      }, (err, cid) => {
-        cb(err, { file: file, node: node, cid: cid })
+      const result = ipld.put([node], multicodec.DAG_PB, {
+        cidVersion: 0,
+        hashAlg: multicodec.SHA2_256
       })
+      result.first()
+        .then((cid) => cb(null, { file: file, node: node, cid: cid }))
+        .catch((error) => cb(error))
     })
   }
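
Note: the .then/.catch bridging in dagPut recurs wherever the
promise-returning put() meets callback-style helpers (see also
createAndPersistNode below); it could be factored into a small helper
(hypothetical, shown only to clarify the pattern):

    function firstToCallback (result, cb) {
      // resolve the single value and hand it to a node-style callback
      result.first()
        .then((value) => cb(null, value))
        .catch((error) => cb(error))
    }

    // usage inside dagPut:
    // firstToCallback(ipld.put([node], multicodec.DAG_PB, { ... }), (err, cid) => { ... })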

@@ -182,47 +182,41 @@ describe('exporter', () => {
   })
 
   it('ensure hash inputs are sanitized', (done) => {
-    dagPut((err, result) => {
+    dagPut(async (err, result) => {
       expect(err).to.not.exist()
 
-      ipld.get(result.cid, (err, res) => {
-        expect(err).to.not.exist()
-        const unmarsh = UnixFS.unmarshal(result.node.data)
+      const node = await ipld.get([result.cid]).first()
+      const unmarsh = UnixFS.unmarshal(node.data)
 
-        expect(unmarsh.data).to.deep.equal(result.file.data)
+      expect(unmarsh.data).to.deep.equal(result.file.data)
 
-        pull(
-          exporter(result.cid, ipld),
-          pull.collect(onFiles)
-        )
+      pull(
+        exporter(result.cid, ipld),
+        pull.collect(onFiles)
+      )
 
-        function onFiles (err, files) {
-          expect(err).to.equal(null)
-          expect(files).to.have.length(1)
-          expect(files[0]).to.have.property('hash')
-          expect(files[0]).to.have.property('path', result.cid.toBaseEncodedString())
-          fileEql(files[0], unmarsh.data, done)
-        }
-      })
+      function onFiles (err, files) {
+        expect(err).to.equal(null)
+        expect(files).to.have.length(1)
+        expect(files[0]).to.have.property('hash')
+        expect(files[0]).to.have.property('path', result.cid.toBaseEncodedString())
+        fileEql(files[0], unmarsh.data, done)
+      }
     })
   })
 
   it('exports a file with no links', (done) => {
-    dagPut((err, result) => {
+    dagPut(async (err, result) => {
       expect(err).to.not.exist()
 
+      const node = await ipld.get([result.cid]).first()
+      const unmarsh = UnixFS.unmarshal(node.data)
+
       pull(
-        zip(
-          pull(
-            ipld.getStream(result.cid),
-            pull.map((res) => UnixFS.unmarshal(res.value.data))
-          ),
-          exporter(result.cid, ipld)
-        ),
+        exporter(result.cid, ipld),
         pull.collect((err, values) => {
           expect(err).to.not.exist()
-          const unmarsh = values[0][0]
-          const file = values[0][1]
+          const file = values[0]
 
           fileEql(file, unmarsh.data, done)
         })
@@ -292,25 +286,20 @@ describe('exporter', () => {
 
     dagPut({
       content: randomBytes(100)
-    }, (err, result) => {
+    }, async (err, result) => {
       expect(err).to.not.exist()
 
+      const node = await ipld.get([result.cid]).first()
+      const unmarsh = UnixFS.unmarshal(node.data)
+
       pull(
-        zip(
-          pull(
-            ipld.getStream(result.cid),
-            pull.map((res) => UnixFS.unmarshal(res.value.data))
-          ),
-          exporter(result.cid, ipld, {
-            offset,
-            length
-          })
-        ),
+        exporter(result.cid, ipld, {
+          offset,
+          length
+        }),
         pull.collect((err, values) => {
           expect(err).to.not.exist()
-
-          const unmarsh = values[0][0]
-          const file = values[0][1]
+          const file = values[0]
 
           fileEql(file, unmarsh.data.slice(offset, offset + length), done)
         })
@@ -1118,13 +1107,12 @@ function createAndPersistNode (ipld, type, data, children, callback) {
       return callback(error)
     }
 
-    ipld.put(node, {
-      version: 1,
-      hashAlg: 'sha2-256',
-      format: 'dag-pb'
-    }, (error, cid) => callback(error, {
-      node,
-      cid
-    }))
+    const result = ipld.put([node], multicodec.DAG_PB, {
+      cidVersion: 1,
+      hashAlg: multicodec.SHA2_256
+    })
+    result.first()
+      .then((cid) => callback(null, { node, cid }))
+      .catch((error) => callback(error))
   })
 }
