From 02a158f577a70482d2d534525429b8aa281894d8 Mon Sep 17 00:00:00 2001
From: Volker Mische
Date: Fri, 14 Dec 2018 15:17:16 +0100
Subject: [PATCH] refactor: update to new IPLD API

This is part of the Awesome Endeavour: Async Iterators:
https://github.com/ipfs/js-ipfs/issues/1670
---
 package.json                  |  6 +--
 src/file.js                   | 36 +++++++------
 src/resolve.js                |  4 +-
 test/exporter-sharded.spec.js | 31 ++++++-----
 test/exporter-subtree.spec.js |  7 ++-
 test/exporter.spec.js         | 98 ++++++++++++++++-------------------
 6 files changed, 91 insertions(+), 91 deletions(-)

diff --git a/package.json b/package.json
index 9c37f4b..efc2c96 100644
--- a/package.json
+++ b/package.json
@@ -40,12 +40,12 @@
     "chai": "^4.2.0",
     "detect-node": "^2.0.4",
     "dirty-chai": "^2.0.1",
-    "ipld": "~0.20.2",
+    "ipld": "git+https://github.com/ipld/js-ipld.git#new-api-impl",
     "ipld-dag-pb": "~0.15.2",
     "ipld-in-memory": "^2.0.0",
+    "multicodec": "~0.5.0",
     "pull-pushable": "^2.2.0",
     "pull-stream-to-stream": "^1.3.4",
-    "pull-zip": "^2.0.1",
     "sinon": "^7.1.0",
     "stream-to-pull-stream": "^1.7.2"
   },
@@ -53,7 +53,7 @@
     "async": "^2.6.1",
     "cids": "~0.5.5",
     "ipfs-unixfs": "~0.1.16",
-    "ipfs-unixfs-importer": "~0.38.0",
+    "ipfs-unixfs-importer": "git+https://github.com/ipfs/js-ipfs-unixfs-importer.git#new-ipld-api",
     "pull-cat": "^1.1.11",
     "pull-paramap": "^1.2.2",
     "pull-stream": "^3.6.9",
diff --git a/src/file.js b/src/file.js
index 9380e7c..9d6de8b 100644
--- a/src/file.js
+++ b/src/file.js
@@ -150,23 +150,25 @@ function getChildren (dag, offset, end) {
 
   return pull(
     once(filteredLinks),
-    paramap((children, cb) => {
-      dag.getMany(children.map(child => child.link.cid), (err, results) => {
-        if (err) {
-          return cb(err)
-        }
-
-        cb(null, results.map((result, index) => {
-          const child = children[index]
-
-          return {
-            start: child.start,
-            end: child.end,
-            node: result,
-            size: child.size
-          }
-        }))
-      })
+    paramap(async (children, cb) => {
+      const results = dag.get(children.map(child => child.link.cid))
+      const final = []
+      for (
+        let index = 0, result = await results.next();
+        !result.done;
+        index++, result = await results.next()
+      ) {
+        const child = children[index]
+        const node = result.value
+
+        final.push({
+          start: child.start,
+          end: child.end,
+          node: node,
+          size: child.size
+        })
+      }
+      cb(null, final)
     }),
     flatten()
   )
diff --git a/src/resolve.js b/src/resolve.js
index 4786729..ab925f3 100644
--- a/src/resolve.js
+++ b/src/resolve.js
@@ -45,8 +45,8 @@ function createResolver (dag, options, depth, parent) {
      const cid = new CID(item.multihash)
 
       waterfall([
-        (done) => dag.get(cid, done),
-        (node, done) => done(null, resolveItem(cid, node.value, item, options))
+        async () => dag.get([cid]).first(),
+        (node, done) => done(null, resolveItem(cid, node, item, options))
      ], cb)
     }),
     flatten(),
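Note for reviewers: the src/file.js hunk above is the heart of the change. Under the new API, `dag.get` takes an array of CIDs and returns an async iterator instead of accepting a callback, so the `paramap` stage has to bridge promises back into pull-stream callbacks by hand. One thing the old `getMany` version had that the new loop drops is error forwarding (`if (err) return cb(err)`): if the iterator rejects, `cb` is never called and the stream stalls. A defensive version of the same bridge could look like this sketch (a minimal sketch, assuming the new-api-impl branch semantics where `get()` yields one decoded node per CID in order; the `getNodes` helper name is made up for illustration):

// Minimal sketch, not part of the patch.
async function getNodes (dag, cids, cb) {
  try {
    const nodes = []

    // consume the async iterator returned by the new get()
    for await (const node of dag.get(cids)) {
      nodes.push(node)
    }

    // hand the nodes back to callback-style callers such as paramap
    cb(null, nodes)
  } catch (err) {
    // without this branch, a rejected iterator would never reach cb
    cb(err)
  }
}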
diff --git a/test/exporter-sharded.spec.js b/test/exporter-sharded.spec.js
index b78dd54..09382bd 100644
--- a/test/exporter-sharded.spec.js
+++ b/test/exporter-sharded.spec.js
@@ -20,6 +20,7 @@ const {
   DAGLink,
   DAGNode
 } = require('ipld-dag-pb')
+const multicodec = require('multicodec')
 
 const SHARD_SPLIT_THRESHOLD = 10
 
@@ -88,17 +89,17 @@ describe('exporter sharded', function () {
         }),
         collect(cb)
       ),
-      (imported, cb) => {
+      async (imported) => {
         directory = new CID(imported.pop().multihash)
 
         // store the CIDs, we will validate them later
         imported.forEach(imported => {
           files[imported.path].cid = new CID(imported.multihash)
         })
 
-        ipld.get(directory, cb)
+        return ipld.get([directory]).first()
       },
-      ({ value, cid }, cb) => {
-        const dir = UnixFS.unmarshal(value.data)
+      ({ data }, cb) => {
+        const dir = UnixFS.unmarshal(data)
 
         expect(dir.type).to.equal('hamt-sharded-directory')
@@ -374,24 +375,26 @@ describe('exporter sharded', function () {
           new DAGLink('shard', 5, dir)
         ], cb)
       },
-      (node, cb) => {
-        ipld.put(node, {
+      async (node) => {
+        const result = ipld.put([node], {
           version: 0,
-          format: 'dag-pb',
-          hashAlg: 'sha2-256'
-        }, cb)
+          format: multicodec.DAG_PB,
+          hashAlg: multicodec.SHA2_256
+        })
+        return result.first()
       },
       (cid, cb) => {
         DAGNode.create(new UnixFS('hamt-sharded-directory').marshal(), [
           new DAGLink('75normal-dir', 5, cid)
         ], cb)
       },
-      (node, cb) => {
-        ipld.put(node, {
+      async (node) => {
+        const result = ipld.put([node], {
           version: 1,
-          format: 'dag-pb',
-          hashAlg: 'sha2-256'
-        }, cb)
+          format: multicodec.DAG_PB,
+          hashAlg: multicodec.SHA2_256
+        })
+        return result.first()
       },
       (dir, cb) => {
         pull(
diff --git a/test/exporter-subtree.spec.js b/test/exporter-subtree.spec.js
index 741e181..c6a589f 100644
--- a/test/exporter-subtree.spec.js
+++ b/test/exporter-subtree.spec.js
@@ -11,6 +11,7 @@ const pull = require('pull-stream')
 const randomBytes = require('./helpers/random-bytes')
 const waterfall = require('async/waterfall')
 const importer = require('ipfs-unixfs-importer')
+const multicodec = require('multicodec')
 
 const ONE_MEG = Math.pow(1024, 2)
 
@@ -132,7 +133,11 @@ describe('exporter subtree', () => {
       ),
       (files, cb) => cb(null, files.pop().multihash),
       (buf, cb) => cb(null, new CID(buf)),
-      (cid, cb) => ipld.put({ a: { file: cid } }, { format: 'dag-cbor' }, cb),
+      async (cid) => {
+        return ipld.put([{ a: { file: cid } }], {
+          format: multicodec.DAG_CBOR
+        }).first()
+      },
       (cborNodeCid, cb) => pull(
         exporter(`${cborNodeCid.toBaseEncodedString()}/a/file/level-1/200Bytes.txt`, ipld),
         pull.collect(cb)
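Note on the `put` calls in the two test files above: the new `put` takes an array of nodes, `format` and `hashAlg` become numeric `multicodec` constants instead of strings ('dag-pb' becomes `multicodec.DAG_PB`), and the return value is an async iterable of CIDs with a `first()` convenience method. A small wrapper makes the shape explicit (a sketch only; `putSingle` is a hypothetical name, and it assumes `first()` exists on the returned iterable as it does on the new-api-impl branch):

const multicodec = require('multicodec')

// Hypothetical wrapper: store one dag-pb node, resolve to its CID.
function putSingle (ipld, node, version) {
  return ipld.put([node], {
    version: version,              // CID version, 0 or 1
    format: multicodec.DAG_PB,     // codec as a multicodec constant
    hashAlg: multicodec.SHA2_256   // hash as a multicodec constant
  }).first()                       // take the single CID from the iterable
}

// usage: putSingle(ipld, node, 1).then((cid) => { ... }, (err) => { ... })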
diff --git a/test/exporter.spec.js b/test/exporter.spec.js
index 08c868a..78b0b37 100644
--- a/test/exporter.spec.js
+++ b/test/exporter.spec.js
@@ -8,7 +8,6 @@ const IPLD = require('ipld')
 const inMemory = require('ipld-in-memory')
 const UnixFS = require('ipfs-unixfs')
 const pull = require('pull-stream')
-const zip = require('pull-zip')
 const CID = require('cids')
 const doUntil = require('async/doUntil')
 const waterfall = require('async/waterfall')
@@ -25,6 +24,7 @@ const {
 } = require('ipld-dag-pb')
 const isNode = require('detect-node')
 const randomBytes = require('./helpers/random-bytes')
+const multicodec = require('multicodec')
 
 const exporter = require('../src')
 const importer = require('ipfs-unixfs-importer')
@@ -51,13 +51,14 @@ describe('exporter', () => {
       DAGNode.create(file.marshal(), options.links, (err, node) => {
         expect(err).to.not.exist()
 
-        ipld.put(node, {
+        const result = ipld.put([node], {
           version: 0,
-          hashAlg: 'sha2-256',
-          format: 'dag-pb'
-        }, (err, cid) => {
-          cb(err, { file: file, node: node, cid: cid })
+          hashAlg: multicodec.SHA2_256,
+          format: multicodec.DAG_PB
         })
+        result.first()
+          .then((cid) => cb(null, { file: file, node: node, cid: cid }))
+          .catch((error) => cb(error))
       })
     }
 
@@ -182,47 +183,41 @@ describe('exporter', () => {
   })
 
   it('ensure hash inputs are sanitized', (done) => {
-    dagPut((err, result) => {
+    dagPut(async (err, result) => {
       expect(err).to.not.exist()
 
-      ipld.get(result.cid, (err, res) => {
-        expect(err).to.not.exist()
-        const unmarsh = UnixFS.unmarshal(result.node.data)
+      const node = await ipld.get([result.cid]).first()
+      const unmarsh = UnixFS.unmarshal(node.data)
 
-        expect(unmarsh.data).to.deep.equal(result.file.data)
+      expect(unmarsh.data).to.deep.equal(result.file.data)
 
-        pull(
-          exporter(result.cid, ipld),
-          pull.collect(onFiles)
-        )
+      pull(
+        exporter(result.cid, ipld),
+        pull.collect(onFiles)
+      )
 
-        function onFiles (err, files) {
-          expect(err).to.equal(null)
-          expect(files).to.have.length(1)
-          expect(files[0]).to.have.property('hash')
-          expect(files[0]).to.have.property('path', result.cid.toBaseEncodedString())
-          fileEql(files[0], unmarsh.data, done)
-        }
-      })
+      function onFiles (err, files) {
+        expect(err).to.equal(null)
+        expect(files).to.have.length(1)
+        expect(files[0]).to.have.property('hash')
+        expect(files[0]).to.have.property('path', result.cid.toBaseEncodedString())
+        fileEql(files[0], unmarsh.data, done)
+      }
     })
   })
 
   it('exports a file with no links', (done) => {
-    dagPut((err, result) => {
+    dagPut(async (err, result) => {
       expect(err).to.not.exist()
 
+      const node = await ipld.get([result.cid]).first()
+      const unmarsh = UnixFS.unmarshal(node.data)
+
       pull(
-        zip(
-          pull(
-            ipld.getStream(result.cid),
-            pull.map((res) => UnixFS.unmarshal(res.value.data))
-          ),
-          exporter(result.cid, ipld)
-        ),
+        exporter(result.cid, ipld),
         pull.collect((err, values) => {
           expect(err).to.not.exist()
-          const unmarsh = values[0][0]
-          const file = values[0][1]
+          const file = values[0]
 
           fileEql(file, unmarsh.data, done)
         })
@@ -292,25 +287,20 @@ describe('exporter', () => {
 
       dagPut({
         content: randomBytes(100)
-      }, (err, result) => {
+      }, async (err, result) => {
         expect(err).to.not.exist()
 
+        const node = await ipld.get([result.cid]).first()
+        const unmarsh = UnixFS.unmarshal(node.data)
+
         pull(
-          zip(
-            pull(
-              ipld.getStream(result.cid),
-              pull.map((res) => UnixFS.unmarshal(res.value.data))
-            ),
-            exporter(result.cid, ipld, {
-              offset,
-              length
-            })
-          ),
+          exporter(result.cid, ipld, {
+            offset,
+            length
+          }),
           pull.collect((err, values) => {
             expect(err).to.not.exist()
-
-            const unmarsh = values[0][0]
-            const file = values[0][1]
+            const file = values[0]
 
             fileEql(file, unmarsh.data.slice(offset, offset + length), done)
           })
@@ -1118,13 +1108,13 @@ function createAndPersistNode (ipld, type, data, children, callback) {
       return callback(error)
     }
 
-    ipld.put(node, {
+    const result = ipld.put([node], {
       version: 1,
-      hashAlg: 'sha2-256',
-      format: 'dag-pb'
-    }, (error, cid) => callback(error, {
-      node,
-      cid
-    }))
+      hashAlg: multicodec.SHA2_256,
+      format: multicodec.DAG_PB
+    })
+    result.first()
+      .then((cid) => callback(null, { node, cid }))
+      .catch((error) => callback(error))
   })
 }
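A last observation on the test churn: every place where a callback consumer (waterfall steps, `DAGNode.create` callbacks, mocha's `done`) meets the promise-returning `put(...).first()`, the patch repeats the same `.then(...).catch(...)` bridge, as in `createAndPersistNode` above. If the pattern keeps spreading, it could be factored into a tiny helper along these lines (a sketch; the `asCallback` name is invented here):

// Hypothetical helper: settle a promise into a node-style callback,
// mirroring the result.first().then(...).catch(...) bridges above.
function asCallback (promise, cb) {
  promise
    .then((value) => cb(null, value)) // success -> cb(null, value)
    .catch((err) => cb(err))          // failure -> cb(err)
}

// usage, e.g. in createAndPersistNode:
//   asCallback(ipld.put([node], options).first(), (error, cid) => {
//     callback(error, { node, cid })
//   })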