This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 04e7483

Merge pull request #47 from noffle/end-exporter-stream

End export stream on completion.

2 parents: 138990f + a5b9816

3 files changed: +90 -82 lines

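For context, the fix hinges on one Node streams fact: an object-mode Readable only emits 'end' after null is pushed into it. Before this commit the exporter never pushed null, so consumers piping its output would never see the stream finish. A minimal standalone sketch of that mechanism (not taken from the commit; the pushed entry is hypothetical):

// Sketch: how an object-mode readable-stream signals end-of-stream.
const Readable = require('readable-stream').Readable

const out = new Readable({ objectMode: true })
out._read = () => {}

out.push({ path: 'QmFoo', content: null }) // hypothetical exported entry
out.push(null) // without this, 'end' never fires and pipes never complete

out.on('data', (file) => console.log('got', file.path))
out.on('end', () => console.log('stream ended'))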

package.json (+5 -1)
@@ -35,10 +35,12 @@
   "homepage": "https://github.com./diasdavid/js-ipfs-data-importing#readme",
   "devDependencies": {
     "aegir": "^3.0.1",
+    "async": "^1.5.2",
     "block-stream2": "^1.1.0",
     "bs58": "^3.0.0",
     "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
+    "concat-stream": "^1.5.1",
     "fs-blob-store": "^5.2.1",
     "idb-plus-blob-store": "^1.1.2",
     "ipfs-repo": "^0.7.5",
@@ -51,11 +53,13 @@
     "string-to-stream": "^1.0.1"
   },
   "dependencies": {
-    "async": "^1.5.2",
     "block-stream2": "^1.1.0",
+    "bs58": "^3.0.0",
     "debug": "^2.2.0",
+    "field-trip": "0.0.3",
     "ipfs-merkle-dag": "^0.5.0",
     "ipfs-unixfs": "^0.1.0",
+    "is-ipfs": "^0.2.0",
     "isstream": "^0.1.2",
     "readable-stream": "^1.1.13",
     "run-series": "^1.1.4",

src/exporter.js (+55 -63)
@@ -3,12 +3,14 @@
 const debug = require('debug')
 const log = debug('exporter')
 log.err = debug('exporter:error')
+const isIPFS = require('is-ipfs')
+const bs58 = require('bs58')
 const UnixFS = require('ipfs-unixfs')
 const series = require('run-series')
-const async = require('async')
 const Readable = require('readable-stream').Readable
 const pathj = require('path')
 const util = require('util')
+const fieldtrip = require('field-trip')
 
 exports = module.exports = Exporter
 
@@ -19,21 +21,29 @@ function Exporter (hash, dagService, options) {
     return new Exporter(hash, dagService, options)
   }
 
+  // Sanitize hash.
+  if (!isIPFS.multihash(hash)) {
+    throw new Error('not valid multihash')
+  }
+  if (Buffer.isBuffer(hash)) {
+    hash = bs58.encode(hash)
+  }
+
   Readable.call(this, { objectMode: true })
 
   this.options = options || {}
 
   this._read = (n) => {}
 
-  let fileExporter = (node, name, callback) => {
-    let init
+  let fileExporter = (node, name, done) => {
+    let init = false
 
-    if (!callback) { callback = function noop () {} }
+    if (!done) throw new Error('done must be set')
 
+    // Logic to export a single (possibly chunked) unixfs file.
     var rs = new Readable()
     if (node.links.length === 0) {
       const unmarshaledData = UnixFS.unmarshal(node.data)
-      init = false
       rs._read = () => {
         if (init) {
           return
@@ -43,10 +53,8 @@ function Exporter (hash, dagService, options) {
         rs.push(null)
       }
       this.push({ content: rs, path: name })
-      callback()
-      return
+      done()
     } else {
-      init = false
       rs._read = () => {
         if (init) {
           return
@@ -57,7 +65,7 @@ function Exporter (hash, dagService, options) {
           return (cb) => {
             dagService.get(link.hash, (err, res) => {
               if (err) {
-                cb(err)
+                return cb(err)
               }
               var unmarshaledData = UnixFS.unmarshal(res.data)
               rs.push(unmarshaledData.data)
@@ -67,80 +75,64 @@ function Exporter (hash, dagService, options) {
         })
         series(array, (err, res) => {
           if (err) {
-            callback()
+            rs.emit('error', err)
             return
           }
           rs.push(null)
-          callback()
           return
         })
       }
       this.push({ content: rs, path: name })
-      callback()
-      return
+      done()
     }
   }
 
-  let dirExporter = (node, name, callback) => {
-    let init
+  // Logic to export a unixfs directory.
+  let dirExporter = (node, name, add, done) => {
+    if (!add) throw new Error('add must be set')
+    if (!done) throw new Error('done must be set')
 
-    if (!callback) { callback = function noop () {} }
+    this.push({content: null, path: name})
 
-    var rs = new Readable()
-    if (node.links.length === 0) {
-      init = false
-      rs._read = () => {
-        if (init) {
-          return
-        }
-        init = true
-        rs.push(node.data)
-        rs.push(null)
-      }
-      this.push({content: null, path: name})
-      callback()
-      return
-    } else {
-      async.forEachSeries(node.links, (link, callback) => {
-        dagService.get(link.hash, (err, res) => {
-          if (err) {
-            callback(err)
-          }
-          var unmarshaledData = UnixFS.unmarshal(res.data)
-          if (unmarshaledData.type === 'file') {
-            return (fileExporter(res, pathj.join(name, link.name), callback))
-          }
-          if (unmarshaledData.type === 'directory') {
-            return (dirExporter(res, pathj.join(name, link.name), callback))
-          }
-          callback()
-        })
-      }, (err) => {
-        if (err) {
-          callback()
-          return
-        }
-        callback()
-        return
+    // Directory has links
+    if (node.links.length > 0) {
+      node.links.forEach((link) => {
+        add({ path: pathj.join(name, link.name), hash: link.hash })
       })
     }
+    done()
   }
 
-  dagService.get(hash, (err, fetchedNode) => {
+  // Traverse the DAG asynchronously
+  var self = this
+  fieldtrip([{ path: hash, hash: hash }], visit, (err) => {
     if (err) {
-      this.emit('error', err)
+      self.emit('error', err)
       return
     }
-    const data = UnixFS.unmarshal(fetchedNode.data)
-    const type = data.type
-
-    if (type === 'directory') {
-      dirExporter(fetchedNode, hash)
-    }
-    if (type === 'file') {
-      fileExporter(fetchedNode, hash)
-    }
+    self.push(null)
   })
 
+  // Visit function: called once per node in the exported graph
+  function visit (item, add, done) {
+    dagService.get(item.hash, (err, fetchedNode) => {
+      if (err) {
+        self.emit('error', err)
+        return
+      }
+
+      const data = UnixFS.unmarshal(fetchedNode.data)
+      const type = data.type
+
+      if (type === 'directory') {
+        dirExporter(fetchedNode, item.path, add, done)
+      }
+
+      if (type === 'file') {
+        fileExporter(fetchedNode, item.path, done)
+      }
+    })
+  }
+
   return this
 }
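The new control flow is easier to see in isolation. As used above, field-trip drives a queue of work items: each visit call may add() child items and must call done(), and the final callback fires only once the queue drains, which is exactly where the exporter can end its stream with push(null). A minimal sketch under those assumptions (the item shape and depth field here are made up for illustration):

// Sketch of the field-trip traversal pattern used by the exporter above.
const fieldtrip = require('field-trip')

// Seed the walk with one root item, as the exporter does with its hash.
fieldtrip([{ path: 'root', depth: 0 }], visit, (err) => {
  if (err) throw err
  console.log('walk finished; the exporter calls self.push(null) here')
})

// Called once per queued item. `add` enqueues more items; `done` marks this
// one finished. The final callback above fires only after every done().
function visit (item, add, done) {
  if (item.depth < 2) {
    add({ path: item.path + '/child', depth: item.depth + 1 })
  }
  console.log('visited', item.path)
  done()
}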

test/test-exporter.js (+30 -18)
@@ -7,7 +7,7 @@ const expect = require('chai').expect
 const BlockService = require('ipfs-block-service')
 const DAGService = require('ipfs-merkle-dag').DAGService
 const UnixFS = require('ipfs-unixfs')
-const bl = require('bl')
+const concat = require('concat-stream')
 const fs = require('fs')
 const path = require('path')
 
@@ -32,13 +32,16 @@ module.exports = function (repo) {
         const unmarsh = UnixFS.unmarshal(fetchedNode.data)
         expect(err).to.not.exist
         const testExport = exporter(hash, ds)
-        testExport.on('data', (file) => {
-          file.content.pipe(bl((err, bldata) => {
-            expect(err).to.not.exist
+        testExport.on('error', (err) => {
+          expect(err).to.not.exist
+        })
+        testExport.pipe(concat((files) => {
+          expect(files).to.be.length(1)
+          files[0].content.pipe(concat((bldata) => {
             expect(bldata).to.deep.equal(unmarsh.data)
             done()
           }))
-        })
+        }))
       })
     })
 
@@ -47,10 +50,12 @@ module.exports = function (repo) {
       const bs = new BlockService(repo)
       const ds = new DAGService(bs)
       const testExport = exporter(hash, ds)
+      testExport.on('error', (err) => {
+        expect(err).to.not.exist
+      })
       testExport.on('data', (file) => {
-        file.content.pipe(bl((err, bldata) => {
+        file.content.pipe(concat((bldata) => {
           expect(bldata).to.deep.equal(bigFile)
-          expect(err).to.not.exist
           done()
         }))
       })
@@ -61,10 +66,13 @@ module.exports = function (repo) {
       const bs = new BlockService(repo)
       const ds = new DAGService(bs)
       const testExport = exporter(hash, ds)
+      testExport.on('error', (err) => {
+        expect(err).to.not.exist
+      })
       testExport.on('data', (file) => {
         expect(file.path).to.equal('QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE')
-        file.content.pipe(bl((err, bldata) => {
-          expect(err).to.not.exist
+        file.content.pipe(concat((bldata) => {
+          expect(bldata).to.exist
           done()
         }))
       })
@@ -75,24 +83,28 @@ module.exports = function (repo) {
      const bs = new BlockService(repo)
      const ds = new DAGService(bs)
      const testExport = exporter(hash, ds)
-      var fsa = []
-      testExport.on('data', (files) => {
-        fsa.push(files)
+      testExport.on('error', (err) => {
+        expect(err).to.not.exist
       })
-      setTimeout(() => {
-        expect(fsa[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/200Bytes.txt')
-        expect(fsa[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/dir-another')
-        expect(fsa[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt')
-        expect(fsa[3].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/level-2')
+      testExport.pipe(concat((files) => {
+        expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN')
+        expect(files[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/200Bytes.txt')
+        expect(files[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/dir-another')
+        expect(files[3].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1')
+        expect(files[4].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt')
+        expect(files[5].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/level-2')
         done()
-      }, 1000)
+      }))
     })
 
     it('returns a null stream for dir', (done) => {
       const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' // This hash doesn't exist in the repo
       const bs = new BlockService(repo)
       const ds = new DAGService(bs)
       const testExport = exporter(hash, ds)
+      testExport.on('error', (err) => {
+        expect(err).to.not.exist
+      })
       testExport.on('data', (dir) => {
         expect(dir.content).to.equal(null)
         done()
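The test changes show the consumer-facing payoff: because the stream now ends itself, the whole export can be piped into concat-stream instead of collecting 'data' events and waiting on a setTimeout. A sketch of that pattern outside the test harness, where `repo` (an open ipfs-repo instance) and the multihash are hypothetical placeholders:

// Sketch of consuming the exporter after this commit; placeholders noted.
const BlockService = require('ipfs-block-service')
const DAGService = require('ipfs-merkle-dag').DAGService
const concat = require('concat-stream')
const exporter = require('./src/exporter')

const ds = new DAGService(new BlockService(repo)) // `repo`: placeholder ipfs-repo

const myExport = exporter('QmSomeValidMultihash', ds) // placeholder hash

myExport.on('error', (err) => { console.error(err) })

// Every {path, content} entry arrives (directories have content === null),
// and concat-stream's callback fires once the exporter pushes null.
myExport.pipe(concat((files) => {
  console.log(files.map((f) => f.path))
}))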
