3
3
const debug = require ( 'debug' )
4
4
const log = debug ( 'exporter' )
5
5
log . err = debug ( 'exporter:error' )
6
+ const isIPFS = require ( 'is-ipfs' )
7
+ const bs58 = require ( 'bs58' )
6
8
const UnixFS = require ( 'ipfs-unixfs' )
7
9
const series = require ( 'run-series' )
8
- const async = require ( 'async' )
9
10
const Readable = require ( 'readable-stream' ) . Readable
10
11
const pathj = require ( 'path' )
11
12
const util = require ( 'util' )
13
+ const fieldtrip = require ( 'field-trip' )
// Expose the Exporter constructor as this module's sole export.
module.exports = exports = Exporter
@@ -19,21 +21,29 @@ function Exporter (hash, dagService, options) {
19
21
return new Exporter ( hash , dagService , options )
20
22
}
21
23
24
+ // Sanitize hash.
25
+ if ( ! isIPFS . multihash ( hash ) ) {
26
+ throw new Error ( 'not valid multihash' )
27
+ }
28
+ if ( Buffer . isBuffer ( hash ) ) {
29
+ hash = bs58 . encode ( hash )
30
+ }
31
+
22
32
Readable . call ( this , { objectMode : true } )
23
33
24
34
this . options = options || { }
25
35
26
36
this . _read = ( n ) => { }
27
37
28
- let fileExporter = ( node , name , callback ) => {
29
- let init
38
+ let fileExporter = ( node , name , done ) => {
39
+ let init = false
30
40
31
- if ( ! callback ) { callback = function noop ( ) { } }
41
+ if ( ! done ) throw new Error ( 'done must be set' )
32
42
43
+ // Logic to export a single (possibly chunked) unixfs file.
33
44
var rs = new Readable ( )
34
45
if ( node . links . length === 0 ) {
35
46
const unmarshaledData = UnixFS . unmarshal ( node . data )
36
- init = false
37
47
rs . _read = ( ) => {
38
48
if ( init ) {
39
49
return
@@ -43,10 +53,8 @@ function Exporter (hash, dagService, options) {
43
53
rs . push ( null )
44
54
}
45
55
this . push ( { content : rs , path : name } )
46
- callback ( )
47
- return
56
+ done ( )
48
57
} else {
49
- init = false
50
58
rs . _read = ( ) => {
51
59
if ( init ) {
52
60
return
@@ -57,7 +65,7 @@ function Exporter (hash, dagService, options) {
57
65
return ( cb ) => {
58
66
dagService . get ( link . hash , ( err , res ) => {
59
67
if ( err ) {
60
- cb ( err )
68
+ return cb ( err )
61
69
}
62
70
var unmarshaledData = UnixFS . unmarshal ( res . data )
63
71
rs . push ( unmarshaledData . data )
@@ -67,80 +75,64 @@ function Exporter (hash, dagService, options) {
67
75
} )
68
76
series ( array , ( err , res ) => {
69
77
if ( err ) {
70
- callback ( )
78
+ rs . emit ( 'error' , err )
71
79
return
72
80
}
73
81
rs . push ( null )
74
- callback ( )
75
82
return
76
83
} )
77
84
}
78
85
this . push ( { content : rs , path : name } )
79
- callback ( )
80
- return
86
+ done ( )
81
87
}
82
88
}
83
89
84
- let dirExporter = ( node , name , callback ) => {
85
- let init
90
+ // Logic to export a unixfs directory.
91
+ let dirExporter = ( node , name , add , done ) => {
92
+ if ( ! add ) throw new Error ( 'add must be set' )
93
+ if ( ! done ) throw new Error ( 'done must be set' )
86
94
87
- if ( ! callback ) { callback = function noop ( ) { } }
95
+ this . push ( { content : null , path : name } )
88
96
89
- var rs = new Readable ( )
90
- if ( node . links . length === 0 ) {
91
- init = false
92
- rs . _read = ( ) => {
93
- if ( init ) {
94
- return
95
- }
96
- init = true
97
- rs . push ( node . data )
98
- rs . push ( null )
99
- }
100
- this . push ( { content : null , path : name } )
101
- callback ( )
102
- return
103
- } else {
104
- async . forEachSeries ( node . links , ( link , callback ) => {
105
- dagService . get ( link . hash , ( err , res ) => {
106
- if ( err ) {
107
- callback ( err )
108
- }
109
- var unmarshaledData = UnixFS . unmarshal ( res . data )
110
- if ( unmarshaledData . type === 'file' ) {
111
- return ( fileExporter ( res , pathj . join ( name , link . name ) , callback ) )
112
- }
113
- if ( unmarshaledData . type === 'directory' ) {
114
- return ( dirExporter ( res , pathj . join ( name , link . name ) , callback ) )
115
- }
116
- callback ( )
117
- } )
118
- } , ( err ) => {
119
- if ( err ) {
120
- callback ( )
121
- return
122
- }
123
- callback ( )
124
- return
97
+ // Directory has links
98
+ if ( node . links . length > 0 ) {
99
+ node . links . forEach ( ( link ) => {
100
+ add ( { path : pathj . join ( name , link . name ) , hash : link . hash } )
125
101
} )
126
102
}
103
+ done ( )
127
104
}
128
105
129
- dagService . get ( hash , ( err , fetchedNode ) => {
106
+ // Traverse the DAG asynchronously
107
+ var self = this
108
+ fieldtrip ( [ { path : hash , hash : hash } ] , visit , ( err ) => {
130
109
if ( err ) {
131
- this . emit ( 'error' , err )
110
+ self . emit ( 'error' , err )
132
111
return
133
112
}
134
- const data = UnixFS . unmarshal ( fetchedNode . data )
135
- const type = data . type
136
-
137
- if ( type === 'directory' ) {
138
- dirExporter ( fetchedNode , hash )
139
- }
140
- if ( type === 'file' ) {
141
- fileExporter ( fetchedNode , hash )
142
- }
113
+ self . push ( null )
143
114
} )
144
115
116
+ // Visit function: called once per node in the exported graph
117
+ function visit ( item , add , done ) {
118
+ dagService . get ( item . hash , ( err , fetchedNode ) => {
119
+ if ( err ) {
120
+ self . emit ( 'error' , err )
121
+ return
122
+ }
123
+
124
+ const data = UnixFS . unmarshal ( fetchedNode . data )
125
+ const type = data . type
126
+
127
+ if ( type === 'directory' ) {
128
+ dirExporter ( fetchedNode , item . path , add , done )
129
+ }
130
+
131
+ if ( type === 'file' ) {
132
+ fileExporter ( fetchedNode , item . path , done )
133
+ }
134
+ } )
135
+ }
136
+
145
137
return this
146
138
}
0 commit comments