diff --git a/examples/circuit-relaying/package.json b/examples/circuit-relaying/package.json index 28365fbf49..596a228f01 100644 --- a/examples/circuit-relaying/package.json +++ b/examples/circuit-relaying/package.json @@ -19,7 +19,7 @@ "ipfs-pubsub-room": "^2.0.1" }, "devDependencies": { - "aegir": "^21.3.0", + "aegir": "21.3.0", "execa": "^3.2.0", "ipfs-css": "^0.13.1", "ipfs-http-client": "^42.0.0", diff --git a/examples/running-multiple-nodes/test.js b/examples/running-multiple-nodes/test.js index 958f4ceeba..e8666d2b9f 100644 --- a/examples/running-multiple-nodes/test.js +++ b/examples/running-multiple-nodes/test.js @@ -5,7 +5,7 @@ const IPFS = require('ipfs') const execa = require('execa') const os = require('os') const path = require('path') -const hat = require('hat') +const nanoid = require('nanoid') const { waitForOutput } = require('test-ipfs-example/utils') @@ -18,7 +18,7 @@ async function testCli () { } async function startCliNode () { - const repoDir = path.join(os.tmpdir(), `repo-${hat()}`) + const repoDir = path.join(os.tmpdir(), `repo-${nanoid()}`) const opts = { env: { ...process.env, @@ -43,7 +43,7 @@ async function testProgramatically () { } async function startProgramaticNode () { - const repoDir = path.join(os.tmpdir(), `repo-${hat()}`) + const repoDir = path.join(os.tmpdir(), `repo-${nanoid()}`) const node = await IPFS.create({ repo: repoDir, config: { diff --git a/packages/interface-ipfs-core/package.json b/packages/interface-ipfs-core/package.json index cb93dc2bb1..0ac9375b2e 100644 --- a/packages/interface-ipfs-core/package.json +++ b/packages/interface-ipfs-core/package.json @@ -32,17 +32,17 @@ "dependencies": { "chai": "^4.2.0", "chai-as-promised": "^7.1.1", - "chai-things": "^0.2.0", + "chai-subset": "^1.6.0", "cids": "^0.7.3", "delay": "^4.3.0", "dirty-chai": "^2.0.1", - "hat": "0.0.3", "ipfs-block": "^0.8.1", "ipfs-unixfs": "^1.0.0", "ipfs-utils": "^0.7.2", "ipld-dag-cbor": "^0.15.1", "ipld-dag-pb": "^0.18.3", "is-ipfs": "^0.6.1", + "iso-random-stream": "^1.1.1", "it-all": "^1.0.1", "it-concat": "^1.0.0", "it-last": "^1.0.1", @@ -51,11 +51,13 @@ "multibase": "^0.6.0", "multihashes": "^0.4.14", "multihashing-async": "^0.8.0", + "nanoid": "^2.1.11", "peer-id": "^0.13.5", - "readable-stream": "^3.4.0" + "readable-stream": "^3.4.0", + "temp-write": "^4.0.0" }, "devDependencies": { - "aegir": "^21.3.0", + "aegir": "21.3.0", "ipfsd-ctl": "^3.0.0" }, "contributors": [ diff --git a/packages/interface-ipfs-core/src/add.js b/packages/interface-ipfs-core/src/add.js index 2193bb36d0..0889570592 100644 --- a/packages/interface-ipfs-core/src/add.js +++ b/packages/interface-ipfs-core/src/add.js @@ -4,6 +4,7 @@ const { fixtures } = require('./utils') const { Readable } = require('readable-stream') const all = require('it-all') +const last = require('it-last') const fs = require('fs') const os = require('os') const path = require('path') @@ -13,7 +14,6 @@ const urlSource = require('ipfs-utils/src/files/url-source') const { isNode } = require('ipfs-utils/src/env') const { getDescribe, getIt, expect } = require('./utils/mocha') const { echoUrl, redirectUrl } = require('./utils/echo-http-server') - const fixturesPath = path.join(__dirname, '..', 'test', 'fixtures') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -221,9 +221,8 @@ module.exports = (common, options) => { emptyDir('files/empty') ] - const res = await all(ipfs.add(dirs)) + const root = await last(ipfs.add(dirs)) - const root = res[res.length - 1] expect(root.path).to.equal('test-folder') 
expect(root.cid.toString()).to.equal(fixtures.directory.cid) }) @@ -258,9 +257,7 @@ module.exports = (common, options) => { accumProgress += p } - const filesAdded = await all(ipfs.add(dirs, { progress: handler })) - - const root = filesAdded[filesAdded.length - 1] + const root = await last(ipfs.add(dirs, { progress: handler })) expect(progCalled).to.be.true() expect(accumProgress).to.be.at.least(total) expect(root.path).to.equal('test-folder') @@ -289,10 +286,10 @@ module.exports = (common, options) => { expect(nonSeqDirFilePaths.every(p => filesAddedPaths.includes(p))).to.be.true() }) - it('should fail when passed invalid input', () => { + it('should fail when passed invalid input', async () => { const nonValid = 138 - return expect(all(ipfs.add(nonValid))).to.eventually.be.rejected() + await expect(all(ipfs.add(nonValid))).to.eventually.be.rejected() }) it('should wrap content in a directory', async () => { diff --git a/packages/interface-ipfs-core/src/block/rm.js b/packages/interface-ipfs-core/src/block/rm.js index 37cd17d98f..7d2f486dbf 100644 --- a/packages/interface-ipfs-core/src/block/rm.js +++ b/packages/interface-ipfs-core/src/block/rm.js @@ -2,7 +2,7 @@ 'use strict' const { getDescribe, getIt, expect } = require('../utils/mocha') -const hat = require('hat') +const nanoid = require('nanoid') const all = require('it-all') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -22,7 +22,7 @@ module.exports = (common, options) => { after(() => common.clean()) it('should remove by CID object', async () => { - const cid = await ipfs.dag.put(Buffer.from(hat()), { + const cid = await ipfs.dag.put(Buffer.from(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }) @@ -44,7 +44,7 @@ module.exports = (common, options) => { }) it('should remove by CID in string', async () => { - const cid = await ipfs.dag.put(Buffer.from(hat()), { + const cid = await ipfs.dag.put(Buffer.from(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }) @@ -56,7 +56,7 @@ module.exports = (common, options) => { }) it('should remove by CID in buffer', async () => { - const cid = await ipfs.dag.put(Buffer.from(hat()), { + const cid = await ipfs.dag.put(Buffer.from(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }) @@ -69,15 +69,15 @@ module.exports = (common, options) => { it('should remove multiple CIDs', async () => { const cids = [ - await ipfs.dag.put(Buffer.from(hat()), { + await ipfs.dag.put(Buffer.from(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }), - await ipfs.dag.put(Buffer.from(hat()), { + await ipfs.dag.put(Buffer.from(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }), - await ipfs.dag.put(Buffer.from(hat()), { + await ipfs.dag.put(Buffer.from(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }) @@ -94,7 +94,7 @@ module.exports = (common, options) => { }) it('should error when removing non-existent blocks', async () => { - const cid = await ipfs.dag.put(Buffer.from(hat()), { + const cid = await ipfs.dag.put(Buffer.from(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }) @@ -111,7 +111,7 @@ module.exports = (common, options) => { }) it('should not error when force removing non-existent blocks', async () => { - const cid = await ipfs.dag.put(Buffer.from(hat()), { + const cid = await ipfs.dag.put(Buffer.from(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }) @@ -128,7 +128,7 @@ module.exports = (common, options) => { }) it('should return empty output when removing blocks quietly', async () => { - const cid = await ipfs.dag.put(Buffer.from(hat()), { + const cid = await ipfs.dag.put(Buffer.from(nanoid()), { 
format: 'raw', hashAlg: 'sha2-256' }) @@ -138,7 +138,7 @@ module.exports = (common, options) => { }) it('should error when removing pinned blocks', async () => { - const cid = await ipfs.dag.put(Buffer.from(hat()), { + const cid = await ipfs.dag.put(Buffer.from(nanoid()), { format: 'raw', hashAlg: 'sha2-256' }) diff --git a/packages/interface-ipfs-core/src/dht/get.js b/packages/interface-ipfs-core/src/dht/get.js index dcc60785db..7fc977a4b7 100644 --- a/packages/interface-ipfs-core/src/dht/get.js +++ b/packages/interface-ipfs-core/src/dht/get.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -36,8 +36,8 @@ module.exports = (common, options) => { // "invalid record keytype" - it needs to put a valid key and value for it to // be a useful test. it.skip('should get a value after it was put on another node', async () => { - const key = Buffer.from(hat()) - const value = Buffer.from(hat()) + const key = Buffer.from(nanoid()) + const value = Buffer.from(nanoid()) await nodeB.dht.put(key, value) const result = await nodeA.dht.get(key) diff --git a/packages/interface-ipfs-core/src/files/chmod.js b/packages/interface-ipfs-core/src/files/chmod.js index 7c0372bf3b..6d57189d2d 100644 --- a/packages/interface-ipfs-core/src/files/chmod.js +++ b/packages/interface-ipfs-core/src/files/chmod.js @@ -1,8 +1,9 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') +const isShardAtPath = require('../utils/is-shard-at-path') module.exports = (common, options) => { const describe = getDescribe(options) @@ -13,16 +14,20 @@ module.exports = (common, options) => { let ipfs - async function testMode (mode, expectedMode) { - const testPath = `/test-${hat()}` + async function testChmod (initialMode, modification, expectedFinalMode) { + const path = `/test-${nanoid()}` - await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { - create: true + await ipfs.files.write(path, Buffer.from('Hello world!'), { + create: true, + mtime: new Date(), + mode: initialMode + }) + await ipfs.files.chmod(path, modification, { + flush: true }) - await ipfs.files.chmod(testPath, mode) - const stat = await ipfs.files.stat(testPath) - expect(stat).to.have.property('mode').that.equals(expectedMode) + const updatedMode = (await ipfs.files.stat(path)).mode + expect(updatedMode).to.equal(parseInt(expectedFinalMode, 8)) } before(async () => { @@ -31,32 +36,298 @@ module.exports = (common, options) => { after(() => common.clean()) - it('should change file mode', async function () { - const mode = parseInt('544', 8) - await testMode(mode, mode) + it('should update the mode for a file', async () => { + const path = `/foo-${Math.random()}` + + await ipfs.files.write(path, Buffer.from('Hello world'), { + create: true, + mtime: new Date() + }) + const originalMode = (await ipfs.files.stat(path)).mode + await ipfs.files.chmod(path, '0777', { + flush: true + }) + + const updatedMode = (await ipfs.files.stat(path)).mode + expect(updatedMode).to.not.equal(originalMode) + expect(updatedMode).to.equal(parseInt('0777', 8)) + }) + + it('should update the mode for a directory', async () => { + const path = `/foo-${Math.random()}` + + await ipfs.files.mkdir(path) + const originalMode = (await ipfs.files.stat(path)).mode + await ipfs.files.chmod(path, 
'0777', { + flush: true + }) + + const updatedMode = (await ipfs.files.stat(path)).mode + expect(updatedMode).to.not.equal(originalMode) + expect(updatedMode).to.equal(parseInt('0777', 8)) }) - it('should change file mode as string', async function () { - const mode = parseInt('544', 8) - await testMode('544', mode) + it('should update the mode for a hamt-sharded-directory', async () => { + const path = `/foo-${Math.random()}` + + await ipfs.files.mkdir(path) + await ipfs.files.write(`${path}/foo.txt`, Buffer.from('Hello world'), { + create: true, + shardSplitThreshold: 0 + }) + const originalMode = (await ipfs.files.stat(path)).mode + await ipfs.files.chmod(path, '0777', { + flush: true + }) + + const updatedMode = (await ipfs.files.stat(path)).mode + expect(updatedMode).to.not.equal(originalMode) + expect(updatedMode).to.equal(parseInt('0777', 8)) }) - it('should change file mode to 0', async function () { - const mode = 0 - await testMode(mode, mode) + it('should update modes with basic symbolic notation that adds bits', async () => { + await testChmod('0000', '+x', '0111') + await testChmod('0000', '+w', '0222') + await testChmod('0000', '+r', '0444') + await testChmod('0000', 'u+x', '0100') + await testChmod('0000', 'u+w', '0200') + await testChmod('0000', 'u+r', '0400') + await testChmod('0000', 'g+x', '0010') + await testChmod('0000', 'g+w', '0020') + await testChmod('0000', 'g+r', '0040') + await testChmod('0000', 'o+x', '0001') + await testChmod('0000', 'o+w', '0002') + await testChmod('0000', 'o+r', '0004') + await testChmod('0000', 'ug+x', '0110') + await testChmod('0000', 'ug+w', '0220') + await testChmod('0000', 'ug+r', '0440') + await testChmod('0000', 'ugo+x', '0111') + await testChmod('0000', 'ugo+w', '0222') + await testChmod('0000', 'ugo+r', '0444') + await testChmod('0000', 'a+x', '0111') + await testChmod('0000', 'a+w', '0222') + await testChmod('0000', 'a+r', '0444') }) - it('should change directory mode', async function () { - const testPath = `/test-${hat()}` - const mode = parseInt('544', 8) + it('should update modes with basic symbolic notation that removes bits', async () => { + await testChmod('0111', '-x', '0000') + await testChmod('0222', '-w', '0000') + await testChmod('0444', '-r', '0000') + await testChmod('0100', 'u-x', '0000') + await testChmod('0200', 'u-w', '0000') + await testChmod('0400', 'u-r', '0000') + await testChmod('0010', 'g-x', '0000') + await testChmod('0020', 'g-w', '0000') + await testChmod('0040', 'g-r', '0000') + await testChmod('0001', 'o-x', '0000') + await testChmod('0002', 'o-w', '0000') + await testChmod('0004', 'o-r', '0000') + await testChmod('0110', 'ug-x', '0000') + await testChmod('0220', 'ug-w', '0000') + await testChmod('0440', 'ug-r', '0000') + await testChmod('0111', 'ugo-x', '0000') + await testChmod('0222', 'ugo-w', '0000') + await testChmod('0444', 'ugo-r', '0000') + await testChmod('0111', 'a-x', '0000') + await testChmod('0222', 'a-w', '0000') + await testChmod('0444', 'a-r', '0000') + }) + + it('should update modes with basic symbolic notation that overrides bits', async () => { + await testChmod('0777', '=x', '0111') + await testChmod('0777', '=w', '0222') + await testChmod('0777', '=r', '0444') + await testChmod('0777', 'u=x', '0177') + await testChmod('0777', 'u=w', '0277') + await testChmod('0777', 'u=r', '0477') + await testChmod('0777', 'g=x', '0717') + await testChmod('0777', 'g=w', '0727') + await testChmod('0777', 'g=r', '0747') + await testChmod('0777', 'o=x', '0771') + await testChmod('0777', 'o=w', '0772') + 
await testChmod('0777', 'o=r', '0774') + await testChmod('0777', 'ug=x', '0117') + await testChmod('0777', 'ug=w', '0227') + await testChmod('0777', 'ug=r', '0447') + await testChmod('0777', 'ugo=x', '0111') + await testChmod('0777', 'ugo=w', '0222') + await testChmod('0777', 'ugo=r', '0444') + await testChmod('0777', 'a=x', '0111') + await testChmod('0777', 'a=w', '0222') + await testChmod('0777', 'a=r', '0444') + }) - await ipfs.files.mkdir(testPath, { - create: true + it('should update modes with multiple symbolic notation', async () => { + await testChmod('0000', 'g+x,u+w', '0210') + }) + + it('should update modes with special symbolic notation', async () => { + await testChmod('0000', 'g+s', '2000') + await testChmod('0000', 'u+s', '4000') + await testChmod('0000', '+t', '1000') + await testChmod('0000', '+s', '6000') + }) + + it('should apply special execute permissions to world', async () => { + const path = `/foo-${Math.random()}` + const sub = `${path}/sub` + const file = `${path}/sub/foo.txt` + const bin = `${path}/sub/bar` + + await ipfs.files.mkdir(sub, { + parents: true + }) + await ipfs.files.touch(file) + await ipfs.files.touch(bin) + + await ipfs.files.chmod(path, 0o644, { + recursive: true + }) + await ipfs.files.chmod(bin, 'u+x') + + await expect(ipfs.files.stat(path)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(sub)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(file)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(bin)).to.eventually.have.property('mode', 0o744) + + await ipfs.files.chmod(path, 'a+X', { + recursive: true }) - await ipfs.files.chmod(testPath, mode) - const stat = await ipfs.files.stat(testPath) - expect(stat).to.have.property('mode').that.equals(mode) + // directories should be world-executable + await expect(ipfs.files.stat(path)).to.eventually.have.property('mode', 0o755) + await expect(ipfs.files.stat(sub)).to.eventually.have.property('mode', 0o755) + + // files without prior execute bit should be untouched + await expect(ipfs.files.stat(file)).to.eventually.have.property('mode', 0o644) + + // files with prior execute bit should now be world-executable + await expect(ipfs.files.stat(bin)).to.eventually.have.property('mode', 0o755) + }) + + it('should apply special execute permissions to user', async () => { + const path = `/foo-${Math.random()}` + const sub = `${path}/sub` + const file = `${path}/sub/foo.txt` + const bin = `${path}/sub/bar` + + await ipfs.files.mkdir(sub, { + parents: true + }) + await ipfs.files.touch(file) + await ipfs.files.touch(bin) + + await ipfs.files.chmod(path, 0o644, { + recursive: true + }) + await ipfs.files.chmod(bin, 'u+x') + + await expect(ipfs.files.stat(path)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(sub)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(file)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(bin)).to.eventually.have.property('mode', 0o744) + + await ipfs.files.chmod(path, 'u+X', { + recursive: true + }) + + // directories should be user executable + await expect(ipfs.files.stat(path)).to.eventually.have.property('mode', 0o744) + await expect(ipfs.files.stat(sub)).to.eventually.have.property('mode', 0o744) + + // files without prior execute bit should be untouched + await expect(ipfs.files.stat(file)).to.eventually.have.property('mode', 0o644) + + // files with prior execute bit should now be user executable + await 
expect(ipfs.files.stat(bin)).to.eventually.have.property('mode', 0o744) + }) + + it('should apply special execute permissions to user and group', async () => { + const path = `/foo-${Math.random()}` + const sub = `${path}/sub` + const file = `${path}/sub/foo.txt` + const bin = `${path}/sub/bar` + + await ipfs.files.mkdir(sub, { + parents: true + }) + await ipfs.files.touch(file) + await ipfs.files.touch(bin) + + await ipfs.files.chmod(path, 0o644, { + recursive: true + }) + await ipfs.files.chmod(bin, 'u+x') + + await expect(ipfs.files.stat(path)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(sub)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(file)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(bin)).to.eventually.have.property('mode', 0o744) + + await ipfs.files.chmod(path, 'ug+X', { + recursive: true + }) + + // directories should be user and group executable + await expect(ipfs.files.stat(path)).to.eventually.have.property('mode', 0o754) + await expect(ipfs.files.stat(sub)).to.eventually.have.property('mode', 0o754) + + // files without prior execute bit should be untouched + await expect(ipfs.files.stat(file)).to.eventually.have.property('mode', 0o644) + + // files with prior execute bit should now be user and group executable + await expect(ipfs.files.stat(bin)).to.eventually.have.property('mode', 0o754) + }) + + it('should apply special execute permissions to sharded directories', async () => { + const path = `/foo-${Math.random()}` + const sub = `${path}/sub` + const file = `${path}/sub/foo.txt` + const bin = `${path}/sub/bar` + + await ipfs.files.mkdir(sub, { + parents: true, + shardSplitThreshold: 0 + }) + await ipfs.files.touch(file, { + shardSplitThreshold: 0 + }) + await ipfs.files.touch(bin, { + shardSplitThreshold: 0 + }) + + await ipfs.files.chmod(path, 0o644, { + recursive: true, + shardSplitThreshold: 0 + }) + await ipfs.files.chmod(bin, 'u+x', { + recursive: true, + shardSplitThreshold: 0 + }) + + await expect(ipfs.files.stat(path)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(sub)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(file)).to.eventually.have.property('mode', 0o644) + await expect(ipfs.files.stat(bin)).to.eventually.have.property('mode', 0o744) + + await ipfs.files.chmod(path, 'ug+X', { + recursive: true, + shardSplitThreshold: 0 + }) + + // directories should be user and group executable + await expect(isShardAtPath(path, ipfs)).to.eventually.be.true() + await expect(ipfs.files.stat(path)).to.eventually.include({ + type: 'directory', + mode: 0o754 + }) + await expect(ipfs.files.stat(sub)).to.eventually.have.property('mode', 0o754) + + // files without prior execute bit should be untouched + await expect(ipfs.files.stat(file)).to.eventually.have.property('mode', 0o644) + + // files with prior execute bit should now be user and group executable + await expect(ipfs.files.stat(bin)).to.eventually.have.property('mode', 0o754) }) }) } diff --git a/packages/interface-ipfs-core/src/files/cp.js b/packages/interface-ipfs-core/src/files/cp.js index 930a2dc70e..cd96783421 100644 --- a/packages/interface-ipfs-core/src/files/cp.js +++ b/packages/interface-ipfs-core/src/files/cp.js @@ -1,11 +1,17 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const all = require('it-all') const concat = require('it-concat') const { fixtures } = require('../utils') const { getDescribe, getIt, expect 
} = require('../utils/mocha') +const mh = require('multihashing-async').multihash +const Block = require('ipfs-block') +const CID = require('cids') +const randomBytes = require('iso-random-stream/src/random') +const createShardedDirectory = require('../utils/create-sharded-directory') +const isShardAtPath = require('../utils/is-shard-at-path') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -25,44 +31,345 @@ module.exports = (common, options) => { after(() => common.clean()) - it('should copy file, expect error', () => { - const testDir = `/test-${hat()}` + it('refuses to copy files without a source', async () => { + await expect(ipfs.files.cp()).to.eventually.be.rejectedWith('Please supply at least one source') + }) + + it('refuses to copy files without a source, even with options', async () => { + await expect(ipfs.files.cp({})).to.eventually.be.rejectedWith('Please supply at least one source') + }) - return expect(ipfs.files.cp(`${testDir}/c`, `${testDir}/b`)).to.eventually.be.rejected() + it('refuses to copy files without a destination', async () => { + await expect(ipfs.files.cp('/source')).to.eventually.be.rejectedWith('Please supply at least one source') }) - it('should copy file, expect no error', async () => { - const testDir = `/test-${hat()}` + it('refuses to copy files without a destination, even with options', async () => { + await expect(ipfs.files.cp('/source', {})).to.eventually.be.rejectedWith('Please supply at least one source') + }) - await ipfs.files.mkdir(testDir, { parents: true }) - await ipfs.files.write(`${testDir}/a`, Buffer.from('TEST'), { create: true }) - await ipfs.files.cp(`${testDir}/a`, `${testDir}/b`) + it('refuses to copy a non-existent file', async () => { + await expect(ipfs.files.cp('/i-do-not-exist', '/destination', {})).to.eventually.be.rejectedWith('does not exist') }) - it('should copy dir, expect error', () => { - const testDir = `/test-${hat()}` + it('refuses to copy multiple files to a non-existent child directory', async () => { + const src1 = `/src1-${Math.random()}` + const src2 = `/src2-${Math.random()}` + const parent = `/output-${Math.random()}` - return expect(ipfs.files.cp(`${testDir}/lv1/lv3`, `${testDir}/lv1/lv4`)).to.eventually.be.rejected() + await ipfs.files.write(src1, [], { + create: true + }) + await ipfs.files.write(src2, [], { + create: true + }) + await ipfs.files.mkdir(parent) + await expect(ipfs.files.cp(src1, src2, `${parent}/child`)).to.eventually.be.rejectedWith(Error) + .that.has.property('message').that.matches(/destination did not exist/) }) - it('should copy dir, expect no error', async () => { - const testDir = `/test-${hat()}` + it('refuses to copy files to an unreadable node', async () => { + const src1 = `/src1-${Math.random()}` + const parent = `/output-${Math.random()}` - await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }) - await ipfs.files.cp(`${testDir}/lv1/lv2`, `${testDir}/lv1/lv3`) + const cid = new CID(1, 'identity', mh.encode(Buffer.from('derp'), 'identity')) + await ipfs.block.put(new Block(Buffer.from('derp'), cid), { cid }) + await ipfs.files.cp(`/ipfs/${cid}`, parent) + + await ipfs.files.write(src1, [], { + create: true + }) + await expect(ipfs.files.cp(src1, `${parent}/child`)).to.eventually.be.rejectedWith(Error) + .that.has.property('message').that.matches(/"identity"/) + }) + + it('refuses to copy files to an existing file', async () => { + const source = `/source-file-${Math.random()}.txt` + const destination = `/dest-file-${Math.random()}.txt` + + await
ipfs.files.write(source, randomBytes(100), { + create: true + }) + await ipfs.files.write(destination, randomBytes(100), { + create: true + }) + + try { + await ipfs.files.cp(source, destination) + throw new Error('No error was thrown when trying to overwrite a file') + } catch (err) { + expect(err.message).to.contain('directory already has entry by that name') + } }) - it('should copy from outside of mfs', async () => { - const [{ cid }] = await all(ipfs.add(fixtures.smallFile.data)) - const testFilePath = `/${hat()}` - await ipfs.files.cp(`/ipfs/${cid}`, testFilePath) - const testFileData = await concat(ipfs.files.read(testFilePath)) - expect(testFileData.slice()).to.eql(fixtures.smallFile.data) + it('refuses to copy a file to itself', async () => { + const source = `/source-file-${Math.random()}.txt` + + await ipfs.files.write(source, randomBytes(100), { + create: true + }) + + try { + await ipfs.files.cp(source, source) + throw new Error('No error was thrown when trying to copy a file to itself') + } catch (err) { + expect(err.message).to.contain('directory already has entry by that name') + } + }) + + it('copies a file to a new location', async () => { + const source = `/source-file-${Math.random()}.txt` + const destination = `/dest-file-${Math.random()}.txt` + const data = randomBytes(500) + + await ipfs.files.write(source, data, { + create: true + }) + + await ipfs.files.cp(source, destination) + + const buffer = await concat(ipfs.files.read(destination)) + + expect(buffer.slice()).to.deep.equal(data) + }) + + it('copies a file to a pre-existing directory', async () => { + const source = `/source-file-${Math.random()}.txt` + const directory = `/dest-directory-${Math.random()}` + const destination = `${directory}${source}` + + await ipfs.files.write(source, randomBytes(500), { + create: true + }) + await ipfs.files.mkdir(directory) + await ipfs.files.cp(source, directory) + + const stats = await ipfs.files.stat(destination) + expect(stats.size).to.equal(500) + }) + + it('copies directories', async () => { + const source = `/source-directory-${Math.random()}` + const destination = `/dest-directory-${Math.random()}` + + await ipfs.files.mkdir(source) + await ipfs.files.cp(source, destination) + + const stats = await ipfs.files.stat(destination) + expect(stats.type).to.equal('directory') + }) + + it('copies directories recursively', async () => { + const directory = `/source-directory-${Math.random()}` + const subDirectory = `/source-directory-${Math.random()}` + const source = `${directory}${subDirectory}` + const destination = `/dest-directory-${Math.random()}` + + await ipfs.files.mkdir(source, { + parents: true + }) + await ipfs.files.cp(directory, destination) + + const stats = await ipfs.files.stat(destination) + expect(stats.type).to.equal('directory') + + const subDirStats = await ipfs.files.stat(`${destination}${subDirectory}`) + expect(subDirStats.type).to.equal('directory') + }) + + it('copies multiple files to a new location', async () => { + const sources = [{ + path: `/source-file-${Math.random()}.txt`, + data: randomBytes(500) + }, { + path: `/source-file-${Math.random()}.txt`, + data: randomBytes(500) + }] + const destination = `/dest-dir-${Math.random()}` + + for (const source of sources) { + await ipfs.files.write(source.path, source.data, { + create: true + }) + } + + await ipfs.files.cp(sources[0].path, sources[1].path, destination, { + parents: true + }) + + for (const source of sources) { + const buffer = await concat(ipfs.files.read(`${destination}${source.path}`)) + 
expect(buffer.slice()).to.deep.equal(source.data) + } + }) + + it('copies files from ipfs paths', async () => { + const source = `/source-file-${Math.random()}.txt` + const destination = `/dest-file-${Math.random()}.txt` + + await ipfs.files.write(source, randomBytes(100), { + create: true + }) + + const stats = await ipfs.files.stat(source) + await ipfs.files.cp(`/ipfs/${stats.cid}`, destination) + + const destinationStats = await ipfs.files.stat(destination) + expect(destinationStats.size).to.equal(100) + }) + + it('copies files from deep ipfs paths', async () => { + const dir = `dir-${Math.random()}` + const file = `source-file-${Math.random()}.txt` + const source = `/${dir}/${file}` + const destination = `/dest-file-${Math.random()}.txt` + + await ipfs.files.write(source, randomBytes(100), { + create: true, + parents: true + }) + + const stats = await ipfs.files.stat(`/${dir}`) + await ipfs.files.cp(`/ipfs/${stats.cid}/${file}`, destination) + + const destinationStats = await ipfs.files.stat(destination) + expect(destinationStats.size).to.equal(100) + }) + + it('copies files to deep mfs paths and creates intermediate directories', async () => { + const source = `/source-file-${Math.random()}.txt` + const destination = `/really/deep/path/to/dest-file-${Math.random()}.txt` + + await ipfs.files.write(source, randomBytes(100), { + create: true + }) + + await ipfs.files.cp(source, destination, { + parents: true + }) + + const destinationStats = await ipfs.files.stat(destination) + expect(destinationStats.size).to.equal(100) + }) + + it('fails to copy files to deep mfs paths when intermediate directories do not exist', async () => { + const source = `/source-file-${Math.random()}.txt` + const destination = `/really/deep/path-${Math.random()}/to-${Math.random()}/dest-file-${Math.random()}.txt` + + await ipfs.files.write(source, randomBytes(100), { + create: true + }) + + await expect(ipfs.files.cp(source, destination)).to.eventually.be.rejected() + }) + + it('copies a sharded directory to a normal directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + + const normalDir = `dir-${Math.random()}` + const normalDirPath = `/${normalDir}` + + await ipfs.files.mkdir(normalDirPath) + + await ipfs.files.cp(shardedDirPath, normalDirPath) + + const finalShardedDirPath = `${normalDirPath}${shardedDirPath}` + + // should still be a sharded directory + await expect(isShardAtPath(finalShardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(finalShardedDirPath)).type).to.equal('directory') + + const files = await all(ipfs.files.ls(finalShardedDirPath)) + + expect(files.length).to.be.ok() + }) + + it('copies a normal directory to a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + + const normalDir = `dir-${Math.random()}` + const normalDirPath = `/${normalDir}` + + await ipfs.files.mkdir(normalDirPath) + + await ipfs.files.cp(normalDirPath, shardedDirPath) + + const finalDirPath = `${shardedDirPath}${normalDirPath}` + + // should still be a sharded directory + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(finalDirPath)).type).to.equal('directory') + }) + + it('copies a file from a normal directory to a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + + const file = `file-${Math.random()}.txt` + const filePath = `/${file}` + 
const finalFilePath = `${shardedDirPath}/${file}` + + await ipfs.files.write(filePath, Buffer.from([0, 1, 2, 3]), { + create: true + }) + + await ipfs.files.cp(filePath, finalFilePath) + + // should still be a sharded directory + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(finalFilePath)).type).to.equal('file') + }) + + it('copies a file from a sharded directory to a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const otherShardedDirPath = await createShardedDirectory(ipfs) + + const file = `file-${Math.random()}.txt` + const filePath = `${shardedDirPath}/${file}` + const finalFilePath = `${otherShardedDirPath}/${file}` + + await ipfs.files.write(filePath, Buffer.from([0, 1, 2, 3]), { + create: true + }) + + await ipfs.files.cp(filePath, finalFilePath) + + // should still be a sharded directory + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + await expect(isShardAtPath(otherShardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(otherShardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(finalFilePath)).type).to.equal('file') + }) + + it('copies a file from a sharded directory to a normal directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const dir = `dir-${Math.random()}` + const dirPath = `/${dir}` + + const file = `file-${Math.random()}.txt` + const filePath = `${shardedDirPath}/${file}` + const finalFilePath = `${dirPath}/${file}` + + await ipfs.files.write(filePath, Buffer.from([0, 1, 2, 3]), { + create: true + }) + + await ipfs.files.mkdir(dirPath) + + await ipfs.files.cp(filePath, finalFilePath) + + // should still be a sharded directory + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(dirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(finalFilePath)).type).to.equal('file') }) it('should respect metadata when copying files', async function () { - const testSrcPath = `/test-${hat()}` - const testDestPath = `/test-${hat()}` + const testSrcPath = `/test-${nanoid()}` + const testDestPath = `/test-${nanoid()}` const mode = parseInt('0321', 8) const mtime = new Date() const seconds = Math.floor(mtime.getTime() / 1000) @@ -84,8 +391,8 @@ module.exports = (common, options) => { }) it('should respect metadata when copying directories', async function () { - const testSrcPath = `/test-${hat()}` - const testDestPath = `/test-${hat()}` + const testSrcPath = `/test-${nanoid()}` + const testDestPath = `/test-${nanoid()}` const mode = parseInt('0321', 8) const mtime = new Date() const seconds = Math.floor(mtime.getTime() / 1000) @@ -108,7 +415,7 @@ module.exports = (common, options) => { }) it('should respect metadata when copying from outside of mfs', async function () { - const testDestPath = `/test-${hat()}` + const testDestPath = `/test-${nanoid()}` const mode = parseInt('0321', 8) const mtime = new Date() const seconds = Math.floor(mtime.getTime() / 1000) diff --git a/packages/interface-ipfs-core/src/files/flush.js b/packages/interface-ipfs-core/src/files/flush.js index 1912f9b6de..f2b111596c 100644 --- a/packages/interface-ipfs-core/src/files/flush.js +++
b/packages/interface-ipfs-core/src/files/flush.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -23,7 +23,7 @@ module.exports = (common, options) => { after(() => common.clean()) it('should not flush not found file/dir, expect error', async () => { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` try { await ipfs.files.flush(`${testDir}/404`) @@ -40,7 +40,7 @@ }) it('should flush specific dir', async () => { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` await ipfs.files.mkdir(testDir, { parents: true }) diff --git a/packages/interface-ipfs-core/src/files/ls.js b/packages/interface-ipfs-core/src/files/ls.js index dda4816822..57905c7ff0 100644 --- a/packages/interface-ipfs-core/src/files/ls.js +++ b/packages/interface-ipfs-core/src/files/ls.js @@ -1,10 +1,17 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') -const all = require('it-all') -const { fixtures } = require('../utils') const { getDescribe, getIt, expect } = require('../utils/mocha') +const CID = require('cids') +const createShardedDirectory = require('../utils/create-sharded-directory') +const all = require('it-all') +const randomBytes = require('iso-random-stream/src/random') + +const MFS_FILE_TYPES = { + file: 0, + directory: 1, + 'hamt-sharded-directory': 1 +} /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -14,6 +21,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (common, options) => { const describe = getDescribe(options) const it = getIt(options) + const largeFile = randomBytes(490668) describe('.files.ls', function () { this.timeout(40 * 1000) @@ -24,95 +32,169 @@ after(() => common.clean()) - it('should not ls not found file/dir, expect error', () => { - const testDir = `/test-${hat()}` + it('lists the root directory by default', async () => { + const fileName = `small-file-${Math.random()}.txt` + const content = Buffer.from('Hello world') + + await ipfs.files.write(`/${fileName}`, content, { + create: true + }) + + const files = await all(ipfs.files.ls()) + + expect(files).to.have.lengthOf(1).and.to.containSubset([{ + cid: new CID('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ'), + name: fileName, + size: content.length, + type: MFS_FILE_TYPES.file + }]) + }) + + it('refuses to list files with an empty path', async () => { + await expect(all(ipfs.files.ls(''))).to.eventually.be.rejected() + }) - return expect(all(ipfs.files.ls(`${testDir}/404`))).to.eventually.be.rejected() + it('refuses to list files with an invalid path', async () => { + await expect(all(ipfs.files.ls('not-valid'))).to.eventually.be.rejected() }) - it('should ls directory', async () => { - const testDir = `/test-${hat()}` + it('lists files in a directory', async () => { + const dirName = `dir-${Math.random()}` + const fileName = `small-file-${Math.random()}.txt` + const content = Buffer.from('Hello world') - await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) - await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) + await ipfs.files.write(`/${dirName}/${fileName}`, content, { + create: true, + parents: true + }) - const entries = await all(ipfs.files.ls(testDir)) + const files = await
all(ipfs.files.ls(`/${dirName}`)) - expect(entries).to.have.lengthOf(2) - expect(entries[0].name).to.equal('b') - expect(entries[0].type).to.equal(0) - expect(entries[0].size).to.equal(13) - expect(entries[0].cid.toString()).to.equal('QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T') - expect(entries[1].name).to.equal('lv1') - expect(entries[1].type).to.equal(1) - expect(entries[1].size).to.equal(0) - expect(entries[1].cid.toString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + expect(files).to.have.lengthOf(1).and.to.containSubset([{ + cid: new CID('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ'), + name: fileName, + size: content.length, + type: MFS_FILE_TYPES.file + }]) }) - it('should ls directory and include metadata', async () => { - const testDir = `/test-${hat()}` + it('lists a file', async () => { + const fileName = `small-file-${Math.random()}.txt` + const content = Buffer.from('Hello world') - await ipfs.files.mkdir(`${testDir}/lv1`, { - parents: true, - mtime: { - secs: 5 - } + await ipfs.files.write(`/${fileName}`, content, { + create: true }) - await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { + + const files = await all(ipfs.files.ls(`/${fileName}`)) + + expect(files).to.have.lengthOf(1).and.to.containSubset([{ + cid: new CID('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ'), + name: fileName, + size: content.length, + type: MFS_FILE_TYPES.file + }]) + }) + + it('fails to list non-existent file', async () => { + await expect(all(ipfs.files.ls('/i-do-not-exist'))).to.eventually.be.rejected() + }) + + it('lists a raw node', async () => { + const filePath = '/stat/large-file.txt' + + await ipfs.files.write(filePath, largeFile, { create: true, - mtime: { - secs: 5 - } + parents: true, + rawLeaves: true }) - const entries = await all(ipfs.files.ls(testDir, { long: true })) + const stats = await ipfs.files.stat(filePath) + const { value: node } = await ipfs.dag.get(stats.cid) - expect(entries).to.have.lengthOf(2) - expect(entries[0].cid.toString()).to.equal('QmTVnczjg445RUAEYNH1wvhVa2rnPoWMfHMxQc6W7HHoyM') - expect(entries[0].mode).to.equal(0o0644) - expect(entries[0].mtime).to.deep.equal({ - secs: 5, - nsecs: 0 - }) - expect(entries[1].cid.toString()).to.equal('QmXkBjmbtWUxXLa3s541UBSzPgvaAR7b8X3Amcp5D1VKTQ') - expect(entries[1].mode).to.equal(0o0755) - expect(entries[1].mtime).to.deep.equal({ - secs: 5, - nsecs: 0 + expect(node).to.have.nested.property('Links[0].Hash.codec', 'raw') + + const child = node.Links[0] + const files = await all(ipfs.files.ls(`/ipfs/${child.Hash}`)) + + expect(files).to.have.lengthOf(1).and.to.containSubset([{ + cid: child.Hash, + name: child.Hash.toString(), + size: 262144, + type: MFS_FILE_TYPES.file + }]) + }) + + it('lists a raw node in an mfs directory', async () => { + const filePath = '/stat/large-file.txt' + + await ipfs.files.write(filePath, largeFile, { + create: true, + parents: true, + rawLeaves: true }) + + const stats = await ipfs.files.stat(filePath) + const cid = stats.cid + const { value: node } = await ipfs.dag.get(cid) + + expect(node).to.have.nested.property('Links[0].Hash.codec', 'raw') + + const child = node.Links[0] + const dir = `/dir-with-raw-${Math.random()}` + const path = `${dir}/raw-${Math.random()}` + + await ipfs.files.mkdir(dir) + await ipfs.files.cp(`/ipfs/${child.Hash}`, path) + + const files = await all(ipfs.files.ls(`/ipfs/${child.Hash}`)) + + expect(files).to.have.lengthOf(1).and.to.containSubset([{ + cid: child.Hash, + name: child.Hash.toString(), + size: 262144, + type: 
MFS_FILE_TYPES.file + }]) }) - it('should ls from outside of mfs', async () => { - const testFileName = hat() - const [{ - cid - }] = await all(ipfs.add({ path: `/test/${testFileName}`, content: fixtures.smallFile.data })) - const listing = await all(ipfs.files.ls('/ipfs/' + cid)) - expect(listing).to.have.length(1) - expect(listing[0].name).to.equal(cid.toString()) + it('lists the contents of a sharded directory', async () => { + const fileCount = 1001 + const dirPath = await createShardedDirectory(ipfs, fileCount) + const files = await all(ipfs.files.ls(dirPath)) + + expect(files.length).to.equal(fileCount) + + files.forEach(file => { + // should be a file + expect(file.type).to.equal(0) + }) }) - it('should list an empty directory', async () => { - const testDir = `/test-${hat()}` - await ipfs.files.mkdir(testDir) - const contents = await all(ipfs.files.ls(testDir)) + it('lists a file inside a sharded directory directly', async () => { + const dirPath = await createShardedDirectory(ipfs) + const files = await all(ipfs.files.ls(dirPath)) + const filePath = `${dirPath}/${files[0].name}` + + // should be able to ls new file directly + const file = await all(ipfs.files.ls(filePath)) - expect(contents).to.be.an('array').and.to.be.empty() + expect(file).to.have.lengthOf(1).and.to.containSubset([files[0]]) }) - it('should list a file directly', async () => { - const fileName = `single-file-${hat()}.txt` - const filePath = `/${fileName}` - await ipfs.files.write(filePath, Buffer.from('Hello world'), { + it('lists the contents of a directory inside a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const dirPath = `${shardedDirPath}/subdir-${Math.random()}` + const fileName = `small-file-${Math.random()}.txt` + + await ipfs.files.mkdir(`${dirPath}`) + await ipfs.files.write(`${dirPath}/${fileName}`, Buffer.from([0, 1, 2, 3]), { create: true }) - const entries = await all(ipfs.files.ls(filePath)) - expect(entries).to.have.lengthOf(1) - expect(entries[0].name).to.equal(fileName) - expect(entries[0].type).to.equal(0) - expect(entries[0].size).to.equal(11) - expect(entries[0].cid.toString()).to.equal('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ') + const files = await all(ipfs.files.ls(dirPath)) + + expect(files.length).to.equal(1) + expect(files.filter(file => file.name === fileName)).to.be.ok() }) }) } diff --git a/packages/interface-ipfs-core/src/files/mkdir.js b/packages/interface-ipfs-core/src/files/mkdir.js index df30130ecf..5f21161125 100644 --- a/packages/interface-ipfs-core/src/files/mkdir.js +++ b/packages/interface-ipfs-core/src/files/mkdir.js @@ -1,8 +1,12 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') +const multihash = require('multihashes') +const createShardedDirectory = require('../utils/create-sharded-directory') +const all = require('it-all') +const isShardAtPath = require('../utils/is-shard-at-path') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -19,7 +23,7 @@ module.exports = (common, options) => { let ipfs async function testMode (mode, expectedMode) { - const testPath = `/test-${hat()}` + const testPath = `/test-${nanoid()}` await ipfs.files.mkdir(testPath, { mode }) @@ -29,7 +33,7 @@ } async function testMtime (mtime, expectedMtime) { - const testPath = `/test-${hat()}` + const testPath = `/test-${nanoid()}` await ipfs.files.mkdir(testPath, { mtime }) @@
-42,20 +46,135 @@ module.exports = (common, options) => { after(() => common.clean()) - it('should make directory on root', () => { - const testDir = `/test-${hat()}` + it('requires a directory', async () => { + await expect(ipfs.files.mkdir('')).to.eventually.be.rejected() + }) + + it('refuses to create a directory without a leading slash', async () => { + await expect(ipfs.files.mkdir('foo')).to.eventually.be.rejected() + }) + + it('refuses to recreate the root directory when -p is false', async () => { + await expect(ipfs.files.mkdir('/', { + parents: false + })).to.eventually.be.rejected() + }) + + it('refuses to create a nested directory when -p is false', async () => { + await expect(ipfs.files.mkdir('/foo/bar/baz', { + parents: false + })).to.eventually.be.rejected() + }) + + it('creates a directory', async () => { + const path = '/foo' + + await ipfs.files.mkdir(path, {}) + + const stats = await ipfs.files.stat(path) + expect(stats.type).to.equal('directory') + + const files = await all(ipfs.files.ls(path)) + + expect(files.length).to.equal(0) + }) + + it('refuses to create a directory that already exists', async () => { + const path = '/qux/quux/quuux' + + await ipfs.files.mkdir(path, { + parents: true + }) + + await expect(ipfs.files.mkdir(path, { + parents: false + })).to.eventually.be.rejected() + }) + + it('does not error when creating a directory that already exists and parents is true', async () => { + const path = '/qux/quux/quuux' + + await ipfs.files.mkdir(path, { + parents: true + }) + + await ipfs.files.mkdir(path, { + parents: true + }) + }) - return ipfs.files.mkdir(testDir) + it('creates a nested directory when -p is true', async () => { + const path = '/foo/bar/baz' + + await ipfs.files.mkdir(path, { + parents: true + }) + + const files = await all(ipfs.files.ls(path)) + + expect(files.length).to.equal(0) + }) + + it('creates nested directories', async () => { + await ipfs.files.mkdir('/nested-dir') + await ipfs.files.mkdir('/nested-dir/baz') + + const files = await all(ipfs.files.ls('/nested-dir')) + + expect(files.length).to.equal(1) + }) + + it('creates a nested directory with a different CID version to the parent', async () => { + const directory = `cid-versions-${Math.random()}` + const directoryPath = `/${directory}` + const subDirectory = `cid-versions-${Math.random()}` + const subDirectoryPath = `${directoryPath}/${subDirectory}` + + await ipfs.files.mkdir(directoryPath, { + cidVersion: 0 + }) + + await expect(ipfs.files.stat(directoryPath)).to.eventually.have.nested.property('cid.version', 0) + + await ipfs.files.mkdir(subDirectoryPath, { + cidVersion: 1 + }) + + await expect(ipfs.files.stat(subDirectoryPath)).to.eventually.have.nested.property('cid.version', 1) }) - it('should make directory and its parents', () => { - const testDir = `/test-${hat()}` + it('creates a nested directory with a different hash function to the parent', async () => { + const directory = `cid-versions-${Math.random()}` + const directoryPath = `/${directory}` + const subDirectory = `cid-versions-${Math.random()}` + const subDirectoryPath = `${directoryPath}/${subDirectory}` + + await ipfs.files.mkdir(directoryPath, { + cidVersion: 0 + }) + + await expect(ipfs.files.stat(directoryPath)).to.eventually.have.nested.property('cid.version', 0) + + await ipfs.files.mkdir(subDirectoryPath, { + cidVersion: 1, + hashAlg: 'sha2-512' + }) - return ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }) + await 
expect(ipfs.files.stat(subDirectoryPath)).to.eventually.have.nested.property('cid.multihash') + .that.satisfies(hash => multihash.decode(hash).name === 'sha2-512') }) - it('should not make already existent directory', () => { - return expect(ipfs.files.mkdir('/')).to.eventually.be.rejected() + it('makes a directory inside a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const dirPath = `${shardedDirPath}/subdir-${Math.random()}` + + await ipfs.files.mkdir(`${dirPath}`) + + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + await expect(ipfs.files.stat(shardedDirPath)).to.eventually.have.property('type', 'directory') + + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.false() + await expect(ipfs.files.stat(dirPath)).to.eventually.have.property('type', 'directory') }) it('should make directory and have default mode', async function () { diff --git a/packages/interface-ipfs-core/src/files/mv.js b/packages/interface-ipfs-core/src/files/mv.js index d5c9bcc62a..bfa7f8f8e7 100644 --- a/packages/interface-ipfs-core/src/files/mv.js +++ b/packages/interface-ipfs-core/src/files/mv.js @@ -1,8 +1,11 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') +const createShardedDirectory = require('../utils/create-sharded-directory') +const concat = require('it-concat') +const randomBytes = require('iso-random-stream/src/random') +const isShardAtPath = require('../utils/is-shard-at-path') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -26,26 +29,242 @@ module.exports = (common, options) => { }) after(() => common.clean()) - it('should not move not found file/dir, expect error', () => { - const testDir = `/test-${hat()}` + it('refuses to move files without arguments', async () => { + await expect(ipfs.files.mv()).to.eventually.be.rejected() + }) - return expect(ipfs.files.mv(`${testDir}/404`, `${testDir}/a`)).to.eventually.be.rejected() + it('refuses to move files without enough arguments', async () => { + await expect(ipfs.files.mv('/destination')).to.eventually.be.rejected() }) - it('should move file, expect no error', async () => { - const testDir = `/test-${hat()}` + it('moves a file', async () => { + const source = `/source-file-${Math.random()}.txt` + const destination = `/dest-file-${Math.random()}.txt` + const data = randomBytes(500) + + await ipfs.files.write(source, data, { + create: true + }) + await ipfs.files.mv(source, destination) + + const buffer = await concat(ipfs.files.read(destination)) + expect(buffer.slice()).to.deep.equal(data) + + await expect(ipfs.files.stat(source)).to.eventually.be.rejectedWith(/does not exist/) + }) + + it('moves a directory', async () => { + const source = `/source-directory-${Math.random()}` + const destination = `/dest-directory-${Math.random()}` + + await ipfs.files.mkdir(source) + await ipfs.files.mv(source, destination, { + recursive: true + }) + const stats = await ipfs.files.stat(destination) + + expect(stats.type).to.equal('directory') + + try { + await ipfs.files.stat(source) + throw new Error('Directory was copied but not removed') + } catch (err) { + expect(err.message).to.contain('does not exist') + } + }) + + it('moves directories recursively', async () => { + const directory = `source-directory-${Math.random()}` + const subDirectory = `/source-directory-${Math.random()}` + const source = `/${directory}${subDirectory}` + const destination = `/dest-directory-${Math.random()}` + + await
ipfs.files.mkdir(source, { + parents: true + }) + await ipfs.files.mv(`/${directory}`, destination, { + recursive: true + }) + + const stats = await ipfs.files.stat(destination) + expect(stats.type).to.equal('directory') - await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }) - await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) + const subDirectoryStats = await ipfs.files.stat(`${destination}${subDirectory}`) + expect(subDirectoryStats.type).to.equal('directory') - await ipfs.files.mv(`${testDir}/a`, `${testDir}/c`) + try { + await ipfs.files.stat(source) + throw new Error('Directory was copied but not removed') + } catch (err) { + expect(err.message).to.contain('does not exist') + } }) - it('should move dir, expect no error', async () => { - const testDir = `/test-${hat()}` + it('moves a sharded directory to a normal directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const dirPath = `/dir-${Math.random()}` + const finalShardedDirPath = `${dirPath}${shardedDirPath}` + + await ipfs.files.mkdir(dirPath) + await ipfs.files.mv(shardedDirPath, dirPath) + + await expect(isShardAtPath(finalShardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(finalShardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(dirPath)).type).to.equal('directory') + + try { + await ipfs.files.stat(shardedDirPath) + throw new Error('Dir was not removed') + } catch (error) { + expect(error.message).to.contain('does not exist') + } + }) + + it('moves a normal directory to a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const dirPath = `/dir-${Math.random()}` + const finalDirPath = `${shardedDirPath}${dirPath}` + + await ipfs.files.mkdir(dirPath) + await ipfs.files.mv(dirPath, shardedDirPath) + + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(finalDirPath)).type).to.equal('directory') + + try { + await ipfs.files.stat(dirPath) + throw new Error('Dir was not removed') + } catch (error) { + expect(error.message).to.contain('does not exist') + } + }) + + it('moves a sharded directory to a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const otherShardedDirPath = await createShardedDirectory(ipfs) + const finalShardedDirPath = `${shardedDirPath}${otherShardedDirPath}` + + await ipfs.files.mv(otherShardedDirPath, shardedDirPath) + + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + await expect(isShardAtPath(finalShardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(finalShardedDirPath)).type).to.equal('directory') + + try { + await ipfs.files.stat(otherShardedDirPath) + throw new Error('Sharded dir was not removed') + } catch (error) { + expect(error.message).to.contain('does not exist') + } + }) + + it('moves a file from a normal directory to a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const dirPath = `/dir-${Math.random()}` + const file = `file-${Math.random()}.txt` + const filePath = `${dirPath}/${file}` + const finalFilePath = `${shardedDirPath}/${file}` + + await ipfs.files.mkdir(dirPath) + await ipfs.files.write(filePath, Buffer.from([0, 1, 2, 3, 4]), { + create: true + }) + + await 
ipfs.files.mv(filePath, shardedDirPath) + + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(finalFilePath)).type).to.equal('file') + + try { + await ipfs.files.stat(filePath) + throw new Error('File was not removed') + } catch (error) { + expect(error.message).to.contain('does not exist') + } + }) + + it('moves a file from a sharded directory to a normal directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const dirPath = `/dir-${Math.random()}` + const file = `file-${Math.random()}.txt` + const filePath = `${shardedDirPath}/${file}` + const finalFilePath = `${dirPath}/${file}` + + await ipfs.files.mkdir(dirPath) + await ipfs.files.write(filePath, Buffer.from([0, 1, 2, 3, 4]), { + create: true + }) + + await ipfs.files.mv(filePath, dirPath) + + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(finalFilePath)).type).to.equal('file') + expect((await ipfs.files.stat(dirPath)).type).to.equal('directory') + + try { + await ipfs.files.stat(filePath) + throw new Error('File was not removed') + } catch (error) { + expect(error.message).to.contain('does not exist') + } + }) + + it('moves a file from a sharded directory to a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const otherShardedDirPath = await createShardedDirectory(ipfs) + const file = `file-${Math.random()}.txt` + const filePath = `${shardedDirPath}/${file}` + const finalFilePath = `${otherShardedDirPath}/${file}` + + await ipfs.files.write(filePath, Buffer.from([0, 1, 2, 3, 4]), { + create: true + }) + + await ipfs.files.mv(filePath, otherShardedDirPath) + + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(finalFilePath)).type).to.equal('file') + await expect(isShardAtPath(otherShardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(otherShardedDirPath)).type).to.equal('directory') + + try { + await ipfs.files.stat(filePath) + throw new Error('File was not removed') + } catch (error) { + expect(error.message).to.contain('does not exist') + } + }) + + it('moves a file from a sub-shard of a sharded directory to a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const otherShardedDirPath = await createShardedDirectory(ipfs) + const file = 'file-1a.txt' + const filePath = `${shardedDirPath}/${file}` + const finalFilePath = `${otherShardedDirPath}/${file}` + + await ipfs.files.write(filePath, Buffer.from([0, 1, 2, 3, 4]), { + create: true + }) + + await ipfs.files.mv(filePath, otherShardedDirPath) + + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(shardedDirPath)).type).to.equal('directory') + expect((await ipfs.files.stat(finalFilePath)).type).to.equal('file') + await expect(isShardAtPath(otherShardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(otherShardedDirPath)).type).to.equal('directory') - await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }) - await ipfs.files.mv('/test/lv1/lv2', '/test/lv1/lv4') + try { + await ipfs.files.stat(filePath) + throw new Error('File was not removed') + } catch 
(error) { + expect(error.message).to.contain('does not exist') + } }) }) } diff --git a/packages/interface-ipfs-core/src/files/read.js b/packages/interface-ipfs-core/src/files/read.js index ed97418b67..bcd30e528e 100644 --- a/packages/interface-ipfs-core/src/files/read.js +++ b/packages/interface-ipfs-core/src/files/read.js @@ -1,11 +1,12 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') const concat = require('it-concat') const all = require('it-all') const { fixtures } = require('../utils') const { getDescribe, getIt, expect } = require('../utils/mocha') +const createShardedDirectory = require('../utils/create-sharded-directory') +const randomBytes = require('iso-random-stream/src/random') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -15,6 +16,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (common, options) => { const describe = getDescribe(options) const it = getIt(options) + const smallFile = randomBytes(13) describe('.files.read', function () { this.timeout(40 * 1000) @@ -25,24 +27,126 @@ module.exports = (common, options) => { after(() => common.clean()) - it('should not read not found, expect error', () => { - const testDir = `/test-${hat()}` + it('reads a small file', async () => { + const filePath = '/small-file.txt' - return expect(ipfs.files.cp(`${testDir}/c`, `${testDir}/b`)).to.eventually.be.rejected - .and.be.an.instanceOf(Error) - .and.to.have.property('message') - .that.include('does not exist') + await ipfs.files.write(filePath, smallFile, { + create: true + }) + + const buffer = await concat(ipfs.files.read(filePath)) + + expect(buffer.slice()).to.deep.equal(smallFile) + }) + + it('reads a file with an offset', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = randomBytes(100) + const offset = 10 + + await ipfs.files.write(path, data, { + create: true + }) + + const buffer = await concat(ipfs.files.read(path, { + offset + })) + + expect(buffer.slice()).to.deep.equal(data.slice(offset)) + }) + + it('reads a file with a length', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = randomBytes(100) + const length = 10 + + await ipfs.files.write(path, data, { + create: true + }) + + const buffer = await concat(ipfs.files.read(path, { + length + })) + + expect(buffer.slice()).to.deep.equal(data.slice(0, length)) + }) + + it('reads a file with a legacy count argument', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = randomBytes(100) + const length = 10 + + await ipfs.files.write(path, data, { + create: true + }) + + const buffer = await concat(ipfs.files.read(path, { + count: length + })) + + expect(buffer.slice()).to.deep.equal(data.slice(0, length)) + }) + + it('reads a file with an offset and a length', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = randomBytes(100) + const offset = 10 + const length = 10 + + await ipfs.files.write(path, data, { + create: true + }) + + const buffer = await concat(ipfs.files.read(path, { + offset, + length + })) + + expect(buffer.slice()).to.deep.equal(data.slice(offset, offset + length)) + }) + + it('reads a file with an offset and a legacy count argument', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = randomBytes(100) + const offset = 10 + const length = 10 + + await ipfs.files.write(path, data, { + create: true + }) + + const buffer = await concat(ipfs.files.read(path, { + offset, + count: length + })) + + 
expect(buffer.slice()).to.deep.equal(data.slice(offset, offset + length)) + }) + + it('refuses to read a directory', async () => { + const path = '/' + + await expect(concat(ipfs.files.read(path))).to.eventually.be.rejectedWith(/not a file/) + }) + + it('refuses to read a non-existent file', async () => { + const path = `/file-${Math.random()}.txt` + + await expect(concat(ipfs.files.read(path))).to.eventually.be.rejectedWith(/does not exist/) }) - it('should read file', async () => { - const testDir = `/test-${hat()}` + it('reads file from inside a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const filePath = `${shardedDirPath}/file-${Math.random()}.txt` + const content = Buffer.from([0, 1, 2, 3, 4]) - await ipfs.files.mkdir(testDir) - await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) + await ipfs.files.write(filePath, content, { + create: true + }) - const buf = await concat(ipfs.files.read(`${testDir}/a`)) + const buffer = await concat(ipfs.files.read(filePath)) - expect(buf.slice()).to.eql(Buffer.from('Hello, world!')) + expect(buffer.slice()).to.deep.equal(content) }) it('should read from outside of mfs', async () => { diff --git a/packages/interface-ipfs-core/src/files/rm.js b/packages/interface-ipfs-core/src/files/rm.js index 21223fae3a..d49423e32f 100644 --- a/packages/interface-ipfs-core/src/files/rm.js +++ b/packages/interface-ipfs-core/src/files/rm.js @@ -1,9 +1,12 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') -const all = require('it-all') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') +const createShardedDirectory = require('../utils/create-sharded-directory') +const createTwoShards = require('../utils/create-two-shards') +const randomBytes = require('iso-random-stream/src/random') +const isShardAtPath = require('../utils/is-shard-at-path') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -24,32 +27,234 @@ module.exports = (common, options) => { after(() => common.clean()) it('should not remove not found file/dir, expect error', () => { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` return expect(ipfs.files.rm(`${testDir}/a`)).to.eventually.be.rejected() }) - it('should remove file, expect no error', async () => { - const testDir = `/test-${hat()}` + it('refuses to remove files without arguments', async () => { + await expect(ipfs.files.rm()).to.eventually.be.rejected() + }) + + it('refuses to remove the root path', async () => { + await expect(ipfs.files.rm('/')).to.eventually.be.rejected() + }) + + it('refuses to remove a directory without the recursive flag', async () => { + const path = `/directory-${Math.random()}.txt` + + await ipfs.files.mkdir(path) + + await expect(ipfs.files.rm(path)).to.eventually.be.rejectedWith(/use -r to remove directories/) + }) + + it('refuses to remove a non-existent file', async () => { + await expect(ipfs.files.rm(`/file-${Math.random()}`)).to.eventually.be.rejectedWith(/does not exist/) + }) + + it('removes a file', async () => { + const file = `/some-file-${Math.random()}.txt` + + await ipfs.files.write(file, randomBytes(100), { + create: true, + parents: true + }) + + await ipfs.files.rm(file) + + await expect(ipfs.files.stat(file)).to.eventually.be.rejectedWith(/does not exist/) + }) + + it('removes multiple files', async () => { + const file1 = `/some-file-${Math.random()}.txt` + const file2 = `/some-file-${Math.random()}.txt` + + await 
ipfs.files.write(file1, randomBytes(100), { + create: true, + parents: true + }) + await ipfs.files.write(file2, randomBytes(100), { + create: true, + parents: true + }) + await ipfs.files.rm(file1, file2) + + await expect(ipfs.files.stat(file1)).to.eventually.be.rejectedWith(/does not exist/) + await expect(ipfs.files.stat(file2)).to.eventually.be.rejectedWith(/does not exist/) + }) + + it('removes a directory', async () => { + const directory = `/directory-${Math.random()}` + + await ipfs.files.mkdir(directory) + await ipfs.files.rm(directory, { + recursive: true + }) + + await expect(ipfs.files.stat(directory)).to.eventually.be.rejectedWith(/does not exist/) + }) + + it('recursively removes a directory', async () => { + const directory = `/directory-${Math.random()}` + const subdirectory = `/directory-${Math.random()}` + const path = `${directory}${subdirectory}` + + await ipfs.files.mkdir(path, { + parents: true + }) + await ipfs.files.rm(directory, { + recursive: true + }) + + await expect(ipfs.files.stat(path)).to.eventually.be.rejectedWith(/does not exist/) + await expect(ipfs.files.stat(directory)).to.eventually.be.rejectedWith(/does not exist/) + }) + + it('recursively removes a directory with files in it', async () => { + const directory = `/directory-${Math.random()}` + const file = `${directory}/some-file-${Math.random()}.txt` + + await ipfs.files.write(file, randomBytes(100), { + create: true, + parents: true + }) + await ipfs.files.rm(directory, { + recursive: true + }) + + await expect(ipfs.files.stat(file)).to.eventually.be.rejectedWith(/does not exist/) + await expect(ipfs.files.stat(directory)).to.eventually.be.rejectedWith(/does not exist/) + }) + + it('recursively removes a sharded directory inside a normal directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const dir = `dir-${Math.random()}` + const dirPath = `/${dir}` + + await ipfs.files.mkdir(dirPath) - await ipfs.files.mkdir(testDir, { parents: true }) - await ipfs.files.write(`${testDir}/c`, Buffer.from('Hello, world!'), { create: true }) + await ipfs.files.mv(shardedDirPath, dirPath) - await ipfs.files.rm(`${testDir}/c`) + const finalShardedDirPath = `${dirPath}${shardedDirPath}` - const contents = await all(ipfs.files.ls(testDir)) - expect(contents).to.be.an('array').and.to.be.empty() + await expect(isShardAtPath(finalShardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(finalShardedDirPath)).type).to.equal('directory') + + await ipfs.files.rm(dirPath, { + recursive: true + }) + + await expect(ipfs.files.stat(dirPath)).to.eventually.be.rejectedWith(/does not exist/) + await expect(ipfs.files.stat(shardedDirPath)).to.eventually.be.rejectedWith(/does not exist/) + }) + + it('recursively removes a sharded directory inside a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const otherDirPath = await createShardedDirectory(ipfs) + + await ipfs.files.mv(shardedDirPath, otherDirPath) + + const finalShardedDirPath = `${otherDirPath}${shardedDirPath}` + + await expect(isShardAtPath(finalShardedDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(finalShardedDirPath)).type).to.equal('directory') + await expect(isShardAtPath(otherDirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(otherDirPath)).type).to.equal('directory') + + await ipfs.files.rm(otherDirPath, { + recursive: true + }) + + await expect(ipfs.files.stat(otherDirPath)).to.eventually.be.rejectedWith(/does not exist/) + 
await expect(ipfs.files.stat(finalShardedDirPath)).to.eventually.be.rejectedWith(/does not exist/) + }) + + it('results in the same hash as a sharded directory created by the importer when removing a file', async function () { + this.timeout(60000) + + const { + nextFile, + dirWithAllFiles, + dirWithSomeFiles, + dirPath + } = await createTwoShards(ipfs, 1001) + + await ipfs.files.cp(`/ipfs/${dirWithAllFiles}`, dirPath) + + await ipfs.files.rm(nextFile.path) + + const stats = await ipfs.files.stat(dirPath) + const updatedDirCid = stats.cid + + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(dirPath)).type).to.equal('directory') + expect(updatedDirCid.toString()).to.deep.equal(dirWithSomeFiles.toString()) }) - it('should remove dir, expect no error', async () => { - const testDir = `/test-${hat()}` + it('results in the same hash as a sharded directory created by the importer when removing a subshard', async function () { + this.timeout(60000) + + const { + nextFile, + dirWithAllFiles, + dirWithSomeFiles, + dirPath + } = await createTwoShards(ipfs, 31) + + await ipfs.files.cp(`/ipfs/${dirWithAllFiles}`, dirPath) + + await ipfs.files.rm(nextFile.path) + + const stats = await ipfs.files.stat(dirPath) + const updatedDirCid = stats.cid + + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(dirPath)).type).to.equal('directory') + expect(updatedDirCid.toString()).to.deep.equal(dirWithSomeFiles.toString()) + }) + + it('results in the same hash as a sharded directory created by the importer when removing a file from a subshard of a subshard', async function () { + this.timeout(60000) + + const { + nextFile, + dirWithAllFiles, + dirWithSomeFiles, + dirPath + } = await createTwoShards(ipfs, 2187) + + await ipfs.files.cp(`/ipfs/${dirWithAllFiles}`, dirPath) + + await ipfs.files.rm(nextFile.path) + + const stats = await ipfs.files.stat(dirPath) + const updatedDirCid = stats.cid + + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(dirPath)).type).to.equal('directory') + expect(updatedDirCid.toString()).to.deep.equal(dirWithSomeFiles.toString()) + }) + + it('results in the same hash as a sharded directory created by the importer when removing a subshard of a subshard', async function () { + this.timeout(60000) + + const { + nextFile, + dirWithAllFiles, + dirWithSomeFiles, + dirPath + } = await createTwoShards(ipfs, 139) + + await ipfs.files.cp(`/ipfs/${dirWithAllFiles}`, dirPath) - await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }) + await ipfs.files.rm(nextFile.path) - await ipfs.files.rm(`${testDir}/lv1/lv2`, { recursive: true }) + const stats = await ipfs.files.stat(dirPath) + const updatedDirCid = stats.cid - const lv1Contents = await all(ipfs.files.ls(`${testDir}/lv1`)) - expect(lv1Contents).to.be.an('array').and.to.be.empty() + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true() + expect((await ipfs.files.stat(dirPath)).type).to.equal('directory') + expect(updatedDirCid.toString()).to.deep.equal(dirWithSomeFiles.toString()) }) }) } diff --git a/packages/interface-ipfs-core/src/files/stat.js b/packages/interface-ipfs-core/src/files/stat.js index 51f32f22ca..3a1908c35d 100644 --- a/packages/interface-ipfs-core/src/files/stat.js +++ b/packages/interface-ipfs-core/src/files/stat.js @@ -1,10 +1,16 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const all = require('it-all') const 
{ fixtures } = require('../utils') const { getDescribe, getIt, expect } = require('../utils/mocha') +const createShardedDirectory = require('../utils/create-sharded-directory') +const CID = require('cids') +const mh = require('multihashes') +const Block = require('ipfs-block') +const randomBytes = require('iso-random-stream/src/random') +const isShardAtPath = require('../utils/is-shard-at-path') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -14,6 +20,8 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (common, options) => { const describe = getDescribe(options) const it = getIt(options) + const smallFile = randomBytes(13) + const largeFile = randomBytes(490668) describe('.files.stat', function () { this.timeout(40 * 1000) @@ -30,35 +38,170 @@ module.exports = (common, options) => { after(() => common.clean()) - it('should not stat not found file/dir, expect error', function () { - const testDir = `/test-${hat()}` + it('refuses to stat files with an empty path', async () => { + await expect(ipfs.files.stat('')).to.be.rejected() + }) - return expect(ipfs.files.stat(`${testDir}/404`)).to.eventually.be.rejected() + it('refuses to list files with an invalid path', async () => { + await expect(ipfs.files.stat('not-valid')).to.be.rejectedWith(/paths must start with a leading slash/) }) - it('should stat file', async function () { - const testDir = `/test-${hat()}` + it('fails to stat non-existent file', async () => { + await expect(ipfs.files.stat('/i-do-not-exist')).to.be.rejectedWith(/does not exist/) + }) - await ipfs.files.mkdir(testDir, { parents: true }) - await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) + it('stats an empty directory', async () => { + const path = `/directory-${Math.random()}` - const stat = await ipfs.files.stat(`${testDir}/b`) - stat.cid = stat.cid.toString() + await ipfs.files.mkdir(path) - expect(stat).to.include({ - type: 'file', - blocks: 1, - size: 13, - cid: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', + await expect(ipfs.files.stat(path)).to.eventually.include({ + size: 0, + cumulativeSize: 4, + blocks: 0, + type: 'directory' + }) + }) + + it.skip('computes how much of the DAG is local', async () => { + + }) + + it('stats a small file', async () => { + const filePath = `/stat-${Math.random()}/small-file-${Math.random()}.txt` + + await ipfs.files.write(filePath, smallFile, { + create: true, + parents: true + }) + + await expect(ipfs.files.stat(filePath)).to.eventually.include({ + size: smallFile.length, cumulativeSize: 71, - withLocality: false + blocks: 1, + type: 'file' }) - expect(stat.local).to.be.undefined() - expect(stat.sizeLocal).to.be.undefined() + }) + + it('stats a large file', async () => { + const filePath = `/stat-${Math.random()}/large-file-${Math.random()}.txt` + + await ipfs.files.write(filePath, largeFile, { + create: true, + parents: true + }) + + await expect(ipfs.files.stat(filePath)).to.eventually.include({ + size: largeFile.length, + cumulativeSize: 490800, + blocks: 2, + type: 'file' + }) + }) + + it('stats a raw node', async () => { + const filePath = `/stat-${Math.random()}/large-file-${Math.random()}.txt` + + await ipfs.files.write(filePath, largeFile, { + create: true, + parents: true, + rawLeaves: true + }) + + const stats = await ipfs.files.stat(filePath) + const { value: node } = await ipfs.dag.get(stats.cid) + + expect(node).to.have.nested.property('Links[0].Hash.codec', 'raw') + + const child = node.Links[0] + + const rawNodeStats = 
await ipfs.files.stat(`/ipfs/${child.Hash}`) + + expect(rawNodeStats.cid.toString()).to.equal(child.Hash.toString()) + expect(rawNodeStats.type).to.equal('file') // this is what go does + }) + + it('stats a raw node in an mfs directory', async () => { + const filePath = `/stat-${Math.random()}/large-file-${Math.random()}.txt` + + await ipfs.files.write(filePath, largeFile, { + create: true, + parents: true, + rawLeaves: true + }) + + const stats = await ipfs.files.stat(filePath) + const { value: node } = await ipfs.dag.get(stats.cid) + const child = node.Links[0] + + expect(child.Hash.codec).to.equal('raw') + + const dir = `/dir-with-raw-${Math.random()}` + const path = `${dir}/raw-${Math.random()}` + + await ipfs.files.mkdir(dir) + await ipfs.files.cp(`/ipfs/${child.Hash}`, path) + + const rawNodeStats = await ipfs.files.stat(path) + + expect(rawNodeStats.cid.toString()).to.equal(child.Hash.toString()) + expect(rawNodeStats.type).to.equal('file') // this is what go does + }) + + it('stats a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + + const stats = await ipfs.files.stat(`${shardedDirPath}`) + + expect(stats.type).to.equal('directory') + expect(stats.size).to.equal(0) + }) + + it('stats a file inside a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const files = [] + + for await (const file of ipfs.files.ls(`${shardedDirPath}`)) { + files.push(file) + } + + const stats = await ipfs.files.stat(`${shardedDirPath}/${files[0].name}`) + + expect(stats.type).to.equal('file') + expect(stats.size).to.equal(7) + }) + + it('stats a dag-cbor node', async () => { + const path = '/cbor.node' + const node = {} + const cid = await ipfs.dag.put(node, { + format: 'dag-cbor', + hashAlg: 'sha2-256' + }) + await ipfs.files.cp(`/ipfs/${cid}`, path) + + const stats = await ipfs.files.stat(path) + + expect(stats.cid.toString()).to.equal(cid.toString()) + }) + + it('stats an identity CID', async () => { + const data = Buffer.from('derp') + const path = `/test-${nanoid()}/identity.node` + const cid = new CID(1, 'identity', mh.encode(data, 'identity')) + await ipfs.block.put(new Block(data, cid)) + await ipfs.files.cp(`/ipfs/${cid}`, path, { + parents: true + }) + + const stats = await ipfs.files.stat(path) + + expect(stats.cid.toString()).to.equal(cid.toString()) + expect(stats).to.have.property('size', data.length) }) it('should stat file with mode', async function () { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` await ipfs.files.mkdir(testDir, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) @@ -71,7 +214,7 @@ module.exports = (common, options) => { }) it('should stat file with mtime', async function () { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` await ipfs.files.mkdir(testDir, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { @@ -93,7 +236,7 @@ module.exports = (common, options) => { }) it('should stat dir', async function () { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` await ipfs.files.mkdir(testDir, { parents: true }) await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) @@ -112,7 +255,7 @@ module.exports = (common, options) => { }) it('should stat dir with mode', async function () { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` await ipfs.files.mkdir(testDir, { 
parents: true }) const stat = await ipfs.files.stat(testDir) @@ -123,7 +266,7 @@ module.exports = (common, options) => { }) it('should stat dir with mtime', async function () { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` await ipfs.files.mkdir(testDir, { parents: true, @@ -144,7 +287,7 @@ module.exports = (common, options) => { }) it('should stat sharded dir with mode', async function () { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` await ipfs.files.mkdir(testDir, { parents: true }) await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { @@ -154,14 +297,15 @@ module.exports = (common, options) => { const stat = await ipfs.files.stat(testDir) - expect(stat).to.have.property('type', 'hamt-sharded-directory') + await expect(isShardAtPath(testDir, ipfs)).to.eventually.be.true() + expect(stat).to.have.property('type', 'directory') expect(stat).to.include({ mode: 0o755 }) }) it('should stat sharded dir with mtime', async function () { - const testDir = `/test-${hat()}` + const testDir = `/test-${nanoid()}` await ipfs.files.mkdir(testDir, { parents: true, @@ -177,7 +321,8 @@ module.exports = (common, options) => { const stat = await ipfs.files.stat(testDir) - expect(stat).to.have.property('type', 'hamt-sharded-directory') + await expect(isShardAtPath(testDir, ipfs)).to.eventually.be.true() + expect(stat).to.have.property('type', 'directory') expect(stat).to.deep.include({ mtime: { secs: 5, diff --git a/packages/interface-ipfs-core/src/files/touch.js b/packages/interface-ipfs-core/src/files/touch.js index aec4c76c9b..6d5fb9c743 100644 --- a/packages/interface-ipfs-core/src/files/touch.js +++ b/packages/interface-ipfs-core/src/files/touch.js @@ -1,9 +1,10 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') const delay = require('delay') +const concat = require('it-concat') module.exports = (common, options) => { const describe = getDescribe(options) @@ -15,7 +16,7 @@ module.exports = (common, options) => { let ipfs async function testMtime (mtime, expectedMtime) { - const testPath = `/test-${hat()}` + const testPath = `/test-${nanoid()}` await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true @@ -38,7 +39,7 @@ module.exports = (common, options) => { it('should have default mtime', async function () { this.slow(5 * 1000) - const testPath = `/test-${hat()}` + const testPath = `/test-${nanoid()}` await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true @@ -58,7 +59,7 @@ module.exports = (common, options) => { it('should update file mtime', async function () { this.slow(5 * 1000) - const testPath = `/test-${hat()}` + const testPath = `/test-${nanoid()}` const mtime = new Date() const seconds = Math.floor(mtime.getTime() / 1000) @@ -75,7 +76,7 @@ module.exports = (common, options) => { it('should update directory mtime', async function () { this.slow(5 * 1000) - const testPath = `/test-${hat()}` + const testPath = `/test-${nanoid()}` const mtime = new Date() const seconds = Math.floor(mtime.getTime() / 1000) @@ -90,6 +91,38 @@ module.exports = (common, options) => { expect(stat2).to.have.nested.property('mtime.secs').that.is.greaterThan(seconds) }) + it('should update the mtime for a hamt-sharded-directory', async () => { + const path = `/foo-${Math.random()}` + + await ipfs.files.mkdir(path, { + mtime: new Date() + }) + await ipfs.files.write(`${path}/foo.txt`, 
Buffer.from('Hello world'), { + create: true, + shardSplitThreshold: 0 + }) + const originalMtime = (await ipfs.files.stat(path)).mtime + await delay(1000) + await ipfs.files.touch(path, { + flush: true + }) + + const updatedMtime = (await ipfs.files.stat(path)).mtime + expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) + }) + + it('should create an empty file', async () => { + const path = `/foo-${Math.random()}` + + await ipfs.files.touch(path, { + flush: true + }) + + const buffer = await concat(ipfs.files.read(path)) + + expect(buffer.slice()).to.deep.equal(Buffer.from([])) + }) + it('should set mtime as Date', async function () { await testMtime(new Date(5000), { secs: 5, diff --git a/packages/interface-ipfs-core/src/files/write.js b/packages/interface-ipfs-core/src/files/write.js index 5125fe2a5b..e8484ecedc 100644 --- a/packages/interface-ipfs-core/src/files/write.js +++ b/packages/interface-ipfs-core/src/files/write.js @@ -1,8 +1,24 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') +const { isNode } = require('ipfs-utils/src/env') +const multihash = require('multihashes') +const traverseLeafNodes = require('../utils/traverse-leaf-nodes') +const createShardedDirectory = require('../utils/create-sharded-directory') +const createTwoShards = require('../utils/create-two-shards') +const randomBytes = require('iso-random-stream/src/random') +const all = require('it-all') +const concat = require('it-concat') +const isShardAtPath = require('../utils/is-shard-at-path') + +let fs, tempWrite + +if (isNode) { + fs = require('fs') + tempWrite = require('temp-write') +} /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -12,6 +28,38 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (common, options) => { const describe = getDescribe(options) const it = getIt(options) + const smallFile = randomBytes(13) + const largeFile = randomBytes(490668) + + const runTest = (fn) => { + const iterations = 5 + const files = [{ + type: 'Small file', + path: `/small-file-${Math.random()}.txt`, + content: smallFile, + contentSize: smallFile.length + }, { + type: 'Large file', + path: `/large-file-${Math.random()}.jpg`, + content: largeFile, + contentSize: largeFile.length + }, { + type: 'Really large file', + path: `/really-large-file-${Math.random()}.jpg`, + content: { + [Symbol.asyncIterator]: function * () { + for (let i = 0; i < iterations; i++) { + yield largeFile + } + } + }, + contentSize: largeFile.length * iterations + }] + + files.forEach((file) => { + fn(file) + }) + } describe('.files.write', function () { this.timeout(40 * 1000) @@ -19,7 +67,7 @@ module.exports = (common, options) => { let ipfs async function testMode (mode, expectedMode) { - const testPath = `/test-${hat()}` + const testPath = `/test-${nanoid()}` await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true, @@ -32,7 +80,7 @@ module.exports = (common, options) => { } async function testMtime (mtime, expectedMtime) { - const testPath = `/test-${hat()}` + const testPath = `/test-${nanoid()}` await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true, @@ -44,32 +92,741 @@ module.exports = (common, options) => { expect(stats).to.have.deep.property('mtime', expectedMtime) } - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { + ipfs = (await common.spawn()).api + }) after(() => common.clean()) - 
it('should not write to non existent file, expect error', function () { - const testDir = `/test-${hat()}` + // TODO: streaming request errors do not work over http + it.skip('explodes if it cannot convert content to a source', async () => { + await expect(ipfs.files.write('/foo-bad-source', -1, { + create: true + })).to.eventually.be.rejectedWith(/unexpected input/) + }) + + it('explodes if given an invalid path', async () => { + await expect(ipfs.files.write('foo-no-slash', null, { + create: true + })).to.eventually.be.rejected() + }) - return expect(ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'))).to.eventually.be.rejected() + it('explodes if given a negative offset', async () => { + await expect(ipfs.files.write('/foo-negative-offset', Buffer.from('foo'), { + offset: -1 + })).to.eventually.be.rejected() }) - it('should write to non existent file with create flag', async function () { - const testPath = `/test-${hat()}` + it('explodes if given a negative length', async () => { + await expect(ipfs.files.write('/foo-negative-length', Buffer.from('foo'), { + length: -1 + })).to.eventually.be.rejected() + }) - await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true }) + it('creates a zero length file when passed a zero length', async () => { + await ipfs.files.write('/foo-zero-length', Buffer.from('foo'), { + length: 0, + create: true + }) - const stats = await ipfs.files.stat(testPath) - expect(stats.type).to.equal('file') + await expect(all(ipfs.files.ls('/'))).to.eventually.have.lengthOf(1) + .and.to.have.nested.property('[0]').that.includes({ + name: 'foo-zero-length', + size: 0 + }) }) - it('should write to deeply nested non existent file with create and parents flags', async function () { - const testPath = `/foo/bar/baz/test-${hat()}` + it('writes a small file using a buffer', async () => { + const filePath = `/small-file-${Math.random()}.txt` - await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true, parents: true }) + await ipfs.files.write(filePath, smallFile, { + create: true + }) - const stats = await ipfs.files.stat(testPath) - expect(stats.type).to.equal('file') + await expect(ipfs.files.stat(filePath)).to.eventually.have.property('size', smallFile.length) + expect(Buffer.concat(await all(ipfs.files.read(filePath)))).to.deep.equal(smallFile) + }) + + it('writes a small file using a string', async function () { + const filePath = `/string-${Math.random()}.txt` + const content = 'hello world' + + await ipfs.files.write(filePath, content, { + create: true + }) + + await expect(ipfs.files.stat(filePath)).to.eventually.have.property('size', content.length) + expect(Buffer.concat(await all(ipfs.files.read(filePath)))).to.deep.equal(Buffer.from(content)) + }) + + it('writes part of a small file using a string', async function () { + const filePath = `/string-${Math.random()}.txt` + const content = 'hello world' + + await ipfs.files.write(filePath, content, { + create: true, + length: 2 + }) + + const stats = await ipfs.files.stat(filePath) + + expect(stats.size).to.equal(2) + }) + + it('writes a small file using a Node stream (Node only)', async function () { + if (!isNode) { + return this.skip() + } + + const filePath = `/small-file-${Math.random()}.txt` + const pathToFile = await tempWrite(smallFile) + const stream = fs.createReadStream(pathToFile) + + await ipfs.files.write(filePath, stream, { + create: true + }) + + const stats = await ipfs.files.stat(filePath) + + expect(stats.size).to.equal(smallFile.length) + }) + + 
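// like the Node-only stream test above, the next test is environment-specific: + // global.Blob only exists in browsers, so the test skips itself everywhere else +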
it('writes a small file using an HTML5 Blob (Browser only)', async function () { + if (!global.Blob) { + return this.skip() + } + + const filePath = `/small-file-${Math.random()}.txt` + const blob = new global.Blob([smallFile.buffer.slice(smallFile.byteOffset, smallFile.byteOffset + smallFile.byteLength)]) + + await ipfs.files.write(filePath, blob, { + create: true + }) + + const stats = await ipfs.files.stat(filePath) + + expect(stats.size).to.equal(smallFile.length) + }) + + it('writes a small file with an escaped slash in the title', async () => { + const filePath = `/small-\\/file-${Math.random()}.txt` + + await ipfs.files.write(filePath, smallFile, { + create: true + }) + + const stats = await ipfs.files.stat(filePath) + + expect(stats.size).to.equal(smallFile.length) + + await expect(ipfs.files.stat('/small-\\')).to.eventually.rejectedWith(/does not exist/) + }) + + it('writes a deeply nested small file', async () => { + const filePath = '/foo/bar/baz/qux/quux/garply/small-file.txt' + + await ipfs.files.write(filePath, smallFile, { + create: true, + parents: true + }) + + const stats = await ipfs.files.stat(filePath) + + expect(stats.size).to.equal(smallFile.length) + }) + + it('refuses to write to a file in a folder that does not exist', async () => { + const filePath = `/${Math.random()}/small-file.txt` + + try { + await ipfs.files.write(filePath, smallFile, { + create: true + }) + throw new Error('Writing a file to a non-existent folder without the --parents flag should have failed') + } catch (err) { + expect(err.message).to.contain('does not exist') + } + }) + + it('refuses to write to a file that does not exist', async () => { + const filePath = `/small-file-${Math.random()}.txt` + + try { + await ipfs.files.write(filePath, smallFile) + throw new Error('Writing a file to a non-existent file without the --create flag should have failed') + } catch (err) { + expect(err.message).to.contain('file does not exist') + } + }) + + it('refuses to write to a path that has a file in it', async () => { + const filePath = `/small-file-${Math.random()}.txt` + + await ipfs.files.write(filePath, Buffer.from([0, 1, 2, 3]), { + create: true + }) + + try { + await ipfs.files.write(`${filePath}/other-file-${Math.random()}.txt`, Buffer.from([0, 1, 2, 3]), { + create: true + }) + + throw new Error('Writing a path with a file in it should have failed') + } catch (err) { + expect(err.message).to.contain('Not a directory') + } + }) + + runTest(({ type, path, content }) => { + it(`limits how many bytes to write to a file (${type})`, async () => { + await ipfs.files.write(path, content, { + create: true, + parents: true, + length: 2 + }) + + const buffer = await concat(ipfs.files.read(path)) + + expect(buffer.length).to.equal(2) + }) + }) + + runTest(({ type, path, content, contentSize }) => { + it(`overwrites start of a file without truncating (${type})`, async () => { + const newContent = Buffer.from('Goodbye world') + + await ipfs.files.write(path, content, { + create: true + }) + + await expect(ipfs.files.stat(path)).to.eventually.have.property('size', contentSize) + + await ipfs.files.write(path, newContent) + + const stats = await ipfs.files.stat(path) + expect(stats.size).to.equal(contentSize) + + const buffer = Buffer.concat(await all(ipfs.files.read(path, { + offset: 0, + length: newContent.length + }))) + + expect(buffer).to.deep.equal(newContent) + }) + }) + + runTest(({ type, path, content, contentSize }) => { + it(`pads the start of a new file when an offset is specified (${type})`, async () 
=> { + const offset = 10 + + await ipfs.files.write(path, content, { + offset, + create: true + }) + + await expect(ipfs.files.stat(path)).to.eventually.have.property('size', offset + contentSize) + + const buffer = Buffer.concat(await all(ipfs.files.read(path, { + offset: 0, + length: offset + }))) + + expect(buffer).to.deep.equal(Buffer.alloc(offset, 0)) + }) + }) + + runTest(({ type, path, content, contentSize }) => { + it(`expands a file when an offset is specified (${type})`, async () => { + const offset = contentSize - 1 + const newContent = Buffer.from('Oh hai!') + + await ipfs.files.write(path, content, { + create: true + }) + + await ipfs.files.write(path, newContent, { + offset + }) + + await expect(ipfs.files.stat(path)).to.eventually.have.property('size', contentSize + newContent.length - 1) + + const buffer = Buffer.concat(await all(ipfs.files.read(path, { + offset: offset + }))) + + expect(buffer).to.deep.equal(newContent) + }) + }) + + runTest(({ type, path, content, contentSize }) => { + it(`expands a file when an offset is specified and the offset is longer than the file (${type})`, async () => { + const offset = contentSize + 5 + const newContent = Buffer.from('Oh hai!') + + await ipfs.files.write(path, content, { + create: true + }) + await ipfs.files.write(path, newContent, { + offset + }) + + await expect(ipfs.files.stat(path)).to.eventually.have.property('size', newContent.length + offset) + + const buffer = Buffer.concat(await all(ipfs.files.read(path))) + + if (content[Symbol.asyncIterator]) { + content = Buffer.concat(await all(content)) + } + + expect(buffer).to.deep.equal(Buffer.concat([content, Buffer.from([0, 0, 0, 0, 0]), newContent])) + }) + }) + + runTest(({ type, path, content }) => { + it(`truncates a file after writing (${type})`, async () => { + const newContent = Buffer.from('Oh hai!') + + await ipfs.files.write(path, content, { + create: true + }) + await ipfs.files.write(path, newContent, { + truncate: true + }) + + await expect(ipfs.files.stat(path)).to.eventually.have.property('size', newContent.length) + + const buffer = Buffer.concat(await all(ipfs.files.read(path))) + + expect(buffer).to.deep.equal(newContent) + }) + }) + + runTest(({ type, path, content }) => { + it(`writes a file with raw blocks for newly created leaf nodes (${type})`, async () => { + await ipfs.files.write(path, content, { + create: true, + rawLeaves: true + }) + + const stats = await ipfs.files.stat(path) + + for await (const { cid } of traverseLeafNodes(ipfs, stats.cid)) { + expect(cid.codec).to.equal('raw') + } + }) + }) + + it('supports concurrent writes', async function () { + const files = [] + + for (let i = 0; i < 10; i++) { + files.push({ + name: `source-file-${Math.random()}.txt`, + source: randomBytes(100) + }) + } + + await Promise.all( + files.map(({ name, source }) => ipfs.files.write(`/concurrent/${name}`, source, { + create: true, + parents: true + })) + ) + + const listing = await all(ipfs.files.ls('/concurrent')) + expect(listing.length).to.equal(files.length) + + listing.forEach(listedFile => { + expect(files.find(file => file.name === listedFile.name)).to.be.ok() + }) + }) + + it('rewrites really big files', async function () { + const initialStream = randomBytes(1024 * 300) + const newDataStream = randomBytes(1024 * 300) + + const fileName = `/rewrite/file-${Math.random()}.txt` + + await ipfs.files.write(fileName, initialStream, { + create: true, + parents: true + }) + + await ipfs.files.write(fileName, newDataStream, { + offset: 0 + }) + + const actualBytes = 
Buffer.concat(await all(ipfs.files.read(fileName))) + + for (let i = 0; i < newDataStream.length; i++) { + if (newDataStream[i] !== actualBytes[i]) { + if (initialStream[i] === actualBytes[i]) { + throw new Error(`Bytes at index ${i} were not overwritten - expected ${newDataStream[i]} actual ${initialStream[i]}`) + } + + throw new Error(`Bytes at index ${i} not equal - expected ${newDataStream[i]} actual ${actualBytes[i]}`) + } + } + + expect(actualBytes).to.deep.equal(newDataStream) + }) + + it('shards a large directory when writing too many links to it', async () => { + const shardSplitThreshold = 10 + const dirPath = `/sharded-dir-${Math.random()}` + const newFile = `file-${Math.random()}` + const newFilePath = `${dirPath}/${newFile}` + + await ipfs.files.mkdir(dirPath, { + shardSplitThreshold + }) + + for (let i = 0; i < shardSplitThreshold; i++) { + await ipfs.files.write(`${dirPath}/file-${Math.random()}`, Buffer.from([0, 1, 2, 3]), { + create: true, + shardSplitThreshold + }) + } + + await expect(ipfs.files.stat(dirPath)).to.eventually.have.property('type', 'directory') + + await ipfs.files.write(newFilePath, Buffer.from([0, 1, 2, 3]), { + create: true, + shardSplitThreshold + }) + + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true() + await expect(ipfs.files.stat(dirPath)).to.eventually.have.property('type', 'directory') + + const files = await all(ipfs.files.ls(dirPath, { + long: true + })) + + // new file should be in directory + expect(files.filter(file => file.name === newFile).pop()).to.be.ok() + }) + + it('writes a file to an already sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + + const newFile = `file-${Math.random()}` + const newFilePath = `${shardedDirPath}/${newFile}` + + await ipfs.files.write(newFilePath, Buffer.from([0, 1, 2, 3]), { + create: true + }) + + // should still be a sharded directory + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + await expect(ipfs.files.stat(shardedDirPath)).to.eventually.have.property('type', 'directory') + + const files = await all(ipfs.files.ls(shardedDirPath, { + long: true + })) + + // new file should be in the directory + expect(files.filter(file => file.name === newFile).pop()).to.be.ok() + + // should be able to ls new file directly + await expect(all(ipfs.files.ls(newFilePath, { + long: true + }))).to.eventually.not.be.empty() + }) + + it('overwrites a file in a sharded directory when positions do not match', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const newFile = 'file-0.6944395883502592' + const newFilePath = `${shardedDirPath}/${newFile}` + const newContent = Buffer.from([3, 2, 1, 0]) + + await ipfs.files.write(newFilePath, Buffer.from([0, 1, 2, 3]), { + create: true + }) + + // should still be a sharded directory + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + await expect(ipfs.files.stat(shardedDirPath)).to.eventually.have.property('type', 'directory') + + // overwrite the file + await ipfs.files.write(newFilePath, newContent, { + create: true + }) + + // read the file back + const buffer = Buffer.concat(await all(ipfs.files.read(newFilePath))) + + expect(buffer).to.deep.equal(newContent) + + // should be able to ls new file directly + await expect(all(ipfs.files.ls(newFilePath, { + long: true + }))).to.eventually.not.be.empty() + }) + + it('overwrites a file in a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const 
newFile = `file-${Math.random()}` + const newFilePath = `${shardedDirPath}/${newFile}` + const newContent = Buffer.from([3, 2, 1, 0]) + + await ipfs.files.write(newFilePath, Buffer.from([0, 1, 2, 3]), { + create: true + }) + + // should still be a sharded directory + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + await expect(ipfs.files.stat(shardedDirPath)).to.eventually.have.property('type', 'directory') + + // overwrite the file + await ipfs.files.write(newFilePath, newContent, { + create: true + }) + + // read the file back + const buffer = Buffer.concat(await all(ipfs.files.read(newFilePath))) + + expect(buffer).to.deep.equal(newContent) + + // should be able to ls new file directly + await expect(all(ipfs.files.ls(newFilePath, { + long: true + }))).to.eventually.not.be.empty() + }) + + it('overwrites a file in a subshard of a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(ipfs) + const newFile = 'file-1a.txt' + const newFilePath = `${shardedDirPath}/${newFile}` + const newContent = Buffer.from([3, 2, 1, 0]) + + await ipfs.files.write(newFilePath, Buffer.from([0, 1, 2, 3]), { + create: true + }) + + // should still be a sharded directory + await expect(isShardAtPath(shardedDirPath, ipfs)).to.eventually.be.true() + await expect(ipfs.files.stat(shardedDirPath)).to.eventually.have.property('type', 'directory') + + // overwrite the file + await ipfs.files.write(newFilePath, newContent, { + create: true + }) + + // read the file back + const buffer = Buffer.concat(await all(ipfs.files.read(newFilePath))) + + expect(buffer).to.deep.equal(newContent) + + // should be able to ls new file directly + await expect(all(ipfs.files.ls(newFilePath, { + long: true + }))).to.eventually.not.be.empty() + }) + + it('writes a file with a different CID version to the parent', async () => { + const directory = `cid-versions-${Math.random()}` + const directoryPath = `/${directory}` + const fileName = `file-${Math.random()}.txt` + const filePath = `${directoryPath}/${fileName}` + const expectedBytes = Buffer.from([0, 1, 2, 3]) + + await ipfs.files.mkdir(directoryPath, { + cidVersion: 0 + }) + + await expect(ipfs.files.stat(directoryPath)).to.eventually.have.nested.property('cid.version', 0) + + await ipfs.files.write(filePath, expectedBytes, { + create: true, + cidVersion: 1 + }) + + await expect(ipfs.files.stat(filePath)).to.eventually.have.nested.property('cid.version', 1) + + const actualBytes = Buffer.concat(await all(ipfs.files.read(filePath))) + + expect(actualBytes).to.deep.equal(expectedBytes) + }) + + it('overwrites a file with a different CID version', async () => { + const directory = `cid-versions-${Math.random()}` + const directoryPath = `/${directory}` + const fileName = `file-${Math.random()}.txt` + const filePath = `${directoryPath}/${fileName}` + const expectedBytes = Buffer.from([0, 1, 2, 3]) + + await ipfs.files.mkdir(directoryPath, { + cidVersion: 0 + }) + + await expect(ipfs.files.stat(directoryPath)).to.eventually.have.nested.property('cid.version', 0) + + await ipfs.files.write(filePath, Buffer.from([5, 6]), { + create: true, + cidVersion: 0 + }) + + await expect(ipfs.files.stat(filePath)).to.eventually.have.nested.property('cid.version', 0) + + await ipfs.files.write(filePath, expectedBytes, { + cidVersion: 1 + }) + + await expect(ipfs.files.stat(filePath)).to.eventually.have.nested.property('cid.version', 1) + + const actualBytes = Buffer.concat(await all(ipfs.files.read(filePath))) + + 
expect(actualBytes).to.deep.equal(expectedBytes) + }) + + it('partially overwrites a file with a different CID version', async () => { + const directory = `cid-versions-${Math.random()}` + const directoryPath = `/${directory}` + const fileName = `file-${Math.random()}.txt` + const filePath = `${directoryPath}/${fileName}` + + await ipfs.files.mkdir(directoryPath, { + cidVersion: 0 + }) + + await expect(ipfs.files.stat(directoryPath)).to.eventually.have.nested.property('cid.version', 0) + + await ipfs.files.write(filePath, Buffer.from([5, 6, 7, 8, 9, 10, 11]), { + create: true, + cidVersion: 0 + }) + + await expect(ipfs.files.stat(filePath)).to.eventually.have.nested.property('cid.version', 0) + + await ipfs.files.write(filePath, Buffer.from([0, 1, 2, 3]), { + cidVersion: 1, + offset: 1 + }) + + await expect(ipfs.files.stat(filePath)).to.eventually.have.nested.property('cid.version', 1) + + const actualBytes = Buffer.concat(await all(ipfs.files.read(filePath))) + + expect(actualBytes).to.deep.equal(Buffer.from([5, 0, 1, 2, 3, 10, 11])) + }) + + it('writes a file with a different hash function to the parent', async () => { + const directory = `cid-versions-${Math.random()}` + const directoryPath = `/${directory}` + const fileName = `file-${Math.random()}.txt` + const filePath = `${directoryPath}/${fileName}` + const expectedBytes = Buffer.from([0, 1, 2, 3]) + + await ipfs.files.mkdir(directoryPath, { + cidVersion: 0 + }) + + await expect(ipfs.files.stat(directoryPath)).to.eventually.have.nested.property('cid.version', 0) + + await ipfs.files.write(filePath, expectedBytes, { + create: true, + cidVersion: 1, + hashAlg: 'sha2-512' + }) + + await expect(ipfs.files.stat(filePath)).to.eventually.have.nested.property('cid.multihash') + .that.satisfies(hash => { + return multihash.decode(hash).name === 'sha2-512' + }) + + const actualBytes = Buffer.concat(await all(ipfs.files.read(filePath))) + + expect(actualBytes).to.deep.equal(expectedBytes) + }) + + it('results in the same hash as a sharded directory created by the importer when adding a new file', async function () { + this.timeout(60000) + + const { + nextFile, + dirWithSomeFiles, + dirPath + } = await createTwoShards(ipfs, 75) + + await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath) + + await ipfs.files.write(nextFile.path, nextFile.content, { + create: true + }) + + const stats = await ipfs.files.stat(dirPath) + const updatedDirCid = stats.cid + + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true() + expect(stats.type).to.equal('directory') + expect(updatedDirCid.toString()).to.equal('QmbLw9uCrQaFgweMskqMrsVKTwwakSg94GuMT3zht1P7CQ') + }) + + it('results in the same hash as a sharded directory created by the importer when creating a new subshard', async function () { + this.timeout(60000) + + const { + nextFile, + dirWithSomeFiles, + dirPath + } = await createTwoShards(ipfs, 100) + + await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath) + + await ipfs.files.write(nextFile.path, nextFile.content, { + create: true + }) + + const stats = await ipfs.files.stat(dirPath) + const updatedDirCid = stats.cid + + expect(updatedDirCid.toString()).to.deep.equal('QmcGTKoaZeMxVenyxnkP2riibE8vSEPobkN1oxvcEZpBW5') + }) + + it('results in the same hash as a sharded directory created by the importer when adding a file to a subshard', async function () { + this.timeout(60000) + + const { + nextFile, + dirWithSomeFiles, + dirPath + } = await createTwoShards(ipfs, 82) + + await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath) + + await 
ipfs.files.write(nextFile.path, nextFile.content, { + create: true + }) + + const stats = await ipfs.files.stat(dirPath) + const updatedDirCid = stats.cid + + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true() + expect(stats.type).to.equal('directory') + expect(updatedDirCid.toString()).to.deep.equal('QmXeJ4ercHcxdiX7Vxm1Hit9AwsTNXcwCw5Ad32yW2HdHR') + }) + + it('results in the same hash as a sharded directory created by the importer when adding a file to a subshard of a subshard', async function () { + this.timeout(60000) + + const { + nextFile, + dirWithSomeFiles, + dirPath + } = await createTwoShards(ipfs, 2187) + + await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath) + + await ipfs.files.write(nextFile.path, nextFile.content, { + create: true + }) + + const stats = await ipfs.files.stat(dirPath) + const updatedDirCid = stats.cid + + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true() + expect(stats.type).to.equal('directory') + expect(updatedDirCid.toString()).to.deep.equal('QmY4o7GNvr5eZPnT6k6ALp5zkQ4eiUkJQ6eeUNsdSiqS4f') }) it('should write file and specify mode as a string', async function () { diff --git a/packages/interface-ipfs-core/src/key/export.js b/packages/interface-ipfs-core/src/key/export.js index 48fdb731b6..d650686261 100644 --- a/packages/interface-ipfs-core/src/key/export.js +++ b/packages/interface-ipfs-core/src/key/export.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -23,7 +23,7 @@ module.exports = (common, options) => { after(() => common.clean()) it('should export "self" key', async function () { - const pem = await ipfs.key.export('self', hat()) + const pem = await ipfs.key.export('self', nanoid()) expect(pem).to.exist() }) }) diff --git a/packages/interface-ipfs-core/src/key/gen.js b/packages/interface-ipfs-core/src/key/gen.js index c6afda3c02..f53410ac0b 100644 --- a/packages/interface-ipfs-core/src/key/gen.js +++ b/packages/interface-ipfs-core/src/key/gen.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -29,7 +29,7 @@ module.exports = (common, options) => { keyTypes.forEach((kt) => { it(`should generate a new ${kt.type} key`, async function () { this.timeout(20 * 1000) - const name = hat() + const name = nanoid() const key = await ipfs.key.gen(name, kt) expect(key).to.exist() expect(key).to.have.property('name', name) diff --git a/packages/interface-ipfs-core/src/key/import.js b/packages/interface-ipfs-core/src/key/import.js index 8fc2162fd7..816f68a69a 100644 --- a/packages/interface-ipfs-core/src/key/import.js +++ b/packages/interface-ipfs-core/src/key/import.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -23,7 +23,7 @@ module.exports = (common, options) => { after(() => common.clean()) it('should import an exported key', async () => { - const password = hat() + const password = nanoid() const pem = await ipfs.key.export('self', password) expect(pem).to.exist() diff --git a/packages/interface-ipfs-core/src/key/list.js 
b/packages/interface-ipfs-core/src/key/list.js index 792ec92a4f..7ea5283fe3 100644 --- a/packages/interface-ipfs-core/src/key/list.js +++ b/packages/interface-ipfs-core/src/key/list.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -25,7 +25,7 @@ module.exports = (common, options) => { it('should list all the keys', async function () { this.timeout(60 * 1000) - const keys = await Promise.all([1, 2, 3].map(() => ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }))) + const keys = await Promise.all([1, 2, 3].map(() => ipfs.key.gen(nanoid(), { type: 'rsa', size: 2048 }))) const res = await ipfs.key.list() expect(res).to.exist() diff --git a/packages/interface-ipfs-core/src/key/rename.js b/packages/interface-ipfs-core/src/key/rename.js index d18c2a2855..de49554f72 100644 --- a/packages/interface-ipfs-core/src/key/rename.js +++ b/packages/interface-ipfs-core/src/key/rename.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -25,8 +25,8 @@ module.exports = (common, options) => { it('should rename a key', async function () { this.timeout(30 * 1000) - const oldName = hat() - const newName = hat() + const oldName = nanoid() + const newName = nanoid() const key = await ipfs.key.gen(oldName, { type: 'rsa', size: 2048 }) diff --git a/packages/interface-ipfs-core/src/key/rm.js b/packages/interface-ipfs-core/src/key/rm.js index 8e86c1e467..02233f31c2 100644 --- a/packages/interface-ipfs-core/src/key/rm.js +++ b/packages/interface-ipfs-core/src/key/rm.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -25,7 +25,7 @@ module.exports = (common, options) => { it('should rm a key', async function () { this.timeout(30 * 1000) - const key = await ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }) + const key = await ipfs.key.gen(nanoid(), { type: 'rsa', size: 2048 }) const removeRes = await ipfs.key.rm(key.name) expect(removeRes).to.exist() diff --git a/packages/interface-ipfs-core/src/miscellaneous/id.js b/packages/interface-ipfs-core/src/miscellaneous/id.js index de84289ecb..98cbf6db50 100644 --- a/packages/interface-ipfs-core/src/miscellaneous/id.js +++ b/packages/interface-ipfs-core/src/miscellaneous/id.js @@ -29,9 +29,13 @@ module.exports = (common, options) => { expect(res).to.have.a.property('id').that.is.a('string') expect(CID.isCID(new CID(res.id))).to.equal(true) expect(res).to.have.a.property('publicKey') - expect(res).to.have.a.property('addresses').that.is.an('array').and.all.satisfy(ma => Multiaddr.isMultiaddr(ma)) expect(res).to.have.a.property('agentVersion').that.is.a('string') expect(res).to.have.a.property('protocolVersion').that.is.a('string') + expect(res).to.have.a.property('addresses').that.is.an('array') + + for (const ma of res.addresses) { + expect(Multiaddr.isMultiaddr(ma)).to.be.true() + } }) }) } diff --git a/packages/interface-ipfs-core/src/miscellaneous/resolve.js b/packages/interface-ipfs-core/src/miscellaneous/resolve.js index 21dac8de2e..ab916ee2cc 100644 --- 
a/packages/interface-ipfs-core/src/miscellaneous/resolve.js +++ b/packages/interface-ipfs-core/src/miscellaneous/resolve.js @@ -3,7 +3,7 @@ const isIpfs = require('is-ipfs') const loadFixture = require('aegir/fixtures') -const hat = require('hat') +const nanoid = require('nanoid') const multibase = require('multibase') const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') @@ -55,7 +55,7 @@ module.exports = (common, options) => { }) it('should resolve up to the last node', async () => { - const content = { path: { to: { file: hat() } } } + const content = { path: { to: { file: nanoid() } } } const options = { format: 'dag-cbor', hashAlg: 'sha2-256' } const cid = await ipfs.dag.put(content, options) const path = `/ipfs/${cid}/path/to/file` @@ -66,7 +66,7 @@ module.exports = (common, options) => { it('should resolve up to the last node across multiple nodes', async () => { const options = { format: 'dag-cbor', hashAlg: 'sha2-256' } - const childCid = await ipfs.dag.put({ node: { with: { file: hat() } } }, options) + const childCid = await ipfs.dag.put({ node: { with: { file: nanoid() } } }, options) const parentCid = await ipfs.dag.put({ path: { to: childCid } }, options) const resolved = await ipfs.resolve(`/ipfs/${parentCid}/path/to/node/with/file`) diff --git a/packages/interface-ipfs-core/src/name/publish.js b/packages/interface-ipfs-core/src/name/publish.js index 51adc51537..375ca94eb6 100644 --- a/packages/interface-ipfs-core/src/name/publish.js +++ b/packages/interface-ipfs-core/src/name/publish.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { fixture } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -18,7 +18,7 @@ module.exports = (common, options) => { const it = getIt(options) describe('.name.publish offline', () => { - const keyName = hat() + const keyName = nanoid() let ipfs let nodeId diff --git a/packages/interface-ipfs-core/src/object/data.js b/packages/interface-ipfs-core/src/object/data.js index 50ca22b02e..3141c458e4 100644 --- a/packages/interface-ipfs-core/src/object/data.js +++ b/packages/interface-ipfs-core/src/object/data.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ @@ -26,7 +26,7 @@ module.exports = (common, options) => { it('should get data by multihash', async () => { const testObj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } @@ -38,7 +38,7 @@ module.exports = (common, options) => { it('should get data by base58 encoded multihash string', async () => { const testObj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } diff --git a/packages/interface-ipfs-core/src/object/get.js b/packages/interface-ipfs-core/src/object/get.js index e0beb72f17..548aa221ed 100644 --- a/packages/interface-ipfs-core/src/object/get.js +++ b/packages/interface-ipfs-core/src/object/get.js @@ -3,10 +3,10 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') const UnixFs = require('ipfs-unixfs') -const crypto = require('crypto') +const randomBytes = require('iso-random-stream/src/random') const { asDAGLink } = require('./utils') const all = 
require('it-all') @@ -32,7 +32,7 @@ module.exports = (common, options) => { it('should get object by multihash', async () => { const obj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } @@ -52,7 +52,7 @@ module.exports = (common, options) => { it('should get object by multihash string', async () => { const obj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } @@ -91,7 +91,7 @@ module.exports = (common, options) => { it('should get object by base58 encoded multihash', async () => { const obj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } @@ -111,7 +111,7 @@ module.exports = (common, options) => { it('should get object by base58 encoded multihash string', async () => { const obj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } @@ -131,7 +131,7 @@ module.exports = (common, options) => { it('should supply unaltered data', async () => { // has to be big enough to span several DAGNodes - const data = crypto.randomBytes(1024 * 3000) + const data = randomBytes(1024 * 3000) const result = await all(ipfs.add({ path: '', diff --git a/packages/interface-ipfs-core/src/object/links.js b/packages/interface-ipfs-core/src/object/links.js index 61ccd72e67..814753ac45 100644 --- a/packages/interface-ipfs-core/src/object/links.js +++ b/packages/interface-ipfs-core/src/object/links.js @@ -3,7 +3,7 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') const { asDAGLink } = require('./utils') const all = require('it-all') @@ -30,7 +30,7 @@ module.exports = (common, options) => { it('should get empty links by multihash', async () => { const testObj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } @@ -60,7 +60,7 @@ module.exports = (common, options) => { it('should get links by base58 encoded multihash', async () => { const testObj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } @@ -73,7 +73,7 @@ module.exports = (common, options) => { it('should get links by base58 encoded multihash string', async () => { const testObj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } diff --git a/packages/interface-ipfs-core/src/object/put.js b/packages/interface-ipfs-core/src/object/put.js index e26c3bccaa..61c736dd76 100644 --- a/packages/interface-ipfs-core/src/object/put.js +++ b/packages/interface-ipfs-core/src/object/put.js @@ -3,7 +3,7 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const hat = require('hat') +const nanoid = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') const { asDAGLink } = require('./utils') @@ -29,7 +29,7 @@ module.exports = (common, options) => { it('should put an object', async () => { const obj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } @@ -43,7 +43,7 @@ module.exports = (common, options) => { it('should put a JSON encoded Buffer', async () => { const obj = { - Data: Buffer.from(hat()), + Data: Buffer.from(nanoid()), Links: [] } @@ -62,7 +62,7 @@ module.exports = (common, options) => { }) it('should put a Protobuf encoded Buffer', async () => { - const node = new DAGNode(Buffer.from(hat())) + const node = new DAGNode(Buffer.from(nanoid())) const serialized = node.serialize() const cid = await ipfs.object.put(serialized, { enc: 'protobuf' }) @@ -72,7 +72,7 @@ module.exports = (common, 
options) => { }) it('should put a Buffer as data', async () => { - const data = Buffer.from(hat()) + const data = Buffer.from(nanoid()) const cid = await ipfs.object.put(data) const node = await ipfs.object.get(cid) @@ -82,7 +82,7 @@ module.exports = (common, options) => { }) it('should put a Protobuf DAGNode', async () => { - const dNode = new DAGNode(Buffer.from(hat())) + const dNode = new DAGNode(Buffer.from(nanoid())) const cid = await ipfs.object.put(dNode) const node = await ipfs.object.get(cid) @@ -91,12 +91,12 @@ module.exports = (common, options) => { }) it('should fail if a string is passed', () => { - return expect(ipfs.object.put(hat())).to.eventually.be.rejected() + return expect(ipfs.object.put(nanoid())).to.eventually.be.rejected() }) it('should put a Protobuf DAGNode with a link', async () => { - const node1a = new DAGNode(Buffer.from(hat())) - const node2 = new DAGNode(Buffer.from(hat())) + const node1a = new DAGNode(Buffer.from(nanoid())) + const node2 = new DAGNode(Buffer.from(nanoid())) const link = await asDAGLink(node2, 'some-link') diff --git a/packages/interface-ipfs-core/src/pubsub/publish.js b/packages/interface-ipfs-core/src/pubsub/publish.js index f6359f5806..0ab8c5da51 100644 --- a/packages/interface-ipfs-core/src/pubsub/publish.js +++ b/packages/interface-ipfs-core/src/pubsub/publish.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const { getTopic } = require('./utils') const { getDescribe, getIt } = require('../utils/mocha') @@ -32,7 +32,7 @@ module.exports = (common, options) => { it('should publish message from buffer', () => { const topic = getTopic() - return ipfs.pubsub.publish(topic, Buffer.from(hat())) + return ipfs.pubsub.publish(topic, Buffer.from(nanoid())) }) it('should publish 10 times within time limit', async () => { @@ -40,7 +40,7 @@ module.exports = (common, options) => { const topic = getTopic() for (let i = 0; i < count; i++) { - await ipfs.pubsub.publish(topic, Buffer.from(hat())) + await ipfs.pubsub.publish(topic, Buffer.from(nanoid())) } }) }) diff --git a/packages/interface-ipfs-core/src/pubsub/utils.js b/packages/interface-ipfs-core/src/pubsub/utils.js index 80b53c6597..7c75fec5b0 100644 --- a/packages/interface-ipfs-core/src/pubsub/utils.js +++ b/packages/interface-ipfs-core/src/pubsub/utils.js @@ -1,6 +1,6 @@ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const delay = require('delay') async function waitForPeers (ipfs, topic, peersToWait, waitForMs) { @@ -24,4 +24,4 @@ async function waitForPeers (ipfs, topic, peersToWait, waitForMs) { exports.waitForPeers = waitForPeers -exports.getTopic = () => 'pubsub-tests-' + hat() +exports.getTopic = () => 'pubsub-tests-' + nanoid() diff --git a/packages/interface-ipfs-core/src/swarm/addrs.js b/packages/interface-ipfs-core/src/swarm/addrs.js index d5e21e6018..2f13b214b6 100644 --- a/packages/interface-ipfs-core/src/swarm/addrs.js +++ b/packages/interface-ipfs-core/src/swarm/addrs.js @@ -35,12 +35,14 @@ module.exports = (common, options) => { expect(peerInfos).to.not.be.empty() expect(peerInfos).to.be.an('array') - expect(peerInfos).to.all.satisfy(peerInfo => { + for (const peerInfo of peerInfos) { expect(CID.isCID(new CID(peerInfo.id))).to.be.true() - expect(peerInfo).to.have.a.property('addrs').that.is.an('array').and.all.satisfy(ma => Multiaddr.isMultiaddr(ma)) + expect(peerInfo).to.have.a.property('addrs').that.is.an('array') - return true - }) + for (const ma of peerInfo.addrs) { + 
expect(Multiaddr.isMultiaddr(ma)).to.be.true() + } + } }) }) } diff --git a/packages/interface-ipfs-core/src/utils/create-sharded-directory.js b/packages/interface-ipfs-core/src/utils/create-sharded-directory.js new file mode 100644 index 0000000000..bfd2771937 --- /dev/null +++ b/packages/interface-ipfs-core/src/utils/create-sharded-directory.js @@ -0,0 +1,24 @@ +'use strict' + +const { expect } = require('./mocha') +const isShardAtPath = require('./is-shard-at-path') +const last = require('it-last') + +module.exports = async (ipfs, files = 1001) => { + const dirPath = `/sharded-dir-${Math.random()}` + + const result = await last(ipfs.add(function * () { + for (let i = 0; i < files; i++) { + yield { + path: `${dirPath}/file-${i}`, + content: Buffer.from([0, 1, 2, 3, 4, 5, i]) + } + } + }())) + + await ipfs.files.cp(`/ipfs/${result.cid}`, dirPath) + + await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true() + + return dirPath +} diff --git a/packages/interface-ipfs-core/src/utils/create-two-shards.js b/packages/interface-ipfs-core/src/utils/create-two-shards.js new file mode 100644 index 0000000000..bfa96c6d01 --- /dev/null +++ b/packages/interface-ipfs-core/src/utils/create-two-shards.js @@ -0,0 +1,43 @@ +'use strict' + +const { expect } = require('./mocha') +const isShardAtPath = require('./is-shard-at-path') +const last = require('it-last') + +const createTwoShards = async (ipfs, fileCount) => { + const dirPath = `/sharded-dir-${Math.random()}` + const files = new Array(fileCount).fill(0).map((_, index) => ({ + path: `${dirPath}/file-${index}`, + content: Buffer.from([0, 1, 2, 3, 4, index]) + })) + files[files.length - 1].path = `${dirPath}/file-${fileCount - 1}` + + const allFiles = files.map(file => ({ + ...file + })) + const someFiles = files.map(file => ({ + ...file + })) + const nextFile = someFiles.pop() + + const { cid: dirWithAllFiles } = await last(ipfs.add(allFiles, { + // for js-ipfs - go-ipfs shards everything when sharding is turned on + shardSplitThreshold: files.length - 1 + })) + const { cid: dirWithSomeFiles } = await last(ipfs.add(someFiles, { + // for js-ipfs - go-ipfs shards everything when sharding is turned on + shardSplitThreshold: files.length - 1 + })) + + await expect(isShardAtPath(`/ipfs/${dirWithAllFiles}`, ipfs)).to.eventually.be.true() + await expect(isShardAtPath(`/ipfs/${dirWithSomeFiles}`, ipfs)).to.eventually.be.true() + + return { + nextFile, + dirWithAllFiles, + dirWithSomeFiles, + dirPath + } +} + +module.exports = createTwoShards diff --git a/packages/interface-ipfs-core/src/utils/is-shard-at-path.js b/packages/interface-ipfs-core/src/utils/is-shard-at-path.js new file mode 100644 index 0000000000..f2be8ee480 --- /dev/null +++ b/packages/interface-ipfs-core/src/utils/is-shard-at-path.js @@ -0,0 +1,11 @@ +'use strict' + +const UnixFS = require('ipfs-unixfs') + +module.exports = async (path, ipfs) => { + const stats = await ipfs.files.stat(path) + const { value: node } = await ipfs.dag.get(stats.cid) + const entry = UnixFS.unmarshal(node.Data) + + return entry.type === 'hamt-sharded-directory' +}
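A minimal usage sketch of these new shard helpers, assuming an `ipfs` instance provided by the surrounding suite's factory (the test title, require paths and wiring below are illustrative, not part of the diff):

const { expect } = require('./utils/mocha')
const createShardedDirectory = require('./utils/create-sharded-directory')
const isShardAtPath = require('./utils/is-shard-at-path')

it('keeps a sharded directory sharded (sketch)', async () => {
  // `ipfs` is assumed to come from a before() hook elsewhere in the suite
  const dirPath = await createShardedDirectory(ipfs) // defaults to 1001 files

  // the helper already asserts this internally; shown here as the typical follow-up check
  await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true()
})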
diff --git a/packages/interface-ipfs-core/src/utils/mocha.js b/packages/interface-ipfs-core/src/utils/mocha.js index d765631266..b0c566a1c8 100644 --- a/packages/interface-ipfs-core/src/utils/mocha.js +++ b/packages/interface-ipfs-core/src/utils/mocha.js @@ -6,7 +6,7 @@ const chai = require('chai') // Do not reorder these statements - https://github.com/chaijs/chai/issues/1298 chai.use(require('chai-as-promised')) chai.use(require('dirty-chai')) -chai.use(require('chai-things')) +chai.use(require('chai-subset')) module.exports.expect = chai.expect diff --git a/packages/ipfs-mfs/test/helpers/traverse-leaf-nodes.js b/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js similarity index 70% rename from packages/ipfs-mfs/test/helpers/traverse-leaf-nodes.js rename to packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js index 58e2fdb564..56cf220453 100644 --- a/packages/ipfs-mfs/test/helpers/traverse-leaf-nodes.js +++ b/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js @@ -1,8 +1,8 @@ 'use strict' -module.exports = function traverseLeafNodes (mfs, cid) { +module.exports = function traverseLeafNodes (ipfs, cid) { async function * traverse (cid) { - const node = await mfs.ipld.get(cid) + const { value: node } = await ipfs.dag.get(cid) if (Buffer.isBuffer(node) || !node.Links.length) { yield { diff --git a/packages/ipfs-http-client/package.json b/packages/ipfs-http-client/package.json index a0530e75fe..2a111071ae 100644 --- a/packages/ipfs-http-client/package.json +++ b/packages/ipfs-http-client/package.json @@ -15,8 +15,7 @@ ], "main": "src/index.js", "browser": { - "./src/add/form-data.js": "./src/add/form-data.browser.js", - "./src/lib/buffer-to-form-data.js": "./src/lib/buffer-to-form-data.browser.js", + "./src/lib/to-stream.js": "./src/lib/to-stream.browser.js", "ipfs-utils/src/files/glob-source": false }, "repository": { @@ -53,6 +52,7 @@ "ipld-raw": "^4.0.1", "iso-url": "^0.4.6", "it-tar": "^1.2.1", + "it-to-buffer": "^1.0.0", "it-to-stream": "^0.1.1", "merge-options": "^2.0.0", "multiaddr": "^7.2.1", @@ -60,12 +60,13 @@ "multibase": "^0.6.0", "multicodec": "^1.0.0", "multihashes": "^0.4.14", + "nanoid": "^2.1.11", "node-fetch": "^2.6.0", "parse-duration": "^0.1.2", "stream-to-it": "^0.2.0" }, "devDependencies": { - "aegir": "^21.3.0", + "aegir": "21.3.0", "browser-process-platform": "^0.1.1", "cross-env": "^7.0.0", "go-ipfs-dep": "0.4.23-3", diff --git a/packages/ipfs-http-client/src/add/index.js b/packages/ipfs-http-client/src/add.js similarity index 65% rename from packages/ipfs-http-client/src/add/index.js rename to packages/ipfs-http-client/src/add.js index 94e3fd6dc3..e2fb989fc1 100644 --- a/packages/ipfs-http-client/src/add/index.js +++ b/packages/ipfs-http-client/src/add.js @@ -1,29 +1,27 @@ 'use strict' const CID = require('cids') -const merge = require('merge-options') -const { toFormData } = require('./form-data') -const toCamel = require('../lib/object-to-camel') -const configure = require('../lib/configure') +const toCamel = require('./lib/object-to-camel') +const configure = require('./lib/configure') +const multipartRequest = require('./lib/multipart-request') +const toUrlSearchParams = require('./lib/to-url-search-params') module.exports = configure((api) => { return async function * add (input, options = {}) { const progressFn = options.progress - options = merge( - options, - { - 'stream-channels': true, - progress: Boolean(progressFn), - hash: options.hashAlg // TODO fix this either is hash or hashAlg - } - ) const res = await api.ndjson('add', { method: 'POST', - searchParams: options, - body: await toFormData(input), + searchParams: toUrlSearchParams(null, { + ...options, + 'stream-channels': true, + progress: Boolean(progressFn) + }), timeout: options.timeout, - signal: options.signal + signal: options.signal, + ...( + await multipartRequest(input) + ) }) for await (let file of res) {
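Since `add` now streams its ndjson response as an async iterable and the wrapping directory is always the last entry yielded, callers that only need the root can take it with `it-last` instead of buffering every entry with `it-all`. A small consumer sketch (paths and content are placeholders):

const last = require('it-last')

// `ipfs` is assumed to be a configured ipfs-http-client instance
async function addAndGetRootCid (ipfs) {
  // one entry is yielded per file added; the enclosing directory arrives last
  const root = await last(ipfs.add([
    { path: 'dir/a.txt', content: Buffer.from('a') },
    { path: 'dir/b.txt', content: Buffer.from('b') }
  ]))

  return root.cid
}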
diff --git a/packages/ipfs-http-client/src/add/form-data.browser.js b/packages/ipfs-http-client/src/add/form-data.browser.js deleted file mode 100644 index 2b26f7abec..0000000000 --- a/packages/ipfs-http-client/src/add/form-data.browser.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict' -/* eslint-env browser */ - -const normaliseInput = require('ipfs-utils/src/files/normalise-input') -const mtimeToObject = require('../lib/mtime-to-object') - -exports.toFormData = async input => { - const files = normaliseInput(input) - const formData = new FormData() - let i = 0 - - for await (const file of files) { - // TODO FormData.append doesnt have a 4th arg - const headers = {} - - if (file.mtime !== undefined && file.mtime !== null) { - const mtime = mtimeToObject(file.mtime) - - if (mtime) { - headers.mtime = mtime.secs - headers['mtime-nsecs'] = mtime.nsecs - } - } - - if (file.mode !== undefined && file.mode !== null) { - headers.mode = file.mode.toString(8).padStart(4, '0') - } - - if (file.content) { - // In the browser there's _currently_ no streaming upload, buffer up our - // async iterator chunks and append a big Blob :( - // One day, this will be browser streams - const bufs = [] - for await (const chunk of file.content) { - bufs.push(chunk) - } - - formData.append( - `file-${i}`, - new Blob(bufs, { type: 'application/octet-stream' }), - encodeURIComponent(file.path) - // { - // header: headers - // } - ) - } else { - formData.append( - `dir-${i}`, - new Blob([], { type: 'application/x-directory' }), - encodeURIComponent(file.path) - // { - // header: headers - // } - ) - } - - i++ - } - - return formData -} diff --git a/packages/ipfs-http-client/src/add/form-data.js b/packages/ipfs-http-client/src/add/form-data.js deleted file mode 100644 index 80411c3c6d..0000000000 --- a/packages/ipfs-http-client/src/add/form-data.js +++ /dev/null @@ -1,60 +0,0 @@ -'use strict' - -const FormData = require('form-data') -const { Buffer } = require('buffer') -const toStream = require('it-to-stream') -const normaliseInput = require('ipfs-utils/src/files/normalise-input') -const mtimeToObject = require('../lib/mtime-to-object') - -exports.toFormData = async input => { - const files = normaliseInput(input) - const formData = new FormData() - let i = 0 - - for await (const file of files) { - const headers = {} - - if (file.mtime !== undefined && file.mtime !== null) { - const mtime = mtimeToObject(file.mtime) - - if (mtime) { - headers.mtime = mtime.secs - headers['mtime-nsecs'] = mtime.nsecs - } - } - - if (file.mode !== undefined && file.mode !== null) { - headers.mode = file.mode.toString(8).padStart(4, '0') - } - - if (file.content) { - // In Node.js, FormData can be passed a stream so no need to buffer - formData.append( - `file-${i}`, - // FIXME: add a `path` property to the stream so `form-data` doesn't set - // a Content-Length header that is only the sum of the size of the - // header/footer when knownLength option (below) is null.
- Object.assign( - toStream.readable(file.content), - { path: file.path || `file-${i}` } - ), - { - filepath: encodeURIComponent(file.path), - contentType: 'application/octet-stream', - knownLength: file.content.length, // Send Content-Length header if known - header: headers - } - ) - } else { - formData.append(`dir-${i}`, Buffer.alloc(0), { - filepath: encodeURIComponent(file.path), - contentType: 'application/x-directory', - header: headers - }) - } - - i++ - } - - return formData -} diff --git a/packages/ipfs-http-client/src/block/put.js b/packages/ipfs-http-client/src/block/put.js index 8adcd2946c..fa6fcf216f 100644 --- a/packages/ipfs-http-client/src/block/put.js +++ b/packages/ipfs-http-client/src/block/put.js @@ -3,7 +3,7 @@ const Block = require('ipfs-block') const CID = require('cids') const multihash = require('multihashes') -const toFormData = require('../lib/buffer-to-form-data') +const multipartRequest = require('../lib/multipart-request') const configure = require('../lib/configure') module.exports = configure(api => { @@ -37,7 +37,9 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: options, - body: toFormData(data) + ...( + await multipartRequest(data) + ) }) res = await response.json() } catch (err) { diff --git a/packages/ipfs-http-client/src/config/replace.js b/packages/ipfs-http-client/src/config/replace.js index 55d86fc907..db94fe113c 100644 --- a/packages/ipfs-http-client/src/config/replace.js +++ b/packages/ipfs-http-client/src/config/replace.js @@ -1,7 +1,7 @@ 'use strict' const { Buffer } = require('buffer') -const toFormData = require('../lib/buffer-to-form-data') +const multipartRequest = require('../lib/multipart-request') const configure = require('../lib/configure') module.exports = configure(api => { @@ -10,7 +10,9 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: options, - body: toFormData(Buffer.from(JSON.stringify(config))) + ...( + await multipartRequest(Buffer.from(JSON.stringify(config))) + ) }) return res.text() diff --git a/packages/ipfs-http-client/src/dag/put.js b/packages/ipfs-http-client/src/dag/put.js index 05707f4f3a..733de11611 100644 --- a/packages/ipfs-http-client/src/dag/put.js +++ b/packages/ipfs-http-client/src/dag/put.js @@ -3,8 +3,8 @@ const dagCBOR = require('ipld-dag-cbor') const CID = require('cids') const multihash = require('multihashes') -const toFormData = require('../lib/buffer-to-form-data') const configure = require('../lib/configure') +const multipartRequest = require('../lib/multipart-request') module.exports = configure(api => { return async (dagNode, options = {}) => { @@ -51,7 +51,9 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams, - body: toFormData(serialized) + ...( + await multipartRequest(serialized) + ) }) const data = await rsp.json() diff --git a/packages/ipfs-http-client/src/files/chmod.js b/packages/ipfs-http-client/src/files/chmod.js index cc1a3f3a4f..0990b65a0c 100644 --- a/packages/ipfs-http-client/src/files/chmod.js +++ b/packages/ipfs-http-client/src/files/chmod.js @@ -1,21 +1,16 @@ 'use strict' -const modeToString = require('../lib/mode-to-string') const configure = require('../lib/configure') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { return async function chmod (path, mode, options = {}) { - options.arg = path - options.mode = modeToString(mode) - options.hash = options.hashAlg - 
options.hashAlg = null - const res = await api.post('files/chmod', { timeout: options.timeout, signal: options.signal, - searchParams: options + searchParams: toUrlSearchParams(path, { ...options, mode }) }) - return res.text() + await res.text() } }) diff --git a/packages/ipfs-http-client/src/files/cp.js b/packages/ipfs-http-client/src/files/cp.js index 571e3dd1bb..f266f7b94b 100644 --- a/packages/ipfs-http-client/src/files/cp.js +++ b/packages/ipfs-http-client/src/files/cp.js @@ -3,20 +3,21 @@ const CID = require('cids') const { findSources } = require('./utils') const configure = require('../lib/configure') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { return async (...args) => { const { sources, options } = findSources(args) - const searchParams = new URLSearchParams(options) - sources.forEach(src => searchParams.append('arg', CID.isCID(src) ? `/ipfs/${src}` : src)) - if (options.hashAlg) searchParams.set('hash', options.hashAlg) - const res = await api.post('files/cp', { timeout: options.timeout, signal: options.signal, - searchParams + searchParams: toUrlSearchParams( + sources.map(src => CID.isCID(src) ? `/ipfs/${src}` : src), + options + ) }) - return res.text() + + await res.text() } }) diff --git a/packages/ipfs-http-client/src/files/index.js b/packages/ipfs-http-client/src/files/index.js index 5dbf395a27..563cff691e 100644 --- a/packages/ipfs-http-client/src/files/index.js +++ b/packages/ipfs-http-client/src/files/index.js @@ -3,13 +3,13 @@ module.exports = config => ({ chmod: require('./chmod')(config), cp: require('./cp')(config), - mkdir: require('./mkdir')(config), flush: require('./flush')(config), - stat: require('./stat')(config), - rm: require('./rm')(config), ls: require('./ls')(config), + mkdir: require('./mkdir')(config), + mv: require('./mv')(config), read: require('./read')(config), + rm: require('./rm')(config), + stat: require('./stat')(config), touch: require('./touch')(config), - write: require('./write')(config), - mv: require('./mv')(config) + write: require('./write')(config) }) diff --git a/packages/ipfs-http-client/src/files/ls.js b/packages/ipfs-http-client/src/files/ls.js index 639c466747..40b2a35b69 100644 --- a/packages/ipfs-http-client/src/files/ls.js +++ b/packages/ipfs-http-client/src/files/ls.js @@ -3,6 +3,7 @@ const CID = require('cids') const toCamelWithMetadata = require('../lib/object-to-camel-with-metadata') const configure = require('../lib/configure') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { return async function * ls (path, options = {}) { @@ -11,19 +12,22 @@ module.exports = configure(api => { path = '/' } - const searchParams = new URLSearchParams(options) - searchParams.set('arg', CID.isCID(path) ? `/ipfs/${path}` : path) - // TODO the args below are not in the go-ipfs or interface core - searchParams.set('stream', options.stream == null ? true : options.stream) - searchParams.set('long', options.long == null ? true : options.long) - // TODO: remove after go-ipfs 0.5 is released - searchParams.set('l', options.long == null ? true : options.long) - const res = await api.ndjson('files/ls', { method: 'POST', timeout: options.timeout, signal: options.signal, - searchParams + searchParams: toUrlSearchParams( + CID.isCID(path) ? `/ipfs/${path}` : path, { + ...options, + + // TODO the args below are not in the go-ipfs or interface core + stream: options.stream == null ? true : options.stream, + long: options.long == null ? 
true : options.long, + + // TODO: remove after go-ipfs 0.5 is released + l: options.long == null ? true : options.long + } + ) }) for await (const result of res) { diff --git a/packages/ipfs-http-client/src/files/mkdir.js b/packages/ipfs-http-client/src/files/mkdir.js index 2eaebb795c..da4976c1f5 100644 --- a/packages/ipfs-http-client/src/files/mkdir.js +++ b/packages/ipfs-http-client/src/files/mkdir.js @@ -1,28 +1,16 @@ 'use strict' -const modeToString = require('../lib/mode-to-string') -const mtimeToObject = require('../lib/mtime-to-object') const configure = require('../lib/configure') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { return async (path, options = {}) => { - const mtime = mtimeToObject(options.mtime) - - const searchParams = new URLSearchParams(options) - searchParams.set('arg', path) - searchParams.set('mode', modeToString(options.mode)) - searchParams.set('hash', options.hashAlg) - searchParams.set('hashAlg', null) - if (mtime) { - searchParams.set('mtime', mtime.secs) - searchParams.set('mtimeNsecs', mtime.nsecs) - } - const res = await api.post('files/mkdir', { timeout: options.timeout, signal: options.signal, - searchParams + searchParams: toUrlSearchParams(path, options) }) - return res.text() + + await res.text() } }) diff --git a/packages/ipfs-http-client/src/files/mv.js b/packages/ipfs-http-client/src/files/mv.js index ce4d8c381b..183c7abed9 100644 --- a/packages/ipfs-http-client/src/files/mv.js +++ b/packages/ipfs-http-client/src/files/mv.js @@ -3,21 +3,21 @@ const CID = require('cids') const { findSources } = require('./utils') const configure = require('../lib/configure') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { return async (...args) => { const { sources, options } = findSources(args) - const searchParams = new URLSearchParams(options) - sources.forEach(src => searchParams.append('arg', CID.isCID(src) ? `/ipfs/${src}` : src)) - if (options.hashAlg) searchParams.set('hash', options.hashAlg) - const res = await api.post('files/mv', { timeout: options.timeout, signal: options.signal, - searchParams + searchParams: toUrlSearchParams( + sources.map(src => CID.isCID(src) ? 
`/ipfs/${src}` : src), + options + ) }) - return res.text() + await res.text() } }) diff --git a/packages/ipfs-http-client/src/files/read.js b/packages/ipfs-http-client/src/files/read.js index 38a51f3f6d..995b259207 100644 --- a/packages/ipfs-http-client/src/files/read.js +++ b/packages/ipfs-http-client/src/files/read.js @@ -3,14 +3,17 @@ const { Buffer } = require('buffer') const toIterable = require('stream-to-it/source') const configure = require('../lib/configure') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { return async function * read (path, options = {}) { - options.arg = path const res = await api.post('files/read', { timeout: options.timeout, signal: options.signal, - searchParams: options + searchParams: toUrlSearchParams(path, { + ...options, + count: options.count || options.length + }) }) for await (const chunk of toIterable(res.body)) { diff --git a/packages/ipfs-http-client/src/files/rm.js b/packages/ipfs-http-client/src/files/rm.js index dfa2609fe4..d1ee3853c5 100644 --- a/packages/ipfs-http-client/src/files/rm.js +++ b/packages/ipfs-http-client/src/files/rm.js @@ -1,16 +1,19 @@ 'use strict' const configure = require('../lib/configure') +const { findSources } = require('./utils') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (path, options = {}) => { - options.arg = path + return async (...args) => { + const { sources, options } = findSources(args) + const res = await api.post('files/rm', { timeout: options.timeout, signal: options.signal, - searchParams: options + searchParams: toUrlSearchParams(sources, options) }) - return res.text() + await res.text() } }) diff --git a/packages/ipfs-http-client/src/files/stat.js b/packages/ipfs-http-client/src/files/stat.js index cdf7e51e9a..a5421d7069 100644 --- a/packages/ipfs-http-client/src/files/stat.js +++ b/packages/ipfs-http-client/src/files/stat.js @@ -3,6 +3,7 @@ const CID = require('cids') const toCamelWithMetadata = require('../lib/object-to-camel-with-metadata') const configure = require('../lib/configure') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { return async (path, options = {}) => { @@ -11,13 +12,10 @@ module.exports = configure(api => { path = '/' } - const searchParams = new URLSearchParams(options) - searchParams.set('arg', path) - const res = await api.post('files/stat', { timeout: options.timeout, signal: options.signal, - searchParams + searchParams: toUrlSearchParams(path, options) }) const data = await res.json() diff --git a/packages/ipfs-http-client/src/files/touch.js b/packages/ipfs-http-client/src/files/touch.js index 11568bb4f5..b9330e4567 100644 --- a/packages/ipfs-http-client/src/files/touch.js +++ b/packages/ipfs-http-client/src/files/touch.js @@ -1,26 +1,16 @@ 'use strict' -const mtimeToObject = require('../lib/mtime-to-object') const configure = require('../lib/configure') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { return async function touch (path, options = {}) { - const mtime = mtimeToObject(options.mtime) - - const searchParams = new URLSearchParams(options) - searchParams.append('arg', path) - if (mtime) { - searchParams.set('mtime', mtime.secs) - searchParams.set('mtimeNsecs', mtime.nsecs) - } - searchParams.set('hash', options.hashAlg) - searchParams.set('hashAlg', null) - const res = await api.post('files/touch', { timeout: options.timeout, signal: 
options.signal, - searchParams + searchParams: toUrlSearchParams(path, options) }) - return res.text() + + await res.text() } }) diff --git a/packages/ipfs-http-client/src/files/write.js b/packages/ipfs-http-client/src/files/write.js index 06c43ab596..764d87db82 100644 --- a/packages/ipfs-http-client/src/files/write.js +++ b/packages/ipfs-http-client/src/files/write.js @@ -1,35 +1,31 @@ 'use strict' -const toFormData = require('../lib/buffer-to-form-data') const modeToString = require('../lib/mode-to-string') const mtimeToObject = require('../lib/mtime-to-object') const configure = require('../lib/configure') +const multipartRequest = require('../lib/multipart-request') +const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { return async (path, input, options = {}) => { - const mtime = mtimeToObject(options.mtime) - - const searchParams = new URLSearchParams(options) - searchParams.set('arg', path) - searchParams.set('stream-channels', 'true') - searchParams.set('hash', options.hashAlg) - searchParams.set('hashAlg', null) - if (mtime) { - searchParams.set('mtime', mtime.secs) - searchParams.set('mtimeNsecs', mtime.nsecs) - } - const res = await api.post('files/write', { timeout: options.timeout, signal: options.signal, - searchParams, - body: toFormData(input, { - mode: options.mode != null ? modeToString(options.mode) : undefined, - mtime: mtime ? mtime.secs : undefined, - mtimeNsecs: mtime ? mtime.nsecs : undefined - }) // TODO: support inputs other than buffer as per spec + searchParams: toUrlSearchParams(path, { + ...options, + streamChannels: true, + count: options.count || options.length + }), + ...( + await multipartRequest({ + content: input, + path: 'arg', + mode: modeToString(options.mode), + mtime: mtimeToObject(options.mtime) + }) + ) }) - return res.text() + await res.text() } }) diff --git a/packages/ipfs-http-client/src/lib/core.js b/packages/ipfs-http-client/src/lib/core.js index 33ae8d14be..bb38e5d5b4 100644 --- a/packages/ipfs-http-client/src/lib/core.js +++ b/packages/ipfs-http-client/src/lib/core.js @@ -122,6 +122,7 @@ class Client extends HTTP { out.append(kebabCase(key), value) } } + return out } }) diff --git a/packages/ipfs-http-client/src/lib/multipart-request.js b/packages/ipfs-http-client/src/lib/multipart-request.js new file mode 100644 index 0000000000..7dbe1edeb1 --- /dev/null +++ b/packages/ipfs-http-client/src/lib/multipart-request.js @@ -0,0 +1,65 @@ +'use strict' + +const normaliseInput = require('ipfs-utils/src/files/normalise-input') +const toStream = require('./to-stream') +const nanoid = require('nanoid') +const modeToString = require('../lib/mode-to-string') +const mtimeToObject = require('../lib/mtime-to-object') + +async function multipartRequest (source, boundary = `-----------------------------${nanoid()}`) { + async function * streamFiles (source) { + try { + let index = 0 + + for await (const { content, path, mode, mtime } of normaliseInput(source)) { + let fileSuffix = '' + const type = content ? 'file' : 'dir' + + if (index > 0) { + yield '\r\n' + + fileSuffix = `-${index}` + } + + yield `--${boundary}\r\n` + yield `Content-Disposition: form-data; name="${type}${fileSuffix}"; filename="${encodeURIComponent(path)}"\r\n` + yield `Content-Type: ${content ? 
'application/octet-stream' : 'application/x-directory'}\r\n` + + if (mode !== null && mode !== undefined) { + yield `mode: ${modeToString(mode)}\r\n` + } + + if (mtime != null) { + const { + secs, nsecs + } = mtimeToObject(mtime) + + yield `mtime: ${secs}\r\n` + + if (nsecs != null) { + yield `mtime-nsecs: ${nsecs}\r\n` + } + } + + yield '\r\n' + + if (content) { + yield * content + } + + index++ + } + } finally { + yield `\r\n--${boundary}--\r\n` + } + } + + return { + headers: { + 'Content-Type': `multipart/form-data; boundary=${boundary}` + }, + body: await toStream(streamFiles(source)) + } +} + +module.exports = multipartRequest diff --git a/packages/ipfs-http-client/src/lib/to-stream.browser.js b/packages/ipfs-http-client/src/lib/to-stream.browser.js new file mode 100644 index 0000000000..825a2134b5 --- /dev/null +++ b/packages/ipfs-http-client/src/lib/to-stream.browser.js @@ -0,0 +1,21 @@ +'use strict' + +// Browsers can't stream. When the 'Send ReadableStream in request body' row +// is green here: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request#Browser_compatibility +// we'll be able to wrap the passed iterator in the it-to-browser-readablestream module. +// In the meantime we have to convert the whole thing to a BufferSource of some sort. +const toBuffer = require('it-to-buffer') + +module.exports = (it) => { + async function * bufferise (source) { + for await (const chunk of source) { + if (Buffer.isBuffer(chunk)) { + yield chunk + } else { + yield Buffer.from(chunk) + } + } + } + + return toBuffer(bufferise(it)) +} diff --git a/packages/ipfs-http-client/src/lib/to-stream.js b/packages/ipfs-http-client/src/lib/to-stream.js new file mode 100644 index 0000000000..f0f59ffc50 --- /dev/null +++ b/packages/ipfs-http-client/src/lib/to-stream.js @@ -0,0 +1,7 @@ +'use strict' + +const toStream = require('it-to-stream') + +module.exports = (it) => { + return toStream.readable(it) +} diff --git a/packages/ipfs-http-client/src/lib/to-url-search-params.js b/packages/ipfs-http-client/src/lib/to-url-search-params.js new file mode 100644 index 0000000000..21ee3ea87f --- /dev/null +++ b/packages/ipfs-http-client/src/lib/to-url-search-params.js @@ -0,0 +1,34 @@ +'use strict' + +const modeToString = require('./mode-to-string') +const mtimeToObject = require('./mtime-to-object') + +module.exports = (args, options) => { + const searchParams = new URLSearchParams(options) + + if (args == null) { + args = [] + } else if (!Array.isArray(args)) { + args = [args] + } + + args.forEach(arg => searchParams.append('arg', arg)) + + if (options.hashAlg) { + searchParams.set('hash', options.hashAlg) + searchParams.delete('hashAlg') + } + + if (options.mtime != null) { + const mtime = mtimeToObject(options.mtime) + + searchParams.set('mtime', mtime.secs) + searchParams.set('mtime-nsecs', mtime.nsecs) + } + + if (options.mode != null) { + searchParams.set('mode', modeToString(options.mode)) + } + + return searchParams +}
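Together these two helpers define the request shape every rewritten command follows: `toUrlSearchParams` builds the query string (folding `hashAlg`, `mode` and `mtime` into their wire formats) and the awaited `multipartRequest` is spread into the request options to contribute the multipart `Content-Type` header and body. A quick sketch of the multipart helper's contract, requiring it as a sibling module; the explicit boundary is passed only for illustration and defaults to a nanoid-based one:

const multipartRequest = require('./multipart-request')

async function demo () {
  // a bare Buffer is normalised to a single multipart part named "file"
  const { headers, body } = await multipartRequest(Buffer.from('hello'), 'example-boundary')

  console.info(headers['Content-Type']) // multipart/form-data; boundary=example-boundary
  // in Node.js `body` is a readable stream; the browser build buffers it into a BufferSource
}

demo().catch(console.error)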
diff --git a/packages/ipfs-http-client/src/object/patch/append-data.js b/packages/ipfs-http-client/src/object/patch/append-data.js index 93615e96f7..7c54adb177 100644 --- a/packages/ipfs-http-client/src/object/patch/append-data.js +++ b/packages/ipfs-http-client/src/object/patch/append-data.js @@ -2,7 +2,7 @@ const { Buffer } = require('buffer') const CID = require('cids') -const toFormData = require('../../lib/buffer-to-form-data') +const multipartRequest = require('../../lib/multipart-request') const configure = require('../../lib/configure') module.exports = configure(api => { @@ -14,7 +14,9 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams, - body: toFormData(data) + ...( + await multipartRequest(data) + ) })).json() return new CID(Hash) diff --git a/packages/ipfs-http-client/src/object/patch/set-data.js b/packages/ipfs-http-client/src/object/patch/set-data.js index 1169520e67..af75ffea72 100644 --- a/packages/ipfs-http-client/src/object/patch/set-data.js +++ b/packages/ipfs-http-client/src/object/patch/set-data.js @@ -2,7 +2,7 @@ const { Buffer } = require('buffer') const CID = require('cids') -const toFormData = require('../../lib/buffer-to-form-data') +const multipartRequest = require('../../lib/multipart-request') const configure = require('../../lib/configure') module.exports = configure(api => { @@ -14,7 +14,9 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams, - body: toFormData(data) + ...( + await multipartRequest(data) + ) })).json() return new CID(Hash) diff --git a/packages/ipfs-http-client/src/object/put.js b/packages/ipfs-http-client/src/object/put.js index 8f9cccd215..06fc588961 100644 --- a/packages/ipfs-http-client/src/object/put.js +++ b/packages/ipfs-http-client/src/object/put.js @@ -3,7 +3,7 @@ const CID = require('cids') const { DAGNode } = require('ipld-dag-pb') const { Buffer } = require('buffer') -const toFormData = require('../lib/buffer-to-form-data') +const multipartRequest = require('../lib/multipart-request') const configure = require('../lib/configure') module.exports = configure(api => { @@ -47,7 +47,9 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: options, - body: toFormData(buf) + ...( + await multipartRequest(buf) + ) }) const { Hash } = await res.json() diff --git a/packages/ipfs-http-client/test/files-mfs.spec.js b/packages/ipfs-http-client/test/files-mfs.spec.js deleted file mode 100644 index 87b5f2116c..0000000000 --- a/packages/ipfs-http-client/test/files-mfs.spec.js +++ /dev/null @@ -1,389 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ -'use strict' - -const { expect } = require('interface-ipfs-core/src/utils/mocha') -const loadFixture = require('aegir/fixtures') -const mh = require('multihashes') -const all = require('it-all') -const pipe = require('it-pipe') -const API = require('../src/lib/core') - -const f = require('./utils/factory')() - -const testfile = loadFixture('test/fixtures/testfile.txt') - -// TODO: Test against all algorithms Object.keys(mh.names) -// This subset is known to work with both go-ipfs and js-ipfs as of 2017-09-05 -const HASH_ALGS = [ - 'sha1', - 'sha2-256', - 'sha2-512', - // 'keccak-224', // go throws - 'keccak-256', - // 'keccak-384', // go throws - 'keccak-512' -] - -describe('.files (the MFS API part)', function () { - this.timeout(20 * 1000) - - let ipfs - - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - - before(async () => { - ipfs = (await f.spawn()).api - }) - - after(() => f.clean()) - - it('.add file for testing', async () => { - const res = await all(ipfs.add(testfile)) - - expect(res).to.have.length(1) - expect(res[0].cid.toString()).to.equal(expectedMultihash) - expect(res[0].path).to.equal(expectedMultihash) - }) - - it('.add with Buffer module', async () => { - const { Buffer } = require('buffer') - - const expectedBufferMultihash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX' - const file = Buffer.from('hello') - - const res = await
all(ipfs.add(file)) - - expect(res).to.have.length(1) - expect(res[0].cid.toString()).to.equal(expectedBufferMultihash) - expect(res[0].path).to.equal(expectedBufferMultihash) - }) - - it('.add with empty path and buffer content', async () => { - const expectedHash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX' - const content = Buffer.from('hello') - - const res = await all(ipfs.add([{ path: '', content }])) - - expect(res).to.have.length(1) - expect(res[0].cid.toString()).to.equal(expectedHash) - expect(res[0].path).to.equal(expectedHash) - }) - - it('.add with cid-version=1 and raw-leaves=false', async () => { - const expectedCid = 'bafybeifogzovjqrcxvgt7g36y7g63hvwvoakledwk4b2fr2dl4wzawpnny' - const options = { cidVersion: 1, rawLeaves: false } - - const res = await all(ipfs.add(testfile, options)) - - expect(res).to.have.length(1) - expect(res[0].cid.toString()).to.equal(expectedCid) - expect(res[0].path).to.equal(expectedCid) - }) - - it('.add with only-hash=true', async () => { - const content = String(Math.random() + Date.now()) - - const files = await all(ipfs.add(Buffer.from(content), { onlyHash: true })) - expect(files).to.have.length(1) - - // 'ipfs.object.get()' should timeout because content wasn't actually added - return expect(ipfs.object.get(files[0].cid, { timeout: 2000 })) - .to.be.rejectedWith(API.TimeoutError) - }) - - it('.add with options', async () => { - const res = await all(ipfs.add(testfile, { pin: false })) - - expect(res).to.have.length(1) - expect(res[0].cid.toString()).to.equal(expectedMultihash) - expect(res[0].path).to.equal(expectedMultihash) - }) - - it('.add pins by default', async () => { - const newContent = Buffer.from(String(Math.random())) - - const initialPins = await all(ipfs.pin.ls()) - - await all(ipfs.add(newContent)) - - const pinsAfterAdd = await all(ipfs.pin.ls()) - - expect(pinsAfterAdd.length).to.eql(initialPins.length + 1) - }) - - it('.add with pin=false', async () => { - const newContent = Buffer.from(String(Math.random())) - - const initialPins = await all(ipfs.pin.ls()) - - await all(ipfs.add(newContent, { pin: false })) - - const pinsAfterAdd = await all(ipfs.pin.ls()) - - expect(pinsAfterAdd.length).to.eql(initialPins.length) - }) - - HASH_ALGS.forEach((name) => { - it(`.add with hash=${name} and raw-leaves=false`, async () => { - const content = String(Math.random() + Date.now()) - const file = { - path: content + '.txt', - content: Buffer.from(content) - } - const options = { hashAlg: name, rawLeaves: false } - - const res = await all(ipfs.add([file], options)) - - expect(res).to.have.length(1) - const { cid } = res[0] - expect(mh.decode(cid.multihash).name).to.equal(name) - }) - }) - - it('.add file with progress option', async () => { - let progress - let progressCount = 0 - - const progressHandler = (p) => { - progressCount += 1 - progress = p - } - - const res = await all(ipfs.add(testfile, { progress: progressHandler })) - - expect(res).to.have.length(1) - expect(progress).to.be.equal(testfile.byteLength) - expect(progressCount).to.be.equal(1) - }) - - it('.add big file with progress option', async () => { - let progress = 0 - let progressCount = 0 - - const progressHandler = (p) => { - progressCount += 1 - progress = p - } - - // TODO: needs to be using a big file - const res = await all(ipfs.add(testfile, { progress: progressHandler })) - - expect(res).to.have.length(1) - expect(progress).to.be.equal(testfile.byteLength) - expect(progressCount).to.be.equal(1) - }) - - it('.add directory with progress option', async () 
=> { - let progress = 0 - let progressCount = 0 - - const progressHandler = (p) => { - progressCount += 1 - progress = p - } - - // TODO: needs to be using a directory - const res = await all(ipfs.add(testfile, { progress: progressHandler })) - - expect(res).to.have.length(1) - expect(progress).to.be.equal(testfile.byteLength) - expect(progressCount).to.be.equal(1) - }) - - it('.add without progress options', async () => { - const res = await all(ipfs.add(testfile)) - - expect(res).to.have.length(1) - }) - - HASH_ALGS.forEach((name) => { - it(`.add with hash=${name} and raw-leaves=false`, async () => { - const content = String(Math.random() + Date.now()) - const file = { - path: content + '.txt', - content: Buffer.from(content) - } - const options = { hashAlg: name, rawLeaves: false } - - const res = await all(ipfs.add([file], options)) - - expect(res).to.have.length(1) - const { cid } = res[0] - expect(mh.decode(cid.multihash).name).to.equal(name) - }) - }) - - it('.add with object chunks and iterable content', async () => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - - const res = await pipe( - [{ content: [Buffer.from('test')] }], - ipfs.add, - all - ) - - expect(res).to.have.length(1) - res[0].cid = res[0].cid.toString() - expect(res[0]).to.deep.equal({ path: expectedCid, cid: expectedCid, size: 12 }) - }) - - it('.add with iterable', async () => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - const res = await all(ipfs.add([Buffer.from('test')])) - - expect(res).to.have.length(1) - res[0].cid = res[0].cid.toString() - expect(res[0]).to.deep.equal({ path: expectedCid, cid: expectedCid, size: 12 }) - }) - - it('files.mkdir', async () => { - await ipfs.files.mkdir('/test-folder') - }) - - it('files.flush', async () => { - await ipfs.files.flush('/') - }) - - it('files.cp', async () => { - const folder = `/test-folder-${Math.random()}` - - await ipfs.files.mkdir(folder) - await ipfs.files.cp([ - '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - `${folder}/test-file-${Math.random()}` - ]) - }) - - it('files.cp with non-array arguments', async () => { - const folder = `/test-folder-${Math.random()}` - - await ipfs.files.mkdir(folder) - await ipfs.files.cp( - '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - `${folder}/test-file-${Math.random()}` - ) - }) - - it('files.mv', async () => { - const folder = `/test-folder-${Math.random()}` - const source = `${folder}/test-file-${Math.random()}` - const dest = `${folder}/test-file-${Math.random()}` - - await ipfs.files.mkdir(folder) - await ipfs.files.cp( - '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - source - ) - await ipfs.files.mv([ - source, - dest - ]) - }) - - it('files.mv with non-array arguments', async () => { - const folder = `/test-folder-${Math.random()}` - const source = `${folder}/test-file-${Math.random()}` - const dest = `${folder}/test-file-${Math.random()}` - - await ipfs.files.mkdir(folder) - await ipfs.files.cp( - '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - source - ) - await ipfs.files.mv( - source, - dest - ) - }) - - it('files.ls', async () => { - const folder = `/test-folder-${Math.random()}` - const file = `${folder}/test-file-${Math.random()}` - - await ipfs.files.mkdir(folder) - await ipfs.files.write(file, Buffer.from('Hello, world'), { - create: true - }) - const files = await all(ipfs.files.ls(folder)) - - expect(files.length).to.equal(1) - }) - - it('files.ls mfs root by default', async () => { - const folder = 
`test-folder-${Math.random()}` - - await ipfs.files.mkdir(`/${folder}`) - const files = await all(ipfs.files.ls()) - - expect(files.find(file => file.name === folder)).to.be.ok() - }) - - it('files.write', async () => { - await ipfs.files.write('/test-folder/test-file-2.txt', Buffer.from('hello world'), { - create: true - }) - - const buf = Buffer.concat(await all(ipfs.files.read('/test-folder/test-file-2.txt'))) - - expect(buf.toString()).to.be.equal('hello world') - }) - - it('files.write without options', async () => { - await ipfs.files.write('/test-folder/test-file-2.txt', Buffer.from('hello world')) - - const buf = Buffer.concat(await all(ipfs.files.read('/test-folder/test-file-2.txt'))) - - expect(buf.toString()).to.be.equal('hello world') - }) - - it('files.stat', async () => { - const folder = `/test-folder-${Math.random()}` - const file = `${folder}/test-file-${Math.random()}` - - await ipfs.files.mkdir(folder) - await ipfs.files.write(file, testfile, { - create: true - }) - - const stats = await ipfs.files.stat(file) - stats.cid = stats.cid.toString() - - expect(stats).to.deep.equal({ - cid: 'QmQhouoDPAnzhVM148yCa9CbUXK65wSEAZBtgrLGHtmdmP', - size: 12, - cumulativeSize: 70, - blocks: 1, - type: 'file', - withLocality: false - }) - }) - - it('files.stat file that does not exist()', async () => { - await expect(ipfs.files.stat('/test-folder/does-not-exist()')).to.be.rejectedWith({ - code: 0, - type: 'error' - }) - }) - - it('files.read', async () => { - const folder = `/test-folder-${Math.random()}` - const file = `${folder}/test-file-${Math.random()}` - - await ipfs.files.mkdir(folder) - await ipfs.files.write(file, testfile, { - create: true - }) - const buf = Buffer.concat(await all(ipfs.files.read(file))) - - expect(Buffer.from(buf)).to.deep.equal(testfile) - }) - - it('files.rm without options', async () => { - await ipfs.files.rm('/test-folder/test-file-2.txt') - }) - - it('files.rm', async () => { - await ipfs.files.rm('/test-folder', { recursive: true }) - }) -}) diff --git a/packages/ipfs-http-client/test/interface.spec.js b/packages/ipfs-http-client/test/interface.spec.js index e3e4df9037..02e74f6540 100644 --- a/packages/ipfs-http-client/test/interface.spec.js +++ b/packages/ipfs-http-client/test/interface.spec.js @@ -120,7 +120,24 @@ describe('interface-ipfs-core tests', () => { tests.dht(commonFactory) - tests.files(commonFactory, { + tests.files(factory({}, { + go: { + ipfsOptions: { + config: { + Experimental: { + ShardingEnabled: true + } + } + } + }, + js: { + ipfsOptions: { + EXPERIMENTAL: { + sharding: true + } + } + } + }), { skip: [ { name: 'should ls directory', @@ -143,19 +160,51 @@ describe('interface-ipfs-core tests', () => { reason: 'TODO not implemented in go-ipfs yet' }, { - name: 'should change file mode', + name: 'should update the mode for a file', reason: 'TODO not implemented in go-ipfs yet' }, { - name: 'should change directory mode', + name: 'should update the mode for a directory', reason: 'TODO not implemented in go-ipfs yet' }, { - name: 'should change file mode as string', + name: 'should update the mode for a hamt-sharded-directory', reason: 'TODO not implemented in go-ipfs yet' }, { - name: 'should change file mode to 0', + name: 'should update modes with basic symbolic notation that adds bits', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should update modes with basic symbolic notation that removes bits', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should update modes with basic symbolic notation 
that overrides bits', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should update modes with multiple symbolic notation', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should update modes with special symbolic notation', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should apply special execute permissions to world', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should apply special execute permissions to user', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should apply special execute permissions to user and group', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should apply special execute permissions to sharded directories', reason: 'TODO not implemented in go-ipfs yet' }, { @@ -166,6 +215,14 @@ describe('interface-ipfs-core tests', () => { name: 'should update directory mtime', reason: 'TODO not implemented in go-ipfs yet' }, + { + name: 'should update the mtime for a hamt-sharded-directory', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should create an empty file', + reason: 'TODO not implemented in go-ipfs yet' + }, { name: 'should make directory and specify mode', reason: 'TODO not implemented in go-ipfs yet' @@ -289,6 +346,106 @@ describe('interface-ipfs-core tests', () => { { name: 'should stat sharded dir with mtime', reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'lists a raw node', + reason: 'TODO go-ipfs does not support ipfs paths for all mfs commands' + }, + { + name: 'lists a raw node in an mfs directory', + reason: 'TODO go-ipfs does not support non-ipfs nodes in mfs' + }, + { + name: 'writes a small file with an escaped slash in the title', + reason: 'TODO go-ipfs does not support escapes in paths' + }, + { + name: 'overwrites a file with a different CID version', + reason: 'TODO go-ipfs does not support changing the CID version' + }, + { + name: 'partially overwrites a file with a different CID version', + reason: 'TODO go-ipfs does not support changing the CID version' + }, + { + name: 'refuses to copy multiple files to a non-existent child directory', + reason: 'TODO go-ipfs does not support copying multiple files at once' + }, + { + name: 'refuses to copy files to an unreadable node', + reason: 'TODO go-ipfs does not support identity format, maybe in 0.5.0?' 
+ }, + { + name: 'copies a file to a pre-existing directory', + reason: 'TODO go-ipfs does not support copying files into existing directories if the directory is specified as the target path' + }, + { + name: 'copies multiple files to new location', + reason: 'TODO go-ipfs does not support copying multiple files at once' + }, + { + name: 'copies files to deep mfs paths and creates intermediate directories', + reason: 'TODO go-ipfs does not support the parents flag in the cp command' + }, + { + name: 'copies a sharded directory to a normal directory', + reason: 'TODO go-ipfs does not support copying files into existing directories if the directory is specified as the target path' + }, + { + name: 'copies a normal directory to a sharded directory', + reason: 'TODO go-ipfs does not support copying files into existing directories if the directory is specified as the target path' + }, + { + name: 'removes multiple files', + reason: 'TODO go-ipfs does not support removing multiple files' + }, + { + name: 'results in the same hash as a sharded directory created by the importer when removing a file', + reason: 'TODO go-ipfs errors out with HTTPError: Could not convert value "85675" to type "bool" (for option "-size")' + }, + { + name: 'results in the same hash as a sharded directory created by the importer when removing a subshard', + reason: 'TODO go-ipfs errors out with HTTPError: Could not convert value "2109" to type "bool" (for option "-size")' + }, + { + name: 'results in the same hash as a sharded directory created by the importer when removing a file from a subshard of a subshard', + reason: 'TODO go-ipfs errors out with HTTPError: Could not convert value "170441" to type "bool" (for option "-size")' + }, + { + name: 'results in the same hash as a sharded directory created by the importer when removing a subshard of a subshard', + reason: 'TODO go-ipfs errors out with HTTPError: Could not convert value "11463" to type "bool" (for option "-size")' + }, + { + name: 'results in the same hash as a sharded directory created by the importer when adding a new file', + reason: 'TODO go-ipfs errors out with HTTPError: Could not convert value "5835" to type "bool" (for option "-size")' + }, + { + name: 'results in the same hash as a sharded directory created by the importer when creating a new subshard', + reason: 'TODO go-ipfs errors out with HTTPError: Could not convert value "8038" to type "bool" (for option "-size")' + }, + { + name: ' results in the same hash as a sharded directory created by the importer when adding a file to a subshard', + reason: 'TODO go-ipfs errors out with HTTPError: Could not convert value "6620" to type "bool" (for option "-size")' + }, + { + name: 'results in the same hash as a sharded directory created by the importer when adding a file to a subshard', + reason: 'HTTPError: Could not convert value "6620" to type "bool" (for option "-size")' + }, + { + name: 'results in the same hash as a sharded directory created by the importer when adding a file to a subshard of a subshard', + reason: 'HTTPError: Could not convert value "170441" to type "bool" (for option "-size")' + }, + { + name: 'stats a dag-cbor node', + reason: 'TODO go-ipfs does not support non-dag-pb nodes in mfs' + }, + { + name: 'stats an identity CID', + reason: 'TODO go-ipfs does not support non-dag-pb nodes in mfs' + }, + { + name: 'limits how many bytes to write to a file (Really large file)', + reason: 'TODO go-ipfs drops the connection' } ] }) diff --git a/packages/ipfs-http-client/test/utils/factory.js
b/packages/ipfs-http-client/test/utils/factory.js index f38064a555..fcb2c9394c 100644 --- a/packages/ipfs-http-client/test/utils/factory.js +++ b/packages/ipfs-http-client/test/utils/factory.js @@ -5,7 +5,7 @@ const merge = require('merge-options') const { isNode } = require('ipfs-utils/src/env') const commonOptions = { - test: 'true', + test: true, type: 'go', ipfsHttpModule: require('../../src'), endpoint: 'http://localhost:48372' diff --git a/packages/ipfs-mfs/.aegir.js b/packages/ipfs-mfs/.aegir.js deleted file mode 100644 index e69c15825d..0000000000 --- a/packages/ipfs-mfs/.aegir.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = { - bundlesize: { - maxSize: '160kB' - } -} diff --git a/packages/ipfs-mfs/CHANGELOG.md b/packages/ipfs-mfs/CHANGELOG.md deleted file mode 100644 index 7a36a40c35..0000000000 --- a/packages/ipfs-mfs/CHANGELOG.md +++ /dev/null @@ -1,584 +0,0 @@ - -# [1.0.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.16.0...v1.0.0) (2020-01-23) - - - - -# [0.16.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.15.0...v0.16.0) (2020-01-15) - - -### Features - -* adds support for -X symbolic mode and recursive chmod ([#73](https://github.com/ipfs/js-ipfs-mfs/issues/73)) ([f3b6e5d](https://github.com/ipfs/js-ipfs-mfs/commit/f3b6e5d)) - - - -# [0.15.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.14.0...v0.15.0) (2020-01-10) - - -### Bug Fixes - -* allow writing starting at offset beyond file length ([#71](https://github.com/ipfs/js-ipfs-mfs/issues/71)) ([68bd372](https://github.com/ipfs/js-ipfs-mfs/commit/68bd37249f40bf3318fb0f0ab527231e671ed0df)), closes [#53](https://github.com/ipfs/js-ipfs-mfs/issues/53) -* fix formatting of mode and optional mtimes ([#70](https://github.com/ipfs/js-ipfs-mfs/issues/70)) ([5747297](https://github.com/ipfs/js-ipfs-mfs/commit/57472970bde1f36f525b0e65f29bb2e5c32a6717)) -* remove format and/or codec options ([#69](https://github.com/ipfs/js-ipfs-mfs/issues/69)) ([009fb98](https://github.com/ipfs/js-ipfs-mfs/commit/009fb9817b7c65eb014338500724150f33d002d5)), closes [#67](https://github.com/ipfs/js-ipfs-mfs/issues/67) -* returns cid of flushed path ([#72](https://github.com/ipfs/js-ipfs-mfs/issues/72)) ([d331b35](https://github.com/ipfs/js-ipfs-mfs/commit/d331b3554332c13d294fa17071a030f404799dc1)), closes [#50](https://github.com/ipfs/js-ipfs-mfs/issues/50) - - -### BREAKING CHANGES - -* `--format` and/or `--codec` option has been removed from the CLI, the HTTP API and the core API. 
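The 0.16.0 entry above added `-X` symbolic modes and a recursive option to `chmod`. As a rough usage sketch (the paths and the `demoChmod` wrapper are hypothetical; the calls mirror the chmod tests removed later in this diff):

```JavaScript
// assumes `mfs` is an initialised MFS instance, e.g. the one returned by the
// createMfs() helper used by the tests deleted further down in this diff
async function demoChmod (mfs) {
  // 'X' grants execute only to directories and to files that are already
  // executable by someone; recursive: true applies the change to the subtree
  await mfs.chmod('/some/dir', 'a+X', { recursive: true })

  // octal-string and numeric modes are accepted too
  await mfs.chmod('/some/dir/file.txt', '0644')
}
```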
- - - - -# [0.14.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.13.2...v0.14.0) (2020-01-09) - - - - -## [0.13.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.13.1...v0.13.2) (2019-11-22) - - -### Bug Fixes - -* response for empty dir when ?stream=true ([14d53ce](https://github.com/ipfs/js-ipfs-mfs/commit/14d53ce)), closes [/github.com/ipfs/interface-js-ipfs-core/blob/c766dbff654fd259f7094070ee71858091898750/src/files-mfs/ls.js#L106-L112](https://github.com//github.com/ipfs/interface-js-ipfs-core/blob/c766dbff654fd259f7094070ee71858091898750/src/files-mfs/ls.js/issues/L106-L112) - - - - -## [0.13.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.13.0...v0.13.1) (2019-08-29) - - -### Features - -* export MFS root key ([265eee5](https://github.com/ipfs/js-ipfs-mfs/commit/265eee5)), closes [/github.com/ipfs/js-ipfs/pull/2022/files#r303383848](https://github.com//github.com/ipfs/js-ipfs/pull/2022/files/issues/r303383848) - - - -# [0.13.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.12.0...v0.13.0) (2019-08-05) - - -### Bug Fixes - -* update to newest IPLD libraries ([c21e032](https://github.com/ipfs/js-ipfs-mfs/commit/c21e032)) - - - - -# [0.12.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.11.7...v0.12.0) (2019-07-18) - - -### Features - -* support -p flag on cp ([#56](https://github.com/ipfs/js-ipfs-mfs/issues/56)) ([0743d90](https://github.com/ipfs/js-ipfs-mfs/commit/0743d90)) - - - - -## [0.11.7](https://github.com/ipfs/js-ipfs-mfs/compare/v0.11.6...v0.11.7) (2019-07-12) - - - - -## [0.11.6](https://github.com/ipfs/js-ipfs-mfs/compare/v0.11.5...v0.11.6) (2019-07-12) - - - - -## [0.11.5](https://github.com/ipfs/js-ipfs-mfs/compare/v0.11.4...v0.11.5) (2019-06-12) - - -### Bug Fixes - -* handle dag-cbor nodes in trail ([3b49d4b](https://github.com/ipfs/js-ipfs-mfs/commit/3b49d4b)) -* return the CID for dag-cbor nodes ([#52](https://github.com/ipfs/js-ipfs-mfs/issues/52)) ([4159b90](https://github.com/ipfs/js-ipfs-mfs/commit/4159b90)) - - - - -## [0.11.4](https://github.com/ipfs/js-ipfs-mfs/compare/v0.11.3...v0.11.4) (2019-05-24) - - - - -## [0.11.3](https://github.com/ipfs/js-ipfs-mfs/compare/v0.11.2...v0.11.3) (2019-05-24) - - - - -## [0.11.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.11.1...v0.11.2) (2019-05-20) - - - - -## [0.11.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.11.0...v0.11.1) (2019-05-20) - - - - -# [0.11.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.10.4...v0.11.0) (2019-05-18) - - -### Features - -* convert to async/await ([#49](https://github.com/ipfs/js-ipfs-mfs/issues/49)) ([f02a941](https://github.com/ipfs/js-ipfs-mfs/commit/f02a941)) - - -### BREAKING CHANGES - -* 1. Everything is now async/await -2. No more callbacks, Readable Streams or Pull Streams -3. `stat` and `ls` commands return `cid` objects instead of string hashes -4. 
`stat` and `ls` commands return all fields, `hash`, `long` etc options are now ignored - -* chore: standardise error codes, use latest cids and ipld formats - -* chore: update importer and exporter - -* chore: update importer again - -* chore: update deps - - - - -## [0.10.4](https://github.com/ipfs/js-ipfs-mfs/compare/v0.10.3...v0.10.4) (2019-04-08) - - -### Features - -* handle raw nodes in mfs ([#48](https://github.com/ipfs/js-ipfs-mfs/issues/48)) ([ad1df5a](https://github.com/ipfs/js-ipfs-mfs/commit/ad1df5a)) - - - - -## [0.10.3](https://github.com/ipfs/js-ipfs-mfs/compare/v0.10.2...v0.10.3) (2019-03-26) - - -### Bug Fixes - -* handle shard updates that create subshards of subshards ([#47](https://github.com/ipfs/js-ipfs-mfs/issues/47)) ([1158951](https://github.com/ipfs/js-ipfs-mfs/commit/1158951)) - - - - -## [0.10.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.10.1...v0.10.2) (2019-03-18) - - - - -## [0.10.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.9.2...v0.10.1) (2019-03-18) - - -### Bug Fixes - -* correct hamt structure when modifying deep sub-shards ([#46](https://github.com/ipfs/js-ipfs-mfs/issues/46)) ([c08a42f](https://github.com/ipfs/js-ipfs-mfs/commit/c08a42f)), closes [#45](https://github.com/ipfs/js-ipfs-mfs/issues/45) -* expect dir size without protobuf ([ba5b9dc](https://github.com/ipfs/js-ipfs-mfs/commit/ba5b9dc)) - - - - -# [0.10.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.9.2...v0.10.0) (2019-03-18) - - -### Bug Fixes - -* expect dir size without protobuf ([d2ab171](https://github.com/ipfs/js-ipfs-mfs/commit/d2ab171)) - - - - -## [0.9.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.9.1...v0.9.2) (2019-02-19) - - -### Bug Fixes - -* validate and coerce count param for read in HTTP API ([73dc2fc](https://github.com/ipfs/js-ipfs-mfs/commit/73dc2fc)) - - - - -## [0.9.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.9.0...v0.9.1) (2019-01-31) - - - - -# [0.9.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.8.2...v0.9.0) (2019-01-31) - - -### Bug Fixes - -* parser does not end until file data is consumed ([af4d6f7](https://github.com/ipfs/js-ipfs-mfs/commit/af4d6f7)) - - - - -## [0.8.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.8.1...v0.8.2) (2019-01-16) - - - - -## [0.8.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.8.0...v0.8.1) (2019-01-04) - - -### Bug Fixes - -* initialise progress as noop ([2a8cf65](https://github.com/ipfs/js-ipfs-mfs/commit/2a8cf65)) - - - - -# [0.8.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.7.7...v0.8.0) (2018-12-04) - - -### Features - -* add streaming option to http ([d832277](https://github.com/ipfs/js-ipfs-mfs/commit/d832277)) - - - - -## [0.7.7](https://github.com/ipfs/js-ipfs-mfs/compare/v0.7.6...v0.7.7) (2018-12-04) - - -### Bug Fixes - -* flush should error on non-existent entries ([dbe7089](https://github.com/ipfs/js-ipfs-mfs/commit/dbe7089)) - - - - -## [0.7.6](https://github.com/ipfs/js-ipfs-mfs/compare/v0.7.5...v0.7.6) (2018-12-04) - - -### Features - -* push sorting out of core ([4ce16b7](https://github.com/ipfs/js-ipfs-mfs/commit/4ce16b7)) - - - - -## [0.7.5](https://github.com/ipfs/js-ipfs-mfs/compare/v0.7.4...v0.7.5) (2018-12-04) - - -### Bug Fixes - -* fix regex for splitting files ([a8142d3](https://github.com/ipfs/js-ipfs-mfs/commit/a8142d3)) - - - - -## [0.7.4](https://github.com/ipfs/js-ipfs-mfs/compare/v0.7.3...v0.7.4) (2018-12-03) - - - - -## [0.7.3](https://github.com/ipfs/js-ipfs-mfs/compare/v0.7.2...v0.7.3) (2018-12-02) - - -### Bug Fixes - -* add missing dependency 
([cc7d708](https://github.com/ipfs/js-ipfs-mfs/commit/cc7d708)) - - - - -## [0.7.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.7.1...v0.7.2) (2018-12-01) - - -### Bug Fixes - -* handle sub-sub shards properly ([9302f01](https://github.com/ipfs/js-ipfs-mfs/commit/9302f01)) -* make sure hashes are the same after shard changes ([b2fbd5d](https://github.com/ipfs/js-ipfs-mfs/commit/b2fbd5d)) - - - - -## [0.7.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.7.0...v0.7.1) (2018-11-29) - - -### Performance Improvements - -* do not load a node when we only want the hash or size ([a029c7e](https://github.com/ipfs/js-ipfs-mfs/commit/a029c7e)) - - - - -# [0.7.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.6.0...v0.7.0) (2018-11-28) - - -### Features - -* adds ls streaming methods ([1b07f58](https://github.com/ipfs/js-ipfs-mfs/commit/1b07f58)), closes [ipfs/interface-ipfs-core#401](https://github.com/ipfs/interface-ipfs-core/issues/401) - - -### Performance Improvements - -* do not list directory contents when statting files ([d16a4e4](https://github.com/ipfs/js-ipfs-mfs/commit/d16a4e4)) - - - - -# [0.6.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.5.2...v0.6.0) (2018-11-28) - - -### Features - -* support sharded directories ([e1c7308](https://github.com/ipfs/js-ipfs-mfs/commit/e1c7308)) - - - - -## [0.5.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.5.1...v0.5.2) (2018-11-16) - - -### Bug Fixes - -* support `count` as well as `length` ([e787bf9](https://github.com/ipfs/js-ipfs-mfs/commit/e787bf9)), closes [#21](https://github.com/ipfs/js-ipfs-mfs/issues/21) - - - - -## [0.5.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.5.0...v0.5.1) (2018-11-16) - - -### Features - -* allow write and mkdir with different hash algs and cid versions ([0a12b3e](https://github.com/ipfs/js-ipfs-mfs/commit/0a12b3e)) - - - - -# [0.5.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.4.2...v0.5.0) (2018-11-12) - - -### Bug Fixes - -* updates ipld-dag-pb dep to version without .cid properties ([fa9029d](https://github.com/ipfs/js-ipfs-mfs/commit/fa9029d)), closes [ipld/js-ipld-dag-pb#99](https://github.com/ipld/js-ipld-dag-pb/issues/99) [#24](https://github.com/ipfs/js-ipfs-mfs/issues/24) -* use ipfs.add instead of files.add ([6aa245f](https://github.com/ipfs/js-ipfs-mfs/commit/6aa245f)) - - - - -## [0.4.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.4.1...v0.4.2) (2018-10-24) - - - - -## [0.4.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.4.0...v0.4.1) (2018-10-01) - - -### Bug Fixes - -* simplify write command ([710a2d6](https://github.com/ipfs/js-ipfs-mfs/commit/710a2d6)) - - - - -# [0.4.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.3.2...v0.4.0) (2018-09-28) - - -### Bug Fixes - -* allow for graceful release with datastore-level ([64ff6a1](https://github.com/ipfs/js-ipfs-mfs/commit/64ff6a1)) -* avoid creating a cid with a null result ([59bcf3c](https://github.com/ipfs/js-ipfs-mfs/commit/59bcf3c)) -* update database not found error ([62212c4](https://github.com/ipfs/js-ipfs-mfs/commit/62212c4)) -* update read cli to use returned pull stream ([62cf0cd](https://github.com/ipfs/js-ipfs-mfs/commit/62cf0cd)) - - - - -## [0.3.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.3.1...v0.3.2) (2018-08-23) - - - - -## [0.3.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.3.0...v0.3.1) (2018-08-20) - - -### Bug Fixes - -* make error messages consistent with go for interop tests ([08f60c3](https://github.com/ipfs/js-ipfs-mfs/commit/08f60c3)) - - - - -# 
[0.3.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.2.5...v0.3.0) (2018-08-09) - - -### Performance Improvements - -* write files to repo outside of write lock ([63940b4](https://github.com/ipfs/js-ipfs-mfs/commit/63940b4)) - - - - -## [0.2.5](https://github.com/ipfs/js-ipfs-mfs/compare/v0.2.4...v0.2.5) (2018-08-02) - - -### Bug Fixes - -* removes extra sort added to ensure go compatibility ([c211941](https://github.com/ipfs/js-ipfs-mfs/commit/c211941)), closes [ipfs/go-ipfs#5181](https://github.com/ipfs/go-ipfs/issues/5181) - - - - -## [0.2.4](https://github.com/ipfs/js-ipfs-mfs/compare/v0.2.3...v0.2.4) (2018-07-31) - - -### Bug Fixes - -* prevent returning from http write command early ([1018e7d](https://github.com/ipfs/js-ipfs-mfs/commit/1018e7d)) - - - - -## [0.2.3](https://github.com/ipfs/js-ipfs-mfs/compare/v0.2.2...v0.2.3) (2018-07-26) - - - - -## [0.2.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.2.1...v0.2.2) (2018-07-20) - - - - -## [0.2.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.2.0...v0.2.1) (2018-07-20) - - - - -# [0.2.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.1.1...v0.2.0) (2018-07-19) - - - - -## [0.1.1](https://github.com/ipfs/js-ipfs-mfs/compare/v0.1.0...v0.1.1) (2018-07-19) - - -### Features - -* adds --cid-base argument to stringify cids in different bases ([5ee75a6](https://github.com/ipfs/js-ipfs-mfs/commit/5ee75a6)) -* support --raw-leaves ([61f77dc](https://github.com/ipfs/js-ipfs-mfs/commit/61f77dc)) - - - - -# [0.1.0](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.17...v0.1.0) (2018-07-13) - - - - -## [0.0.17](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.16...v0.0.17) (2018-07-13) - - - - -## [0.0.16](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.15...v0.0.16) (2018-07-10) - - -### Bug Fixes - -* handle copying files onto each other ([749b7a2](https://github.com/ipfs/js-ipfs-mfs/commit/749b7a2)) - - - - -## [0.0.15](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.14...v0.0.15) (2018-07-10) - - -### Bug Fixes - -* refuse to read directories ([1a81d66](https://github.com/ipfs/js-ipfs-mfs/commit/1a81d66)) - - - - -## [0.0.14](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.13...v0.0.14) (2018-07-05) - - - - -## [0.0.13](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.12...v0.0.13) (2018-07-04) - - - - -## [0.0.12](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.11...v0.0.12) (2018-07-04) - - - - -## [0.0.11](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.10...v0.0.11) (2018-07-03) - - - - -## [0.0.10](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.9...v0.0.10) (2018-07-03) - - - - -## [0.0.9](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.8...v0.0.9) (2018-07-03) - - - - -## [0.0.8](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.7...v0.0.8) (2018-07-02) - - - - -## [0.0.7](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.6...v0.0.7) (2018-06-29) - - - - -## [0.0.6](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.5...v0.0.6) (2018-06-27) - - - - -## [0.0.5](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.4...v0.0.5) (2018-06-27) - - - - -## [0.0.4](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.3...v0.0.4) (2018-06-14) - - - - -## [0.0.3](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.2...v0.0.3) (2018-06-13) - - - - -## [0.0.2](https://github.com/ipfs/js-ipfs-mfs/compare/v0.0.1...v0.0.2) (2018-06-12) - - -### Features - -* added mv command ([1577094](https://github.com/ipfs/js-ipfs-mfs/commit/1577094)) -* adds rm command ([682c478](https://github.com/ipfs/js-ipfs-mfs/commit/682c478)) -* allow for truncating files 
([c515184](https://github.com/ipfs/js-ipfs-mfs/commit/c515184)) -* basic mfs.write command ([ccecb1b](https://github.com/ipfs/js-ipfs-mfs/commit/ccecb1b)) -* copy directories ([cb0135c](https://github.com/ipfs/js-ipfs-mfs/commit/cb0135c)) -* Happy path mfs.write command ([2ea064b](https://github.com/ipfs/js-ipfs-mfs/commit/2ea064b)) -* implement streaming mfs.read methods ([3e5620b](https://github.com/ipfs/js-ipfs-mfs/commit/3e5620b)) -* integrate with jsipfs cli ([79981d8](https://github.com/ipfs/js-ipfs-mfs/commit/79981d8)) -* mfs ls and mkdir commands ([bad24b3](https://github.com/ipfs/js-ipfs-mfs/commit/bad24b3)) -* More stat tests ([d4fc07e](https://github.com/ipfs/js-ipfs-mfs/commit/d4fc07e)) -* most of the cp command ([5d189a6](https://github.com/ipfs/js-ipfs-mfs/commit/5d189a6)) -* Replacing chunks of files that do not increase the size of the file ([77b5c32](https://github.com/ipfs/js-ipfs-mfs/commit/77b5c32)) -* simple mfs.read command ([035fde5](https://github.com/ipfs/js-ipfs-mfs/commit/035fde5)) -* Stat command working on directories ([4671b2e](https://github.com/ipfs/js-ipfs-mfs/commit/4671b2e)) - - - - - - -# [0.0.1](https://github.com/ipfs/js-ipfs-mfs/releases/tag/v0.0.1) - -Initial release. No features but also no bugs. diff --git a/packages/ipfs-mfs/CONTRIBUTING.md b/packages/ipfs-mfs/CONTRIBUTING.md deleted file mode 100644 index 33fe54e076..0000000000 --- a/packages/ipfs-mfs/CONTRIBUTING.md +++ /dev/null @@ -1,58 +0,0 @@ -# Contributing - -## Setup - -You should have [node.js] and [npm] installed. - -## Linting - -Linting is done using [eslint] and the rules are based on [standard]. - -```bash -$ npm run lint -``` - -## Tests - -Tests in node - -```bash -$ npm run test:node -``` - -Tests in the browser - -```bash -$ npm run test:browser -``` - -## Building browser version - -```bash -$ npm run build -``` - -## Releases - -The `release` task will - -1. Run a build -2. Commit the build -3. Bump the version in `package.json` -4. Commit the version change -5. Create a git tag -6. Run `git push` to `upstream/master` (You can change this with `--remote my-remote`) - -```bash -# Major release -$ npm run release-major -# Minor release -$ npm run release-minor -# Patch release -$ npm run release -``` - -[node.js]: https://nodejs.org/ -[npm]: http://npmjs.org/ -[eslint]: http://eslint.org/ -[standard]: https://github.com/feross/standard diff --git a/packages/ipfs-mfs/LICENSE b/packages/ipfs-mfs/LICENSE deleted file mode 100644 index bb9cf402af..0000000000 --- a/packages/ipfs-mfs/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 David Dias - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/packages/ipfs-mfs/README.md b/packages/ipfs-mfs/README.md deleted file mode 100644 index e847241ba0..0000000000 --- a/packages/ipfs-mfs/README.md +++ /dev/null @@ -1,94 +0,0 @@ -# MFS (Mutable File System) JavaScript Implementation - -[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io) -[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/) -[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) -[![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) -[![Build Status](https://flat.badgen.net/travis/ipfs/js-ipfs-mfs)](https://travis-ci.com/ipfs/js-ipfs-mfs) -[![Code Coverage](https://codecov.io/gh/ipfs/js-ipfs-mfs/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-mfs) -[![Dependency Status](https://david-dm.org/ipfs/js-ipfs-mfs.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-mfs) -[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) -![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square) -![](https://img.shields.io/badge/Node.js-%3E%3D8.0.0-orange.svg?style=flat-square) - -> JavaScript implementation of the IPFS Mutable File System - -[The MFS spec can be found inside the ipfs/specs repository](https://github.com/ipfs/js-ipfs/blob/master/packages/interface-ipfs-core/SPEC/FILES.md#mutable-file-system) - -## Lead Maintainer - -[Alex Potsides](https://github.com/achingbrain) - -## Table of Contents - -- [Install](#install) - - [npm](#npm) - - [Use in Node.js](#use-in-nodejs) - - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler) - - [Use in a browser using a script tag](#use-in-a-browser-using-a-script-tag) - - [A note on concurrency](#a-note-on-concurrency) - [Contribute](#contribute) - [Changelog](#changelog) - [License](#license) - -## Install - -### npm - -```sh -> npm i ipfs-mfs -``` - -### Use in Node.js - -```JavaScript -const mfs = require('ipfs-mfs') -``` - -### Use in a browser with browserify, webpack or any other bundler - -The code published to npm that gets loaded on require is an ES5 transpiled version with the right shims added. This means that you can require it and use it with your favourite bundler without having to adjust your asset management process. - -```JavaScript -const mfs = require('ipfs-mfs') -``` - -### Use in a browser using a script tag - -Loading this module through a script tag will make the `mfs` object available in the global namespace. - -```html - - - -``` - -### A note on concurrency - -The mfs works by storing a reference to the root node's CID in LevelDB. LevelDB does not support concurrent access so there are read/write locks around bits of the code that modify the root node's CID. - -A lock is kept on the main thread and any requests to read/write from workers or the main thread itself are queued pending release of the lock by the existing holder.
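Sketched below, purely for illustration, is a tiny lock with the semantics just described; every name in it is hypothetical, and the package itself lists the `mortice` lock module among its dependencies rather than hand-rolling this.

```JavaScript
// Illustrative sketch only (hypothetical names, not the package's real lock):
// reads run together, writes run alone and block reads that arrive after them.
class RWLock {
  constructor () {
    this.lastWrite = Promise.resolve() // settles when the latest write ends
    this.reads = [] // reads started since that write
  }

  readLock (fn) {
    // a read waits for the latest write, then runs alongside other reads
    const result = this.lastWrite.then(() => fn())
    this.reads.push(result.catch(() => {})) // track completion, ignore errors
    return result
  }

  writeLock (fn) {
    // a write waits for the latest write and every read started since,
    // then runs exclusively; reads arriving later queue behind it
    const result = Promise.all([this.lastWrite, ...this.reads]).then(() => fn())
    this.reads = []
    this.lastWrite = result.catch(() => {}) // keep the chain alive on failure
    return result
  }
}

// usage: the two reads may interleave, the write waits for both to finish
const lock = new RWLock()
lock.readLock(async () => { /* read the root CID */ })
lock.readLock(async () => { /* read the root CID */ })
lock.writeLock(async () => { /* update the root CID */ })
```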
- -Reads are executed together, writes are executed sequentially and prevent any reads from starting. - -If you are using IPFS in a single process or with the node [cluster](https://nodejs.org/api/cluster.html) module this should be completely transparent. - -If you are using [Web Workers](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) there is no way to globally listen to messages sent between workers and the main thread so you will need to also use the [observable-webworkers](https://www.npmjs.com/package/observable-webworkers) module to ensure the right message transports are set up to allow requesting/releasing the locks. - -## Contribute - -All are welcome, please join in! - -This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). - -Open an [issue](https://github.com/ipfs/js-ipfs-mfs/issues) or send a [PR](https://github.com/ipfs/js-ipfs-mfs/pulls) - see [CONTRIBUTING.md](./CONTRIBUTING.md) for how to make sure your branch is ready for PRing. - -[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) - -## Changelog - -See [CHANGELOG.md](./CHANGELOG.md) for details of what has changed between releases. - -## License - -[MIT](LICENSE) diff --git a/packages/ipfs-mfs/cli.js b/packages/ipfs-mfs/cli.js deleted file mode 100644 index f93e4d30a3..0000000000 --- a/packages/ipfs-mfs/cli.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = require('./src/cli') diff --git a/packages/ipfs-mfs/core.js b/packages/ipfs-mfs/core.js deleted file mode 100644 index 23cb8419b7..0000000000 --- a/packages/ipfs-mfs/core.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = require('./src/core') diff --git a/packages/ipfs-mfs/http.js b/packages/ipfs-mfs/http.js deleted file mode 100644 index 6607325bd6..0000000000 --- a/packages/ipfs-mfs/http.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = require('./src/http') diff --git a/packages/ipfs-mfs/package.json b/packages/ipfs-mfs/package.json deleted file mode 100644 index 4026c1e2db..0000000000 --- a/packages/ipfs-mfs/package.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "name": "ipfs-mfs", - "version": "1.0.0", - "description": "JavaScript implementation of the IPFS Mutable File System", - "leadMaintainer": "Alex Potsides ", - "homepage": "https://github.com/ipfs/js-ipfs", - "bugs": "https://github.com/ipfs/js-ipfs/issues", - "main": "src/index.js", - "browser": { - "@hapi/joi": "joi-browser", - "fs": false - }, - "scripts": { - "test": "aegir test", - "test:node": "aegir test -t node", - "test:cli": "aegir test -t node -f test/cli/index.js", - "test:core": "aegir test -t node -f test/core/index.js", - "test:http": "aegir test -t node -f test/http/index.js", - "test:browser": "aegir test -t browser", - "test:webworker": "aegir test -t webworker", - "build": "aegir build", - "lint": "aegir lint", - "coverage": "nyc --reporter=text --reporter=lcov --reporter=html npm run test:node", - "dep-check": "aegir dep-check", - "clean": "rm -rf ./dist" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/ipfs/js-ipfs.git" - }, - "keywords": [ - "IPFS" - ], - "license": "MIT", - "engines": { - "node": ">=8.0.0", - "npm": ">=3.0.0" - }, - "devDependencies": { - "@hapi/hapi": "^18.4.0", - "aegir": "^21.3.0", - "chai": "^4.2.0", - "chai-as-promised": "^7.1.1", - "delay": "^4.3.0", - "detect-node": "^2.0.4", - "detect-webworker": "^1.0.0", - 
"dirty-chai": "^2.0.1", - "form-data": "^3.0.0", - "ipfs-block": "^0.8.1", - "ipfs-block-service": "^0.16.0", - "ipfs-repo": "^0.30.1", - "ipld": "^0.25.0", - "memdown": "^5.1.0", - "nyc": "^15.0.0", - "sinon": "^8.0.4", - "stream-to-promise": "^2.2.0", - "temp-write": "^4.0.0", - "yargs": "^15.1.0" - }, - "dependencies": { - "@hapi/boom": "^7.4.3", - "@hapi/joi": "^15.1.0", - "cids": "^0.7.3", - "debug": "^4.1.0", - "err-code": "^2.0.0", - "hamt-sharding": "^1.0.0", - "interface-datastore": "^0.8.0", - "ipfs-multipart": "^0.3.0", - "ipfs-unixfs": "^1.0.0", - "ipfs-unixfs-exporter": "^1.0.1", - "ipfs-unixfs-importer": "^1.0.1", - "ipfs-utils": "^0.7.0", - "ipld-dag-pb": "^0.18.3", - "it-all": "^1.0.1", - "it-last": "^1.0.1", - "it-to-stream": "^0.1.1", - "it-pipe": "^1.1.0", - "joi-browser": "^13.4.0", - "mortice": "^2.0.0", - "multicodec": "^1.0.0", - "multihashes": "^0.4.14" - }, - "contributors": [ - "Alan Shaw ", - "David ", - "David Dias ", - "Jacob Heun ", - "Vasco Santos ", - "Volker Mische ", - "achingbrain " - ] -} diff --git a/packages/ipfs-mfs/src/cli/utils.js b/packages/ipfs-mfs/src/cli/utils.js deleted file mode 100644 index fc0dcf03b6..0000000000 --- a/packages/ipfs-mfs/src/cli/utils.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict' - -let visible = true - -const disablePrinting = () => { - visible = false -} - -const print = (msg = '', newline = true) => { - if (!visible) { - return - } - - if (msg instanceof Error && process.env.DEBUG) { - msg = msg.stack - } - - msg = newline ? msg + '\n' : msg - process.stdout.write(msg) -} - -const asBoolean = (value) => { - if (value === false || value === true) { - return value - } - - if (value === undefined) { - return true - } - - return false -} - -const asOctal = (value) => { - return parseInt(value, 8) -} - -const asDateFromSeconds = (value) => { - return new Date(parseInt(value, 10) * 1000) -} - -module.exports = { - disablePrinting, - print, - asBoolean, - asOctal, - asDateFromSeconds -} diff --git a/packages/ipfs-mfs/src/core/index.js b/packages/ipfs-mfs/src/core/index.js deleted file mode 100644 index a2335f7810..0000000000 --- a/packages/ipfs-mfs/src/core/index.js +++ /dev/null @@ -1,81 +0,0 @@ -'use strict' - -const assert = require('assert') -const createLock = require('./utils/create-lock') - -// These operations are read-locked at the function level and will execute simultaneously -const readOperations = { - stat: require('./stat') -} - -// These operations are locked at the function level and will execute in series -const writeOperations = { - chmod: require('./chmod'), - cp: require('./cp'), - flush: require('./flush'), - mkdir: require('./mkdir'), - mv: require('./mv'), - rm: require('./rm'), - touch: require('./touch') -} - -// These operations are asynchronous and manage their own locking -const unwrappedOperations = { - write: require('./write'), - read: require('./read'), - ls: require('./ls') -} - -const wrap = ({ - options, mfs, operations, lock -}) => { - Object.keys(operations).forEach(key => { - mfs[key] = lock(operations[key](options)) - }) -} - -const defaultOptions = { - repoOwner: true, - ipld: null, - repo: null -} - -module.exports = (options) => { - const { - repoOwner - } = Object.assign({}, defaultOptions || {}, options) - - assert(options.ipld, 'MFS requires an IPLD instance') - assert(options.blocks, 'MFS requires an BlockStore instance') - assert(options.datastore, 'MFS requires a DataStore instance') - - options.repo = { - blocks: options.blocks, - datastore: options.datastore - } - - const lock = 
createLock(repoOwner) - - const readLock = (operation) => { - return lock.readLock(operation) - } - - const writeLock = (operation) => { - return lock.writeLock(operation) - } - - const mfs = {} - - wrap({ - options, mfs, operations: readOperations, lock: readLock - }) - wrap({ - options, mfs, operations: writeOperations, lock: writeLock - }) - - Object.keys(unwrappedOperations).forEach(key => { - mfs[key] = unwrappedOperations[key](options) - }) - - return mfs -} diff --git a/packages/ipfs-mfs/src/core/utils/constants.js b/packages/ipfs-mfs/src/core/utils/constants.js deleted file mode 100644 index 96a8e36d32..0000000000 --- a/packages/ipfs-mfs/src/core/utils/constants.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' - -const Key = require('interface-datastore').Key - -const FILE_TYPES = { - file: 0, - directory: 1, - 'hamt-sharded-directory': 1 -} - -module.exports = { - FILE_SEPARATOR: '/', - MFS_ROOT_KEY: new Key('/local/filesroot'), - MAX_CHUNK_SIZE: 262144, - MAX_LINKS: 174, - FILE_TYPES -} diff --git a/packages/ipfs-mfs/src/index.js b/packages/ipfs-mfs/src/index.js deleted file mode 100644 index 3ae46eaaee..0000000000 --- a/packages/ipfs-mfs/src/index.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' - -const cli = require('./cli') -const core = require('./core') -const http = require('./http') -const { - FILE_TYPES, - MFS_ROOT_KEY -} = require('./core/utils/constants') - -module.exports = { - cli, - core, - http, - FILE_TYPES, - MFS_ROOT_KEY -} diff --git a/packages/ipfs-mfs/test/browser.js b/packages/ipfs-mfs/test/browser.js deleted file mode 100644 index ed5d9913f4..0000000000 --- a/packages/ipfs-mfs/test/browser.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -require('./core') diff --git a/packages/ipfs-mfs/test/core/chmod.js b/packages/ipfs-mfs/test/core/chmod.js deleted file mode 100644 index e5731af3f9..0000000000 --- a/packages/ipfs-mfs/test/core/chmod.js +++ /dev/null @@ -1,326 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const expect = require('../helpers/chai') -const createMfs = require('../helpers/create-mfs') - -describe('chmod', () => { - let mfs - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - async function testChmod (initialMode, modification, expectedFinalMode) { - const path = `/foo-${Math.random()}` - - await mfs.write(path, Buffer.from('Hello world'), { - create: true, - mtime: new Date(), - mode: initialMode - }) - await mfs.chmod(path, modification, { - flush: true - }) - - const updatedMode = (await mfs.stat(path)).mode - expect(updatedMode).to.equal(parseInt(expectedFinalMode, 8)) - } - - it('should update the mode for a file', async () => { - const path = `/foo-${Math.random()}` - - await mfs.write(path, Buffer.from('Hello world'), { - create: true, - mtime: new Date() - }) - const originalMode = (await mfs.stat(path)).mode - await mfs.chmod(path, '0777', { - flush: true - }) - - const updatedMode = (await mfs.stat(path)).mode - expect(updatedMode).to.not.equal(originalMode) - expect(updatedMode).to.equal(parseInt('0777', 8)) - }) - - it('should update the mode for a directory', async () => { - const path = `/foo-${Math.random()}` - - await mfs.mkdir(path) - const originalMode = (await mfs.stat(path)).mode - await mfs.chmod(path, '0777', { - flush: true - }) - - const updatedMode = (await mfs.stat(path)).mode - expect(updatedMode).to.not.equal(originalMode) - expect(updatedMode).to.equal(parseInt('0777', 8)) - }) - - it('should update the mode for a hamt-sharded-directory', async () => { - 
const path = `/foo-${Math.random()}` - - await mfs.mkdir(path) - await mfs.write(`${path}/foo.txt`, Buffer.from('Hello world'), { - create: true, - shardSplitThreshold: 0 - }) - const originalMode = (await mfs.stat(path)).mode - await mfs.chmod(path, '0777', { - flush: true - }) - - const updatedMode = (await mfs.stat(path)).mode - expect(updatedMode).to.not.equal(originalMode) - expect(updatedMode).to.equal(parseInt('0777', 8)) - }) - - it('should update modes with basic symbolic notation that adds bits', async () => { - await testChmod('0000', '+x', '0111') - await testChmod('0000', '+w', '0222') - await testChmod('0000', '+r', '0444') - await testChmod('0000', 'u+x', '0100') - await testChmod('0000', 'u+w', '0200') - await testChmod('0000', 'u+r', '0400') - await testChmod('0000', 'g+x', '0010') - await testChmod('0000', 'g+w', '0020') - await testChmod('0000', 'g+r', '0040') - await testChmod('0000', 'o+x', '0001') - await testChmod('0000', 'o+w', '0002') - await testChmod('0000', 'o+r', '0004') - await testChmod('0000', 'ug+x', '0110') - await testChmod('0000', 'ug+w', '0220') - await testChmod('0000', 'ug+r', '0440') - await testChmod('0000', 'ugo+x', '0111') - await testChmod('0000', 'ugo+w', '0222') - await testChmod('0000', 'ugo+r', '0444') - await testChmod('0000', 'a+x', '0111') - await testChmod('0000', 'a+w', '0222') - await testChmod('0000', 'a+r', '0444') - }) - - it('should update modes with basic symbolic notation that removes bits', async () => { - await testChmod('0111', '-x', '0000') - await testChmod('0222', '-w', '0000') - await testChmod('0444', '-r', '0000') - await testChmod('0100', 'u-x', '0000') - await testChmod('0200', 'u-w', '0000') - await testChmod('0400', 'u-r', '0000') - await testChmod('0010', 'g-x', '0000') - await testChmod('0020', 'g-w', '0000') - await testChmod('0040', 'g-r', '0000') - await testChmod('0001', 'o-x', '0000') - await testChmod('0002', 'o-w', '0000') - await testChmod('0004', 'o-r', '0000') - await testChmod('0110', 'ug-x', '0000') - await testChmod('0220', 'ug-w', '0000') - await testChmod('0440', 'ug-r', '0000') - await testChmod('0111', 'ugo-x', '0000') - await testChmod('0222', 'ugo-w', '0000') - await testChmod('0444', 'ugo-r', '0000') - await testChmod('0111', 'a-x', '0000') - await testChmod('0222', 'a-w', '0000') - await testChmod('0444', 'a-r', '0000') - }) - - it('should update modes with basic symbolic notation that overrides bits', async () => { - await testChmod('0777', '=x', '0111') - await testChmod('0777', '=w', '0222') - await testChmod('0777', '=r', '0444') - await testChmod('0777', 'u=x', '0177') - await testChmod('0777', 'u=w', '0277') - await testChmod('0777', 'u=r', '0477') - await testChmod('0777', 'g=x', '0717') - await testChmod('0777', 'g=w', '0727') - await testChmod('0777', 'g=r', '0747') - await testChmod('0777', 'o=x', '0771') - await testChmod('0777', 'o=w', '0772') - await testChmod('0777', 'o=r', '0774') - await testChmod('0777', 'ug=x', '0117') - await testChmod('0777', 'ug=w', '0227') - await testChmod('0777', 'ug=r', '0447') - await testChmod('0777', 'ugo=x', '0111') - await testChmod('0777', 'ugo=w', '0222') - await testChmod('0777', 'ugo=r', '0444') - await testChmod('0777', 'a=x', '0111') - await testChmod('0777', 'a=w', '0222') - await testChmod('0777', 'a=r', '0444') - }) - - it('should update modes with multiple symbolic notation', async () => { - await testChmod('0000', 'g+x,u+w', '0210') - }) - - it('should update modes with special symbolic notation', async () => { - await testChmod('0000', 
'g+s', '2000') - await testChmod('0000', 'u+s', '4000') - await testChmod('0000', '+t', '1000') - await testChmod('0000', '+s', '6000') - }) - - it('should apply special execute permissions to world', async () => { - const path = `/foo-${Math.random()}` - const sub = `${path}/sub` - const file = `${path}/sub/foo.txt` - const bin = `${path}/sub/bar` - - await mfs.mkdir(sub, { - parents: true - }) - await mfs.touch(file) - await mfs.touch(bin) - - await mfs.chmod(path, 0o644, { - recursive: true - }) - await mfs.chmod(bin, 'u+x') - - expect((await mfs.stat(path)).mode).to.equal(0o644) - expect((await mfs.stat(sub)).mode).to.equal(0o644) - expect((await mfs.stat(file)).mode).to.equal(0o644) - expect((await mfs.stat(bin)).mode).to.equal(0o744) - - await mfs.chmod(path, 'a+X', { - recursive: true - }) - - // directories should be world-executable - expect((await mfs.stat(path)).mode).to.equal(0o755) - expect((await mfs.stat(sub)).mode).to.equal(0o755) - - // files without prior execute bit should be untouched - expect((await mfs.stat(file)).mode).to.equal(0o644) - - // files with prior execute bit should now be world-executable - expect((await mfs.stat(bin)).mode).to.equal(0o755) - }) - - it('should apply special execute permissions to user', async () => { - const path = `/foo-${Math.random()}` - const sub = `${path}/sub` - const file = `${path}/sub/foo.txt` - const bin = `${path}/sub/bar` - - await mfs.mkdir(sub, { - parents: true - }) - await mfs.touch(file) - await mfs.touch(bin) - - await mfs.chmod(path, 0o644, { - recursive: true - }) - await mfs.chmod(bin, 'u+x') - - expect((await mfs.stat(path)).mode).to.equal(0o644) - expect((await mfs.stat(sub)).mode).to.equal(0o644) - expect((await mfs.stat(file)).mode).to.equal(0o644) - expect((await mfs.stat(bin)).mode).to.equal(0o744) - - await mfs.chmod(path, 'u+X', { - recursive: true - }) - - // directories should be user executable - expect((await mfs.stat(path)).mode).to.equal(0o744) - expect((await mfs.stat(sub)).mode).to.equal(0o744) - - // files without prior execute bit should be untouched - expect((await mfs.stat(file)).mode).to.equal(0o644) - - // files with prior execute bit should now be user executable - expect((await mfs.stat(bin)).mode).to.equal(0o744) - }) - - it('should apply special execute permissions to user and group', async () => { - const path = `/foo-${Math.random()}` - const sub = `${path}/sub` - const file = `${path}/sub/foo.txt` - const bin = `${path}/sub/bar` - - await mfs.mkdir(sub, { - parents: true - }) - await mfs.touch(file) - await mfs.touch(bin) - - await mfs.chmod(path, 0o644, { - recursive: true - }) - await mfs.chmod(bin, 'u+x') - - expect((await mfs.stat(path)).mode).to.equal(0o644) - expect((await mfs.stat(sub)).mode).to.equal(0o644) - expect((await mfs.stat(file)).mode).to.equal(0o644) - expect((await mfs.stat(bin)).mode).to.equal(0o744) - - await mfs.chmod(path, 'ug+X', { - recursive: true - }) - - // directories should be user and group executable - expect((await mfs.stat(path)).mode).to.equal(0o754) - expect((await mfs.stat(sub)).mode).to.equal(0o754) - - // files without prior execute bit should be untouched - expect((await mfs.stat(file)).mode).to.equal(0o644) - - // files with prior execute bit should now be user and group executable - expect((await mfs.stat(bin)).mode).to.equal(0o754) - }) - - it('should apply special execute permissions to sharded directories', async () => { - const path = `/foo-${Math.random()}` - const sub = `${path}/sub` - const file = `${path}/sub/foo.txt` - const bin = 
`${path}/sub/bar` - - await mfs.mkdir(sub, { - parents: true, - shardSplitThreshold: 0 - }) - await mfs.touch(file, { - shardSplitThreshold: 0 - }) - await mfs.touch(bin, { - shardSplitThreshold: 0 - }) - - await mfs.chmod(path, 0o644, { - recursive: true, - shardSplitThreshold: 0 - }) - await mfs.chmod(bin, 'u+x', { - recursive: true, - shardSplitThreshold: 0 - }) - - expect((await mfs.stat(path)).mode).to.equal(0o644) - expect((await mfs.stat(sub)).mode).to.equal(0o644) - expect((await mfs.stat(file)).mode).to.equal(0o644) - expect((await mfs.stat(bin)).mode).to.equal(0o744) - - await mfs.chmod(path, 'ug+X', { - recursive: true, - shardSplitThreshold: 0 - }) - - // directories should be user and group executable - expect((await mfs.stat(path))).to.include({ - type: 'hamt-sharded-directory', - mode: 0o754 - }) - expect((await mfs.stat(sub)).mode).to.equal(0o754) - - // files without prior execute bit should be untouched - expect((await mfs.stat(file)).mode).to.equal(0o644) - - // files with prior execute bit should now be user and group executable - expect((await mfs.stat(bin)).mode).to.equal(0o754) - }) -}) diff --git a/packages/ipfs-mfs/test/core/cp.js b/packages/ipfs-mfs/test/core/cp.js deleted file mode 100644 index a29c7ebcf1..0000000000 --- a/packages/ipfs-mfs/test/core/cp.js +++ /dev/null @@ -1,371 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const createMfs = require('../helpers/create-mfs') -const createShardedDirectory = require('../helpers/create-sharded-directory') -const streamToBuffer = require('../helpers/stream-to-buffer') -const streamToArray = require('../helpers/stream-to-array') -const crypto = require('crypto') -const CID = require('cids') -const mh = require('multihashes') -const Block = require('ipfs-block') - -describe('cp', () => { - let mfs - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('refuses to copy files without arguments', async () => { - try { - await mfs.cp() - throw new Error('No error was thrown for missing files') - } catch (err) { - expect(err.message).to.contain('Please supply at least one source') - } - }) - - it('refuses to copy files without files', async () => { - try { - await mfs.cp('/destination') - throw new Error('No error was thrown for missing files') - } catch (err) { - expect(err.message).to.contain('Please supply at least one source') - } - }) - - it('refuses to copy files without files even with options', async () => { - try { - await mfs.cp('/destination', {}) - throw new Error('No error was thrown for missing files') - } catch (err) { - expect(err.message).to.contain('Please supply at least one source') - } - }) - - it('refuses to copy a non-existent file', async () => { - try { - await mfs.cp('/i-do-not-exist', '/output') - throw new Error('No error was thrown for a non-existent file') - } catch (err) { - expect(err.message).to.contain('does not exist') - } - }) - - it('refuses to copy files to a non-existent child directory', async () => { - const src1 = `/src2-${Math.random()}` - const src2 = `/src2-${Math.random()}` - const parent = `/output-${Math.random()}` - - await mfs.touch(src1) - await mfs.touch(src2) - await mfs.mkdir(parent) - await expect(mfs.cp(src1, src2, `${parent}/child`)).to.eventually.be.rejectedWith(Error) - .that.has.property('message').that.matches(/destination did not exist/) - }) - - it('refuses to copy files to an unreadable node', async () => { 
- const src1 = `/src2-${Math.random()}` - const parent = `/output-${Math.random()}` - - const cid = new CID(1, 'identity', mh.encode(Buffer.from('derp'), 'identity')) - await mfs.repo.blocks.put(new Block(Buffer.from('derp'), cid)) - await mfs.cp(`/ipfs/${cid}`, parent) - - await mfs.touch(src1) - await expect(mfs.cp(src1, `${parent}/child`)).to.eventually.be.rejectedWith(Error) - .that.has.property('message').that.matches(/No resolver found for codec "identity"/) - }) - - it('refuses to copy files to an existing file', async () => { - const source = `/source-file-${Math.random()}.txt` - const destination = `/dest-file-${Math.random()}.txt` - - await mfs.write(source, crypto.randomBytes(100), { - create: true - }) - await mfs.write(destination, crypto.randomBytes(100), { - create: true - }) - - try { - await mfs.cp(source, destination) - throw new Error('No error was thrown when trying to overwrite a file') - } catch (err) { - expect(err.message).to.contain('directory already has entry by that name') - } - }) - - it('refuses to copy a file to itself', async () => { - const source = `/source-file-${Math.random()}.txt` - - await mfs.write(source, crypto.randomBytes(100), { - create: true - }) - - try { - await mfs.cp(source, source) - throw new Error('No error was thrown when copying a file to itself') - } catch (err) { - expect(err.message).to.contain('directory already has entry by that name') - } - }) - - it('copies a file to new location', async () => { - const source = `/source-file-${Math.random()}.txt` - const destination = `/dest-file-${Math.random()}.txt` - const data = crypto.randomBytes(500) - - await mfs.write(source, data, { - create: true - }) - - await mfs.cp(source, destination) - - const buffer = await streamToBuffer(mfs.read(destination)) - - expect(buffer).to.deep.equal(data) - }) - - it('copies a file to a pre-existing directory', async () => { - const source = `/source-file-${Math.random()}.txt` - const directory = `/dest-directory-${Math.random()}` - const destination = `${directory}${source}` - - await mfs.write(source, crypto.randomBytes(500), { - create: true - }) - await mfs.mkdir(directory) - await mfs.cp(source, directory) - - const stats = await mfs.stat(destination) - expect(stats.size).to.equal(500) - }) - - it('copies directories', async () => { - const source = `/source-directory-${Math.random()}` - const destination = `/dest-directory-${Math.random()}` - - await mfs.mkdir(source) - await mfs.cp(source, destination) - - const stats = await mfs.stat(destination) - expect(stats.type).to.equal('directory') - }) - - it('copies directories recursively', async () => { - const directory = `/source-directory-${Math.random()}` - const subDirectory = `/source-directory-${Math.random()}` - const source = `${directory}${subDirectory}` - const destination = `/dest-directory-${Math.random()}` - - await mfs.mkdir(source, { - parents: true - }) - await mfs.cp(directory, destination) - - const stats = await mfs.stat(destination) - expect(stats.type).to.equal('directory') - - const subDirStats = await mfs.stat(`${destination}/${subDirectory}`) - expect(subDirStats.type).to.equal('directory') - }) - - it('copies multiple files to new location', async () => { - const sources = [{ - path: `/source-file-${Math.random()}.txt`, - data: crypto.randomBytes(500) - }, { - path: `/source-file-${Math.random()}.txt`, - data: crypto.randomBytes(500) - }] - const destination = `/dest-dir-${Math.random()}` - - for (const source of sources) { - await mfs.write(source.path, source.data, { - create:
true - }) - } - - await mfs.cp(sources[0].path, sources[1].path, destination, { - parents: true - }) - - for (const source of sources) { - const buffer = await streamToBuffer(mfs.read(`${destination}${source.path}`)) - - expect(buffer).to.deep.equal(source.data) - } - }) - - it('copies files from ipfs paths', async () => { - const source = `/source-file-${Math.random()}.txt` - const destination = `/dest-file-${Math.random()}.txt` - - await mfs.write(source, crypto.randomBytes(100), { - create: true - }) - - const stats = await mfs.stat(source) - await mfs.cp(`/ipfs/${stats.cid}`, destination) - - const destinationStats = await mfs.stat(destination) - expect(destinationStats.size).to.equal(100) - }) - - it('copies files from deep ipfs paths', async () => { - const dir = `dir-${Math.random()}` - const file = `source-file-${Math.random()}.txt` - const source = `/${dir}/${file}` - const destination = `/dest-file-${Math.random()}.txt` - - await mfs.write(source, crypto.randomBytes(100), { - create: true, - parents: true - }) - - const stats = await mfs.stat(`/${dir}`) - await mfs.cp(`/ipfs/${stats.cid}/${file}`, destination) - - const destinationStats = await mfs.stat(destination) - expect(destinationStats.size).to.equal(100) - }) - - it('copies files to deep mfs paths and creates intermediate directories', async () => { - const source = `/source-file-${Math.random()}.txt` - const destination = `/really/deep/path/to/dest-file-${Math.random()}.txt` - - await mfs.write(source, crypto.randomBytes(100), { - create: true - }) - - await mfs.cp(source, destination, { - parents: true - }) - - const destinationStats = await mfs.stat(destination) - expect(destinationStats.size).to.equal(100) - }) - - it('fails to copy files to deep mfs paths when intermediate directories do not exist', async () => { - const source = `/source-file-${Math.random()}.txt` - const destination = `/really/deep/path-${Math.random()}/to-${Math.random()}/dest-file-${Math.random()}.txt` - - await mfs.write(source, crypto.randomBytes(100), { - create: true - }) - - try { - await mfs.cp(source, destination) - throw new Error('No error was thrown when copying to deep directory with missing intermediate directories') - } catch (err) { - expect(err).to.have.property('code', 'ERR_INVALID_PARAMS') - } - }) - - it('copies a sharded directory to a normal directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - - const normalDir = `dir-${Math.random()}` - const normalDirPath = `/${normalDir}` - - await mfs.mkdir(normalDirPath) - - await mfs.cp(shardedDirPath, normalDirPath) - - const finalShardedDirPath = `${normalDirPath}${shardedDirPath}` - - // should still be a sharded directory - expect((await mfs.stat(finalShardedDirPath)).type).to.equal('hamt-sharded-directory') - - const files = await streamToArray(mfs.ls(finalShardedDirPath)) - - expect(files.length).to.be.ok() - }) - - it('copies a normal directory to a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - - const normalDir = `dir-${Math.random()}` - const normalDirPath = `/${normalDir}` - - await mfs.mkdir(normalDirPath) - - await mfs.cp(normalDirPath, shardedDirPath) - - const finalDirPath = `${shardedDirPath}${normalDirPath}` - - // should still be a sharded directory - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(finalDirPath)).type).to.equal('directory') - }) - - it('copies a file from a normal directory to a sharded directory', async () => { - 
const shardedDirPath = await createShardedDirectory(mfs) - - const file = `file-${Math.random()}.txt` - const filePath = `/${file}` - const finalFilePath = `${shardedDirPath}/${file}` - - await mfs.write(filePath, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - await mfs.cp(filePath, finalFilePath) - - // should still be a sharded directory - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(finalFilePath)).type).to.equal('file') - }) - - it('copies a file from a sharded directory to a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const othershardedDirPath = await createShardedDirectory(mfs) - - const file = `file-${Math.random()}.txt` - const filePath = `${shardedDirPath}/${file}` - const finalFilePath = `${othershardedDirPath}/${file}` - - await mfs.write(filePath, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - await mfs.cp(filePath, finalFilePath) - - // should still be a sharded directory - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(othershardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(finalFilePath)).type).to.equal('file') - }) - - it('copies a file from a sharded directory to a normal directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const dir = `dir-${Math.random()}` - const dirPath = `/${dir}` - - const file = `file-${Math.random()}.txt` - const filePath = `${shardedDirPath}/${file}` - const finalFilePath = `${dirPath}/${file}` - - await mfs.write(filePath, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - await mfs.mkdir(dirPath) - - await mfs.cp(filePath, finalFilePath) - - // should still be a sharded directory - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(dirPath)).type).to.equal('directory') - expect((await mfs.stat(finalFilePath)).type).to.equal('file') - }) -}) diff --git a/packages/ipfs-mfs/test/core/flush.js b/packages/ipfs-mfs/test/core/flush.js deleted file mode 100644 index 91bffd2712..0000000000 --- a/packages/ipfs-mfs/test/core/flush.js +++ /dev/null @@ -1,34 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const createMfs = require('../helpers/create-mfs') - -describe('flush', () => { - let mfs - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('flushes the root node', async () => { - const cid = await mfs.flush() - - expect(cid.toString()).to.equal((await mfs.stat('/')).cid.toString()) - }) - - it('throws an error when trying to flush non-existent directories', async () => { - try { - await mfs.flush(`/some-dir-${Math.random()}`) - throw new Error('No error was thrown') - } catch (err) { - expect(err.message).to.include('does not exist') - } - }) -}) diff --git a/packages/ipfs-mfs/test/core/ls.js b/packages/ipfs-mfs/test/core/ls.js deleted file mode 100644 index 89d512d9ac..0000000000 --- a/packages/ipfs-mfs/test/core/ls.js +++ /dev/null @@ -1,208 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const CID = require('cids') -const { - FILE_TYPES -} = require('../../src') -const createMfs = require('../helpers/create-mfs') -const createShardedDirectory = require('../helpers/create-sharded-directory') -const
streamToArray = require('../helpers/stream-to-array') -const crypto = require('crypto') - -describe('ls', () => { - let mfs - const largeFile = crypto.randomBytes(490668) - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('lists the root directory by default', async () => { - const fileName = `small-file-${Math.random()}.txt` - const content = Buffer.from('Hello world') - - await mfs.write(`/${fileName}`, content, { - create: true - }) - - const files = await streamToArray(mfs.ls()) - - expect(files.find(file => file.name === fileName)).to.be.ok() - }) - - it('refuses to list files with an empty path', async () => { - try { - for await (const _ of mfs.ls('')) { // eslint-disable-line no-unused-vars - // falala - } - - throw new Error('No error was thrown for an empty path') - } catch (err) { - expect(err.code).to.equal('ERR_NO_PATH') - } - }) - - it('refuses to list files with an invalid path', async () => { - try { - for await (const _ of mfs.ls('not-valid')) { // eslint-disable-line no-unused-vars - // falala - } - - throw new Error('No error was thrown for an invalid path') - } catch (err) { - expect(err.code).to.equal('ERR_INVALID_PATH') - } - }) - - it('lists files in a directory', async () => { - const dirName = `dir-${Math.random()}` - const fileName = `small-file-${Math.random()}.txt` - const content = Buffer.from('Hello world') - - await mfs.write(`/${dirName}/${fileName}`, content, { - create: true, - parents: true - }) - - const files = await streamToArray(mfs.ls(`/${dirName}`)) - - expect(files.find(file => file.name === fileName)).to.be.ok() - expect(files.length).to.equal(1) - expect(files[0].name).to.equal(fileName) - expect(files[0].type).to.equal(FILE_TYPES.file) - expect(files[0].size).to.equal(content.length) - expect(CID.isCID(files[0].cid)).to.be.ok() - }) - - it('lists a file', async () => { - const fileName = `small-file-${Math.random()}.txt` - const content = Buffer.from('Hello world') - - await mfs.write(`/${fileName}`, content, { - create: true - }) - - const files = await streamToArray(mfs.ls(`/${fileName}`)) - - expect(files.length).to.equal(1) - expect(files[0].name).to.equal(fileName) - expect(files[0].type).to.equal(FILE_TYPES.file) - expect(files[0].size).to.equal(content.length) - expect(CID.isCID(files[0].cid)).to.be.ok() - }) - - it('fails to list non-existent file', async () => { - try { - for await (const _ of mfs.ls('/i-do-not-exist')) { // eslint-disable-line no-unused-vars - // falala - } - - throw new Error('No error was thrown for a non-existent file') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('lists a raw node', async () => { - const filePath = '/stat/large-file.txt' - - await mfs.write(filePath, largeFile, { - create: true, - parents: true, - rawLeaves: true - }) - - const stats = await mfs.stat(filePath) - const node = await mfs.ipld.get(stats.cid) - const child = node.Links[0] - - expect(child.Hash.codec).to.equal('raw') - - const files = await streamToArray(mfs.ls(`/ipfs/${child.Hash}`)) - - expect(files.length).to.equal(1) - expect(files[0].type).to.equal(0) // this is what go does - expect(files[0].cid.toString()).to.equal(child.Hash.toString()) - }) - - it('lists a raw node in an mfs directory', async () => { - const filePath = '/stat/large-file.txt' - - await mfs.write(filePath, largeFile, { - create: true, - parents: true, - rawLeaves: true - }) - - const stats = await mfs.stat(filePath) - const cid = stats.cid - const node = await
mfs.ipld.get(cid) - const child = node.Links[0] - - expect(child.Hash.codec).to.equal('raw') - - const dir = `/dir-with-raw-${Math.random()}` - const path = `${dir}/raw-${Math.random()}` - - await mfs.mkdir(dir) - await mfs.cp(`/ipfs/${child.Hash}`, path) - - const files = await streamToArray(mfs.ls(`/ipfs/${child.Hash}`)) - - expect(files.length).to.equal(1) - expect(files[0].type).to.equal(0) // this is what go does - expect(files[0].cid.toString()).to.equal(child.Hash.toString()) - }) - - it('lists a sharded directory contents', async () => { - const shardSplitThreshold = 10 - const fileCount = 11 - const dirPath = await createShardedDirectory(mfs, shardSplitThreshold, fileCount) - - const files = await streamToArray(mfs.ls(dirPath)) - - expect(files.length).to.equal(fileCount) - - files.forEach(file => { - // should be a file - expect(file.type).to.equal(0) - }) - }) - - it('lists a file inside a sharded directory directly', async () => { - const dirPath = await createShardedDirectory(mfs) - const files = await streamToArray(mfs.ls(dirPath)) - - const filePath = `${dirPath}/${files[0].name}` - - // should be able to ls new file directly - const file = await streamToArray(mfs.ls(filePath)) - - expect(file.length).to.equal(1) - expect(file[0].name).to.equal(files[0].name) - }) - - it('lists the contents of a directory inside a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const dirPath = `${shardedDirPath}/subdir-${Math.random()}` - const fileName = `small-file-${Math.random()}.txt` - - await mfs.mkdir(`${dirPath}`) - await mfs.write(`${dirPath}/${fileName}`, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - const files = await streamToArray(mfs.ls(dirPath)) - - expect(files.length).to.equal(1) - expect(files.filter(file => file.name === fileName)).to.be.ok() - }) -}) diff --git a/packages/ipfs-mfs/test/core/mkdir.js b/packages/ipfs-mfs/test/core/mkdir.js deleted file mode 100644 index 07a26a564a..0000000000 --- a/packages/ipfs-mfs/test/core/mkdir.js +++ /dev/null @@ -1,176 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const multihash = require('multihashes') -const createMfs = require('../helpers/create-mfs') -const cidAtPath = require('../helpers/cid-at-path') -const createShardedDirectory = require('../helpers/create-sharded-directory') -const all = require('it-all') - -describe('mkdir', () => { - let mfs - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('requires a directory', async () => { - try { - await mfs.mkdir('') - throw new Error('No error was thrown when creating a directory with an empty path') - } catch (err) { - expect(err.message).to.contain('no path given') - } - }) - - it('refuses to create a directory without a leading slash', async () => { - try { - await mfs.mkdir('foo') - throw new Error('No error was thrown when creating a directory with no leading slash') - } catch (err) { - expect(err.code).to.equal('ERR_INVALID_PATH') - } - }) - - it('refuses to recreate the root directory when -p is false', async () => { - try { - await mfs.mkdir('/', { - parents: false - }) - throw new Error('No error was thrown when creating the root directory without -p') - } catch (err) { - expect(err.message).to.contain("cannot create directory '/'") - } - }) - - it('refuses to create a nested directory when -p is false', async () => { - try { - await mfs.mkdir('/foo/bar/baz',
{ - parents: false - }) - throw new Error('No error was thrown when creating intermediate directories without -p') - } catch (err) { - expect(err.message).to.contain('does not exist') - } - }) - - it('creates a directory', async () => { - const path = '/foo' - - await mfs.mkdir(path, {}) - - const stats = await mfs.stat(path) - expect(stats.type).to.equal('directory') - - const files = await all(mfs.ls(path)) - - expect(files.length).to.equal(0) - }) - - it('refuses to create a directory that already exists', async () => { - const path = '/qux/quux/quuux' - - await mfs.mkdir(path, { - parents: true - }) - - try { - await mfs.mkdir(path, { - parents: false - }) - - throw new Error('Did not refuse to create a path that already exists') - } catch (err) { - expect(err.code).to.equal('ERR_ALREADY_EXISTS') - } - }) - - it('does not error when creating a directory that already exists and parents is true', async () => { - const path = '/qux/quux/quuux' - - await mfs.mkdir(path, { - parents: true - }) - - await mfs.mkdir(path, { - parents: true - }) - }) - - it('creates a nested directory when -p is true', async () => { - const path = '/foo/bar/baz' - - await mfs.mkdir(path, { - parents: true - }) - - const files = await all(mfs.ls(path)) - - expect(files.length).to.equal(0) - }) - - it('creates nested directories', async () => { - await mfs.mkdir('/nested-dir') - await mfs.mkdir('/nested-dir/baz') - - const files = await all(mfs.ls('/nested-dir')) - - expect(files.length).to.equal(1) - }) - - it('creates a nested directory with a different CID version to the parent', async () => { - const directory = `cid-versions-${Math.random()}` - const directoryPath = `/${directory}` - const subDirectory = `cid-versions-${Math.random()}` - const subDirectoryPath = `${directoryPath}/${subDirectory}` - - await mfs.mkdir(directoryPath, { - cidVersion: 0 - }) - - expect((await cidAtPath(directoryPath, mfs)).version).to.equal(0) - - await mfs.mkdir(subDirectoryPath, { - cidVersion: 1 - }) - - expect((await cidAtPath(subDirectoryPath, mfs)).version).to.equal(1) - }) - - it('creates a nested directory with a different hash function to the parent', async () => { - const directory = `cid-versions-${Math.random()}` - const directoryPath = `/${directory}` - const subDirectory = `cid-versions-${Math.random()}` - const subDirectoryPath = `${directoryPath}/${subDirectory}` - - await mfs.mkdir(directoryPath, { - cidVersion: 0 - }) - - expect((await cidAtPath(directoryPath, mfs)).version).to.equal(0) - - await mfs.mkdir(subDirectoryPath, { - cidVersion: 1, - hashAlg: 'sha2-512' - }) - - expect(multihash.decode((await cidAtPath(subDirectoryPath, mfs)).multihash).name).to.equal('sha2-512') - }) - - it('makes a directory inside a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const dirPath = `${shardedDirPath}/subdir-${Math.random()}` - - await mfs.mkdir(`${dirPath}`) - - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(dirPath)).type).to.equal('directory') - }) -}) diff --git a/packages/ipfs-mfs/test/core/mv.js b/packages/ipfs-mfs/test/core/mv.js deleted file mode 100644 index 4f48c9bd63..0000000000 --- a/packages/ipfs-mfs/test/core/mv.js +++ /dev/null @@ -1,265 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const createMfs = require('../helpers/create-mfs') -const createShardedDirectory = 
require('../helpers/create-sharded-directory') -const streamToBuffer = require('../helpers/stream-to-buffer') -const crypto = require('crypto') - -describe('mv', () => { - let mfs - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('refuses to move files without arguments', async () => { - try { - await mfs.mv() - throw new Error('No error was thrown for missing files') - } catch (err) { - expect(err.message).to.contain('Please supply at least one source') - } - }) - - it('refuses to move files without enough arguments', async () => { - try { - await mfs.mv('/destination') - throw new Error('No error was thrown for missing files') - } catch (err) { - expect(err.message).to.contain('Please supply at least one source') - } - }) - - it('moves a file', async () => { - const source = `/source-file-${Math.random()}.txt` - const destination = `/dest-file-${Math.random()}.txt` - const data = crypto.randomBytes(500) - - await mfs.write(source, data, { - create: true - }) - await mfs.mv(source, destination) - - const buffer = await streamToBuffer(mfs.read(destination)) - expect(buffer).to.deep.equal(data) - - try { - await mfs.stat(source) - throw new Error('File was copied but not removed') - } catch (err) { - expect(err.message).to.contain('does not exist') - } - }) - - it('moves a directory', async () => { - const source = `/source-directory-${Math.random()}` - const destination = `/dest-directory-${Math.random()}` - - await mfs.mkdir(source) - await mfs.mv(source, destination, { - recursive: true - }) - const stats = await mfs.stat(destination) - - expect(stats.type).to.equal('directory') - - try { - await mfs.stat(source) - throw new Error('Directory was copied but not removed') - } catch (err) { - expect(err.message).to.contain('does not exist') - } - }) - - it('moves directories recursively', async () => { - const directory = `source-directory-${Math.random()}` - const subDirectory = `/source-directory-${Math.random()}` - const source = `/${directory}${subDirectory}` - const destination = `/dest-directory-${Math.random()}` - - await mfs.mkdir(source, { - parents: true - }) - await mfs.mv(`/${directory}`, destination, { - recursive: true - }) - - const stats = await mfs.stat(destination) - expect(stats.type).to.equal('directory') - - const subDirectoryStats = await mfs.stat(`${destination}${subDirectory}`) - expect(subDirectoryStats.type).to.equal('directory') - - try { - await mfs.stat(source) - throw new Error('Directory was copied but not removed') - } catch (err) { - expect(err.message).to.contain('does not exist') - } - }) - - it('moves a sharded directory to a normal directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const dirPath = `/dir-${Math.random()}` - const finalShardedDirPath = `${dirPath}${shardedDirPath}` - - await mfs.mkdir(dirPath) - await mfs.mv(shardedDirPath, dirPath) - - expect((await mfs.stat(finalShardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(dirPath)).type).to.equal('directory') - - try { - await mfs.stat(shardedDirPath) - throw new Error('Dir was not removed') - } catch (error) { - expect(error.message).to.contain('does not exist') - } - }) - - it('moves a normal directory to a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const dirPath = `/dir-${Math.random()}` - const finalDirPath = `${shardedDirPath}${dirPath}` - - await mfs.mkdir(dirPath) - await mfs.mv(dirPath, shardedDirPath) - - 
expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(finalDirPath)).type).to.equal('directory') - - try { - await mfs.stat(dirPath) - throw new Error('Dir was not removed') - } catch (error) { - expect(error.message).to.contain('does not exist') - } - }) - - it('moves a sharded directory to a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const otherShardedDirPath = await createShardedDirectory(mfs) - const finalShardedDirPath = `${shardedDirPath}${otherShardedDirPath}` - - await mfs.mv(otherShardedDirPath, shardedDirPath) - - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(finalShardedDirPath)).type).to.equal('hamt-sharded-directory') - - try { - await mfs.stat(otherShardedDirPath) - throw new Error('Sharded dir was not removed') - } catch (error) { - expect(error.message).to.contain('does not exist') - } - }) - - it('moves a file from a normal directory to a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const dirPath = `/dir-${Math.random()}` - const file = `file-${Math.random()}.txt` - const filePath = `${dirPath}/${file}` - const finalFilePath = `${shardedDirPath}/${file}` - - await mfs.mkdir(dirPath) - await mfs.write(filePath, Buffer.from([0, 1, 2, 3, 4]), { - create: true - }) - - await mfs.mv(filePath, shardedDirPath) - - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(finalFilePath)).type).to.equal('file') - - try { - await mfs.stat(filePath) - throw new Error('File was not removed') - } catch (error) { - expect(error.message).to.contain('does not exist') - } - }) - - it('moves a file from a sharded directory to a normal directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const dirPath = `/dir-${Math.random()}` - const file = `file-${Math.random()}.txt` - const filePath = `${shardedDirPath}/${file}` - const finalFilePath = `${dirPath}/${file}` - - await mfs.mkdir(dirPath) - await mfs.write(filePath, Buffer.from([0, 1, 2, 3, 4]), { - create: true - }) - - await mfs.mv(filePath, dirPath) - - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(finalFilePath)).type).to.equal('file') - expect((await mfs.stat(dirPath)).type).to.equal('directory') - - try { - await mfs.stat(filePath) - throw new Error('File was not removed') - } catch (error) { - expect(error.message).to.contain('does not exist') - } - }) - - it('moves a file from a sharded directory to a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const otherShardedDirPath = await createShardedDirectory(mfs) - const file = `file-${Math.random()}.txt` - const filePath = `${shardedDirPath}/${file}` - const finalFilePath = `${otherShardedDirPath}/${file}` - - await mfs.write(filePath, Buffer.from([0, 1, 2, 3, 4]), { - create: true - }) - - await mfs.mv(filePath, otherShardedDirPath) - - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(finalFilePath)).type).to.equal('file') - expect((await mfs.stat(otherShardedDirPath)).type).to.equal('hamt-sharded-directory') - - try { - await mfs.stat(filePath) - throw new Error('File was not removed') - } catch (error) { - expect(error.message).to.contain('does not exist') - } - }) - - it('moves a file from a sub-shard of a sharded directory to a 
sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs, 10, 75) - const otherShardedDirPath = await createShardedDirectory(mfs) - const file = 'file-1a.txt' - const filePath = `${shardedDirPath}/${file}` - const finalFilePath = `${otherShardedDirPath}/${file}` - - await mfs.write(filePath, Buffer.from([0, 1, 2, 3, 4]), { - create: true - }) - - await mfs.mv(filePath, otherShardedDirPath) - - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(finalFilePath)).type).to.equal('file') - expect((await mfs.stat(otherShardedDirPath)).type).to.equal('hamt-sharded-directory') - - try { - await mfs.stat(filePath) - throw new Error('File was not removed') - } catch (error) { - expect(error.message).to.contain('does not exist') - } - }) -}) diff --git a/packages/ipfs-mfs/test/core/read.js b/packages/ipfs-mfs/test/core/read.js deleted file mode 100644 index 36996a9450..0000000000 --- a/packages/ipfs-mfs/test/core/read.js +++ /dev/null @@ -1,153 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const createMfs = require('../helpers/create-mfs') -const createShardedDirectory = require('../helpers/create-sharded-directory') -const crypto = require('crypto') -const streamToBuffer = require('../helpers/stream-to-buffer') - -describe('read', () => { - let mfs - const smallFile = crypto.randomBytes(13) - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('reads a small file', async () => { - const filePath = '/small-file.txt' - - await mfs.write(filePath, smallFile, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(filePath)) - - expect(buffer).to.deep.equal(smallFile) - }) - - it('reads a file with an offset', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const offset = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - offset - })) - - expect(buffer).to.deep.equal(data.slice(offset)) - }) - - it('reads a file with a length', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - length - })) - - expect(buffer).to.deep.equal(data.slice(0, length)) - }) - - it('reads a file with a legacy count argument', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - count: length - })) - - expect(buffer).to.deep.equal(data.slice(0, length)) - }) - - it('reads a file with an offset and a length', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const offset = 10 - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - offset, - length - })) - - expect(buffer).to.deep.equal(data.slice(offset, offset + length)) - }) - - it('reads a file with an offset and a legacy count argument', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const offset = 10 - const length = 10 - - await mfs.write(path, data, { - 
create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - offset, - count: length - })) - - expect(buffer).to.deep.equal(data.slice(offset, offset + length)) - }) - - it('refuses to read a directory', async () => { - const path = '/' - - try { - await streamToBuffer(mfs.read(path)) - throw new Error('Should have errored on trying to read a directory') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FILE') - } - }) - - it('refuses to read a non-existent file', async () => { - try { - await streamToBuffer(mfs.read(`/file-${Math.random()}.txt`)) - throw new Error('Should have errored on non-existent file') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('reads file from inside a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const filePath = `${shardedDirPath}/file-${Math.random()}.txt` - const content = Buffer.from([0, 1, 2, 3, 4]) - - await mfs.write(filePath, content, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(filePath)) - - expect(buffer).to.deep.equal(content) - }) -}) diff --git a/packages/ipfs-mfs/test/core/rm.js b/packages/ipfs-mfs/test/core/rm.js deleted file mode 100644 index 18f30c2bfc..0000000000 --- a/packages/ipfs-mfs/test/core/rm.js +++ /dev/null @@ -1,332 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const createMfs = require('../helpers/create-mfs') -const createShardedDirectory = require('../helpers/create-sharded-directory') -const createTwoShards = require('../helpers/create-two-shards') -const crypto = require('crypto') -const { - FILE_SEPARATOR -} = require('../../src/core/utils/constants') - -describe('rm', () => { - let mfs - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('refuses to remove files without arguments', async () => { - try { - await mfs.rm() - throw new Error('No error was thrown for missing paths') - } catch (err) { - expect(err.code).to.equal('ERR_INVALID_PARAMS') - } - }) - - it('refuses to remove the root path', async () => { - try { - await mfs.rm(FILE_SEPARATOR) - throw new Error('No error was thrown for missing paths') - } catch (err) { - expect(err.code).to.equal('ERR_INVALID_PARAMS') - } - }) - - it('refuses to remove a directory without the recursive flag', async () => { - const path = `/directory-${Math.random()}` - - await mfs.mkdir(path) - - try { - await mfs.rm(path) - throw new Error('No error was thrown for missing recursive flag') - } catch (err) { - expect(err.code).to.equal('ERR_WAS_DIR') - } - }) - - it('refuses to remove a non-existent file', async () => { - try { - await mfs.rm(`/file-${Math.random()}`) - throw new Error('No error was thrown for non-existent file') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('removes a file', async () => { - const file = `/some-file-${Math.random()}.txt` - - await mfs.write(file, crypto.randomBytes(100), { - create: true, - parents: true - }) - - await mfs.rm(file, { - recursive: true - }) - - try { - await mfs.stat(file) - throw new Error('File was not removed') - } catch (err) { - expect(err.message).to.contain('does not exist') - } - }) - - it('removes multiple files', async () => { - const file1 = `/some-file-${Math.random()}.txt` - const file2 = `/some-file-${Math.random()}.txt` - - await mfs.write(file1, crypto.randomBytes(100), { - create: true, - parents: true - }) - 
await mfs.write(file2, crypto.randomBytes(100), { - create: true, - parents: true - }) - await mfs.rm(file1, file2, { - recursive: true - }) - - try { - await mfs.stat(file1) - throw new Error('File #1 was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - - try { - await mfs.stat(file2) - throw new Error('File #2 was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('removes a directory', async () => { - const directory = `/directory-${Math.random()}` - - await mfs.mkdir(directory) - await mfs.rm(directory, { - recursive: true - }) - - try { - await mfs.stat(directory) - throw new Error('Directory was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('recursively removes a directory', async () => { - const directory = `/directory-${Math.random()}` - const subdirectory = `/directory-${Math.random()}` - const path = `${directory}${subdirectory}` - - await mfs.mkdir(path, { - parents: true - }) - await mfs.rm(directory, { - recursive: true - }) - - try { - await mfs.stat(path) - throw new Error('File was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - - try { - await mfs.stat(directory) - throw new Error('Directory was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('recursively removes a directory with files in it', async () => { - const directory = `directory-${Math.random()}` - const file = `/${directory}/some-file-${Math.random()}.txt` - - await mfs.write(file, crypto.randomBytes(100), { - create: true, - parents: true - }) - await mfs.rm(`/${directory}`, { - recursive: true - }) - - try { - await mfs.stat(file) - throw new Error('File was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - - try { - await mfs.stat(`/${directory}`) - throw new Error('Directory was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('recursively removes a sharded directory inside a normal directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const dir = `dir-${Math.random()}` - const dirPath = `/${dir}` - - await mfs.mkdir(dirPath) - - await mfs.mv(shardedDirPath, dirPath) - - const finalShardedDirPath = `${dirPath}${shardedDirPath}` - - expect((await mfs.stat(finalShardedDirPath)).type).to.equal('hamt-sharded-directory') - - await mfs.rm(dirPath, { - recursive: true - }) - - try { - await mfs.stat(dirPath) - throw new Error('Directory was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - - try { - await mfs.stat(shardedDirPath) - throw new Error('Directory was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('recursively removes a sharded directory inside a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const otherDirPath = await createShardedDirectory(mfs) - - await mfs.mv(shardedDirPath, otherDirPath) - - const finalShardedDirPath = `${otherDirPath}${shardedDirPath}` - - expect((await mfs.stat(finalShardedDirPath)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(otherDirPath)).type).to.equal('hamt-sharded-directory') - - await mfs.rm(otherDirPath, { - recursive: true - }) - - try { - await mfs.stat(otherDirPath) - throw new Error('Directory was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - - try { - await
mfs.stat(finalShardedDirPath) - throw new Error('Directory was not removed') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('results in the same hash as a sharded directory created by the importer when removing a file', async function () { - this.timeout(60000) - - const { - nextFile, - dirWithAllFiles, - dirWithSomeFiles, - dirPath - } = await createTwoShards(mfs.ipld, 15) - - await mfs.cp(`/ipfs/${dirWithAllFiles}`, dirPath) - - await mfs.rm(nextFile.path) - - const stats = await mfs.stat(dirPath) - const updatedDirCid = stats.cid - - expect(stats.type).to.equal('hamt-sharded-directory') - expect(updatedDirCid.toString()).to.deep.equal(dirWithSomeFiles.toString()) - }) - - it('results in the same hash as a sharded directory created by the importer when removing a subshard', async function () { - this.timeout(60000) - - const { - nextFile, - dirWithAllFiles, - dirWithSomeFiles, - dirPath - } = await createTwoShards(mfs.ipld, 31) - - await mfs.cp(`/ipfs/${dirWithAllFiles}`, dirPath) - - await mfs.rm(nextFile.path) - - const stats = await mfs.stat(dirPath) - const updatedDirCid = stats.cid - - expect(stats.type).to.equal('hamt-sharded-directory') - expect(updatedDirCid.toString()).to.deep.equal(dirWithSomeFiles.toString()) - }) - - it('results in the same hash as a sharded directory created by the importer when removing a file from a subshard of a subshard', async function () { - this.timeout(60000) - - const { - nextFile, - dirWithAllFiles, - dirWithSomeFiles, - dirPath - } = await createTwoShards(mfs.ipld, 2187) - - await mfs.cp(`/ipfs/${dirWithAllFiles}`, dirPath) - - await mfs.rm(nextFile.path) - - const stats = await mfs.stat(dirPath) - const updatedDirCid = stats.cid - - expect(stats.type).to.equal('hamt-sharded-directory') - expect(updatedDirCid.toString()).to.deep.equal(dirWithSomeFiles.toString()) - }) - - it('results in the same hash as a sharded directory created by the importer when removing a subshard of a subshard', async function () { - this.timeout(60000) - - const { - nextFile, - dirWithAllFiles, - dirWithSomeFiles, - dirPath - } = await createTwoShards(mfs.ipld, 139) - - await mfs.cp(`/ipfs/${dirWithAllFiles}`, dirPath) - - await mfs.rm(nextFile.path) - - const stats = await mfs.stat(dirPath) - const updatedDirCid = stats.cid - - expect(stats.type).to.equal('hamt-sharded-directory') - expect(updatedDirCid.toString()).to.deep.equal(dirWithSomeFiles.toString()) - }) -}) diff --git a/packages/ipfs-mfs/test/core/stat.js b/packages/ipfs-mfs/test/core/stat.js deleted file mode 100644 index 790ad4e3b2..0000000000 --- a/packages/ipfs-mfs/test/core/stat.js +++ /dev/null @@ -1,195 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const crypto = require('crypto') -const createMfs = require('../helpers/create-mfs') -const createShardedDirectory = require('../helpers/create-sharded-directory') -const mc = require('multicodec') -const CID = require('cids') -const mh = require('multihashes') -const Block = require('ipfs-block') - -describe('stat', () => { - let mfs - const smallFile = crypto.randomBytes(13) - const largeFile = crypto.randomBytes(490668) - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('refuses to stat files with an empty path', async () => { - try { - await mfs.stat('') - throw new Error('No error was thrown for an empty path') - } catch (err) { - expect(err.message).to.contain('paths 
must not be empty') - } - }) - - it('refuses to stat files with an invalid path', async () => { - try { - await mfs.stat('not-valid') - throw new Error('No error was thrown for an invalid path') - } catch (err) { - expect(err.message).to.contain('paths must start with a leading /') - } - }) - - it('fails to stat non-existent file', async () => { - try { - await mfs.stat('/i-do-not-exist') - throw new Error('No error was thrown for a non-existent file') - } catch (err) { - expect(err.message).to.contain('does not exist') - } - }) - - it('stats an empty directory', async () => { - const path = `/directory-${Math.random()}` - - await mfs.mkdir(path) - - const stats = await mfs.stat(path) - expect(stats.size).to.equal(0) - expect(stats.cumulativeSize).to.equal(4) - expect(stats.blocks).to.equal(0) - expect(stats.type).to.equal('directory') - }) - - it.skip('computes how much of the DAG is local', async () => { - - }) - - it('stats a small file', async () => { - const filePath = '/stat/small-file.txt' - - await mfs.write(filePath, smallFile, { - create: true, - parents: true - }) - - const stats = await mfs.stat(filePath) - expect(stats.size).to.equal(smallFile.length) - expect(stats.cumulativeSize).to.equal(71) - expect(stats.blocks).to.equal(1) - expect(stats.type).to.equal('file') - }) - - it('stats a large file', async () => { - const filePath = '/stat/large-file.txt' - - await mfs.write(filePath, largeFile, { - create: true, - parents: true - }) - - const stats = await mfs.stat(filePath) - expect(stats.size).to.equal(largeFile.length) - expect(stats.cumulativeSize).to.equal(490800) - expect(stats.blocks).to.equal(2) - expect(stats.type).to.equal('file') - }) - - it('stats a raw node', async () => { - const filePath = '/stat/large-file.txt' - - await mfs.write(filePath, largeFile, { - create: true, - parents: true, - rawLeaves: true - }) - - const stats = await mfs.stat(filePath) - const node = await mfs.ipld.get(stats.cid) - const child = node.Links[0] - - expect(child.Hash.codec).to.equal('raw') - - const rawNodeStats = await mfs.stat(`/ipfs/${child.Hash}`) - - expect(rawNodeStats.cid.toString()).to.equal(child.Hash.toString()) - expect(rawNodeStats.type).to.equal('file') // this is what go does - }) - - it('stats a raw node in an mfs directory', async () => { - const filePath = '/stat/large-file.txt' - - await mfs.write(filePath, largeFile, { - create: true, - parents: true, - rawLeaves: true - }) - - const stats = await mfs.stat(filePath) - const node = await mfs.ipld.get(stats.cid) - const child = node.Links[0] - - expect(child.Hash.codec).to.equal('raw') - - const dir = `/dir-with-raw-${Math.random()}` - const path = `${dir}/raw-${Math.random()}` - - await mfs.mkdir(dir) - await mfs.cp(`/ipfs/${child.Hash}`, path) - - const rawNodeStats = await mfs.stat(path) - - expect(rawNodeStats.cid.toString()).to.equal(child.Hash.toString()) - expect(rawNodeStats.type).to.equal('file') // this is what go does - }) - - it('stats a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - - const stats = await mfs.stat(`${shardedDirPath}`) - - expect(stats.type).to.equal('hamt-sharded-directory') - expect(stats.size).to.equal(0) - }) - - it('stats a file inside a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const files = [] - - for await (const file of mfs.ls(`${shardedDirPath}`)) { - files.push(file) - } - - const stats = await mfs.stat(`${shardedDirPath}/${files[0].name}`) - - expect(stats.type).to.equal('file')
- expect(stats.size).to.equal(7) - }) - - it('stats a dag-cbor node', async () => { - const path = '/cbor.node' - const node = {} - const cid = await mfs.ipld.put(node, mc.getNumber('dag-cbor')) - await mfs.cp(`/ipfs/${cid}`, path) - - const stats = await mfs.stat(path) - - expect(stats.cid.toString()).to.equal(cid.toString()) - }) - - it('stats an identity CID', async () => { - const data = Buffer.from('derp') - const path = '/identity.node' - const cid = new CID(1, 'identity', mh.encode(data, 'identity')) - await mfs.repo.blocks.put(new Block(data, cid)) - await mfs.cp(`/ipfs/${cid}`, path) - - const stats = await mfs.stat(path) - - expect(stats.cid.toString()).to.equal(cid.toString()) - expect(stats).to.have.property('size', data.length) - }) -}) diff --git a/packages/ipfs-mfs/test/core/touch.js b/packages/ipfs-mfs/test/core/touch.js deleted file mode 100644 index 2b15e7521e..0000000000 --- a/packages/ipfs-mfs/test/core/touch.js +++ /dev/null @@ -1,84 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const expect = require('../helpers/chai') -const createMfs = require('../helpers/create-mfs') -const streamToBuffer = require('../helpers/stream-to-buffer') -const delay = require('delay') - -describe('touch', () => { - let mfs - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('should update the mtime for a file', async () => { - const path = `/foo-${Math.random()}` - - await mfs.write(path, Buffer.from('Hello world'), { - create: true, - mtime: new Date() - }) - const originalMtime = (await mfs.stat(path)).mtime - await delay(1000) - await mfs.touch(path, { - flush: true - }) - - const updatedMtime = (await mfs.stat(path)).mtime - expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) - }) - - it('should update the mtime for a directory', async () => { - const path = `/foo-${Math.random()}` - - await mfs.mkdir(path, { - mtime: new Date() - }) - const originalMtime = (await mfs.stat(path)).mtime - await delay(1000) - await mfs.touch(path, { - flush: true - }) - - const updatedMtime = (await mfs.stat(path)).mtime - expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) - }) - - it('should update the mtime for a hamt-sharded-directory', async () => { - const path = `/foo-${Math.random()}` - - await mfs.mkdir(path, { - mtime: new Date() - }) - await mfs.write(`${path}/foo.txt`, Buffer.from('Hello world'), { - create: true, - shardSplitThreshold: 0 - }) - const originalMtime = (await mfs.stat(path)).mtime - await delay(1000) - await mfs.touch(path, { - flush: true - }) - - const updatedMtime = (await mfs.stat(path)).mtime - expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) - }) - - it('should create an empty file', async () => { - const path = `/foo-${Math.random()}` - - await mfs.touch(path, { - flush: true - }) - - const buffer = await streamToBuffer(mfs.read(path)) - - expect(buffer).to.deep.equal(Buffer.from([])) - }) -}) diff --git a/packages/ipfs-mfs/test/core/write.js b/packages/ipfs-mfs/test/core/write.js deleted file mode 100644 index a30cb950a4..0000000000 --- a/packages/ipfs-mfs/test/core/write.js +++ /dev/null @@ -1,926 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const isNode = require('detect-node') -const multihash = require('multihashes') -const util = require('util') -const createMfs = require('../helpers/create-mfs') -const cidAtPath = require('../helpers/cid-at-path') -const 
traverseLeafNodes = require('../helpers/traverse-leaf-nodes') -const createShard = require('../helpers/create-shard') -const createShardedDirectory = require('../helpers/create-sharded-directory') -const createTwoShards = require('../helpers/create-two-shards') -const crypto = require('crypto') -const all = require('it-all') - -let fs, tempWrite - -if (isNode) { - fs = require('fs') - tempWrite = require('temp-write') -} - -describe('write', () => { - let mfs - const smallFile = crypto.randomBytes(13) - const largeFile = crypto.randomBytes(490668) - - const runTest = (fn) => { - const iterations = 5 - const files = [{ - type: 'Small file', - path: `/small-file-${Math.random()}.txt`, - content: smallFile, - contentSize: smallFile.length - }, { - type: 'Large file', - path: `/large-file-${Math.random()}.jpg`, - content: largeFile, - contentSize: largeFile.length - }, { - type: 'Really large file', - path: `/really-large-file-${Math.random()}.jpg`, - content: { - [Symbol.asyncIterator]: function * () { - for (let i = 0; i < iterations; i++) { - yield largeFile - } - } - }, - contentSize: largeFile.length * iterations - }] - - files.forEach((file) => { - fn(file) - }) - } - - before(async () => { - mfs = await createMfs() - }) - - after(async () => { - await mfs.repo.close() - }) - - it('explodes if it cannot convert content to a pull stream', async () => { - try { - await mfs.write('/foo', -1, { - create: true - }) - throw new Error('Did not fail to convert -1 into a pull stream source') - } catch (err) { - expect(err.code).to.equal('ERR_INVALID_PARAMS') - } - }) - - it('explodes if given an invalid path', async () => { - try { - await mfs.write('foo', null, { - create: true - }) - throw new Error('Did not object to invalid paths') - } catch (err) { - expect(err.code).to.equal('ERR_INVALID_PATH') - } - }) - - it('explodes if given a negative offset', async () => { - try { - await mfs.write('/foo.txt', Buffer.from('foo'), { - offset: -1 - }) - throw new Error('Did not object to negative write offset') - } catch (err) { - expect(err.code).to.equal('ERR_INVALID_PARAMS') - } - }) - - it('explodes if given a negative length', async () => { - try { - await mfs.write('/foo.txt', Buffer.from('foo'), { - length: -1 - }) - throw new Error('Did not object to negative byte count') - } catch (err) { - expect(err.code).to.equal('ERR_INVALID_PARAMS') - } - }) - - it('creates a zero length file when passed a zero length', async () => { - await mfs.write('/foo.txt', Buffer.from('foo'), { - length: 0, - create: true - }) - - const files = await all(mfs.ls('/')) - - expect(files.length).to.equal(1) - expect(files[0].name).to.equal('foo.txt') - expect(files[0].size).to.equal(0) - }) - - it('writes a small file using a buffer', async () => { - const filePath = `/small-file-${Math.random()}.txt` - - await mfs.write(filePath, smallFile, { - create: true - }) - const stats = await mfs.stat(filePath) - - expect(stats.size).to.equal(smallFile.length) - }) - - it('writes a small file using a path (Node only)', async function () { - if (!isNode) { - return this.skip() - } - - const filePath = `/small-file-${Math.random()}.txt` - const pathToFile = await tempWrite(smallFile) - const fsStats = await util.promisify(fs.stat)(pathToFile) - - await mfs.write(filePath, pathToFile, { - create: true - }) - - const stats = await mfs.stat(filePath) - - expect(stats.size).to.equal(fsStats.size) - }) - - it('writes part of a small file using a path (Node only)', async function () { - if (!isNode) { - return this.skip() - } - - const
filePath = `/small-file-${Math.random()}.txt` - const pathToFile = await tempWrite(smallFile) - - await mfs.write(filePath, pathToFile, { - create: true, - length: 2 - }) - - const stats = await mfs.stat(filePath) - - expect(stats.size).to.equal(2) - }) - - it('writes a small file using a Node stream (Node only)', async function () { - if (!isNode) { - return this.skip() - } - - const filePath = `/small-file-${Math.random()}.txt` - const pathToFile = await tempWrite(smallFile) - const stream = fs.createReadStream(pathToFile) - - await mfs.write(filePath, stream, { - create: true - }) - - const stats = await mfs.stat(filePath) - - expect(stats.size).to.equal(smallFile.length) - }) - - it('writes a small file using an HTML5 Blob (Browser only)', async function () { - if (!global.Blob) { - return this.skip() - } - - const filePath = `/small-file-${Math.random()}.txt` - const blob = new global.Blob([smallFile.buffer.slice(smallFile.byteOffset, smallFile.byteOffset + smallFile.byteLength)]) - - await mfs.write(filePath, blob, { - create: true - }) - - const stats = await mfs.stat(filePath) - - expect(stats.size).to.equal(smallFile.length) - }) - - it('writes a small file with an escaped slash in the title', async () => { - const filePath = `/small-\\/file-${Math.random()}.txt` - - await mfs.write(filePath, smallFile, { - create: true - }) - - const stats = await mfs.stat(filePath) - - expect(stats.size).to.equal(smallFile.length) - - try { - await mfs.stat('/small-\\') - throw new Error('Created path section before escape as directory') - } catch (err) { - expect(err.message).to.include('does not exist') - } - }) - - it('writes a deeply nested small file', async () => { - const filePath = '/foo/bar/baz/qux/quux/garply/small-file.txt' - - await mfs.write(filePath, smallFile, { - create: true, - parents: true - }) - - const stats = await mfs.stat(filePath) - - expect(stats.size).to.equal(smallFile.length) - }) - - it('refuses to write to a file in a folder that does not exist', async () => { - const filePath = `/${Math.random()}/small-file.txt` - - try { - await mfs.write(filePath, smallFile, { - create: true - }) - throw new Error('Writing a file to a non-existent folder without the --parents flag should have failed') - } catch (err) { - expect(err.message).to.contain('does not exist') - } - }) - - it('refuses to write to a file that does not exist', async () => { - const filePath = `/small-file-${Math.random()}.txt` - - try { - await mfs.write(filePath, smallFile) - throw new Error('Writing a file to a non-existent file without the --create flag should have failed') - } catch (err) { - expect(err.message).to.contain('file does not exist') - } - }) - - it('refuses to write to a path that has a file in it', async () => { - const filePath = `/small-file-${Math.random()}.txt` - - await mfs.write(filePath, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - try { - await mfs.write(`${filePath}/other-file-${Math.random()}.txt`, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - throw new Error('Writing a path with a file in it should have failed') - } catch (err) { - expect(err.message).to.contain('Not a directory') - } - }) - - runTest(({ type, path, content }) => { - it(`limits how many bytes to write to a file (${type})`, async () => { - await mfs.write(path, content, { - create: true, - parents: true, - length: 2 - }) - - const buffer = Buffer.concat(await all(mfs.read(path))) - - expect(buffer.length).to.equal(2) - }) - }) - - runTest(({ type, path, content, contentSize }) => { - 
it(`overwrites start of a file without truncating (${type})`, async () => { - const newContent = Buffer.from('Goodbye world') - - await mfs.write(path, content, { - create: true - }) - - expect((await mfs.stat(path)).size).to.equal(contentSize) - - await mfs.write(path, newContent) - - const stats = await mfs.stat(path) - expect(stats.size).to.equal(contentSize) - - const buffer = Buffer.concat(await all(mfs.read(path, { - offset: 0, - length: newContent.length - }))) - - expect(buffer).to.deep.equal(newContent) - }) - }) - - runTest(({ type, path, content, contentSize }) => { - it(`pads the start of a new file when an offset is specified (${type})`, async () => { - const offset = 10 - - await mfs.write(path, content, { - offset, - create: true - }) - - const stats = await mfs.stat(path) - expect(stats.size).to.equal(offset + contentSize) - - const buffer = Buffer.concat(await all(mfs.read(path, { - offset: 0, - length: offset - }))) - - expect(buffer).to.deep.equal(Buffer.alloc(offset, 0)) - }) - }) - - runTest(({ type, path, content, contentSize }) => { - it(`expands a file when an offset is specified (${type})`, async () => { - const offset = contentSize - 1 - const newContent = Buffer.from('Oh hai!') - - await mfs.write(path, content, { - create: true - }) - - await mfs.write(path, newContent, { - offset - }) - - const stats = await mfs.stat(path) - expect(stats.size).to.equal(contentSize + newContent.length - 1) - - const buffer = Buffer.concat(await all(mfs.read(path, { - offset: offset - }))) - - expect(buffer).to.deep.equal(newContent) - }) - }) - - runTest(({ type, path, content, contentSize }) => { - it(`expands a file when an offset is specified and the offset is longer than the file (${type})`, async () => { - const offset = contentSize + 5 - const newContent = Buffer.from('Oh hai!') - - await mfs.write(path, content, { - create: true - }) - await mfs.write(path, newContent, { - offset - }) - - const stats = await mfs.stat(path) - expect(stats.size).to.equal(newContent.length + offset) - - const buffer = Buffer.concat(await all(mfs.read(path))) - - if (content[Symbol.asyncIterator]) { - content = Buffer.concat(await all(content)) - } - - expect(buffer).to.deep.equal(Buffer.concat([content, Buffer.from([0, 0, 0, 0, 0]), newContent])) - }) - }) - - runTest(({ type, path, content }) => { - it(`truncates a file after writing (${type})`, async () => { - const newContent = Buffer.from('Oh hai!') - - await mfs.write(path, content, { - create: true - }) - await mfs.write(path, newContent, { - truncate: true - }) - - const stats = await mfs.stat(path) - expect(stats.size).to.equal(newContent.length) - - const buffer = Buffer.concat(await all(mfs.read(path))) - - expect(buffer).to.deep.equal(newContent) - }) - }) - - runTest(({ type, path, content }) => { - it(`writes a file with raw blocks for newly created leaf nodes (${type})`, async () => { - await mfs.write(path, content, { - create: true, - rawLeaves: true - }) - - const stats = await mfs.stat(path) - - for await (const { cid } of traverseLeafNodes(mfs, stats.cid)) { - expect(cid.codec).to.equal('raw') - } - }) - }) - - it('supports concurrent writes', async function () { - const files = [] - - for (let i = 0; i < 10; i++) { - files.push({ - name: `source-file-${Math.random()}.txt`, - source: crypto.randomBytes(100) - }) - } - - await Promise.all( - files.map(({ name, source }) => mfs.write(`/concurrent/${name}`, source, { - create: true, - parents: true - })) - ) - - const listing = await all(mfs.ls('/concurrent')) - 
expect(listing.length).to.equal(files.length) - - listing.forEach(listedFile => { - expect(files.find(file => file.name === listedFile.name)).to.be.ok() - }) - }) - - it('rewrites really big files', async function () { - const initialStream = crypto.randomBytes(1024 * 300) - const newDataStream = crypto.randomBytes(1024 * 300) - - const fileName = `/rewrite/file-${Math.random()}.txt` - - await mfs.write(fileName, initialStream, { - create: true, - parents: true - }) - - await mfs.write(fileName, newDataStream, { - offset: 0 - }) - - const actualBytes = Buffer.concat(await all(mfs.read(fileName))) - - for (var i = 0; i < newDataStream.length; i++) { - if (newDataStream[i] !== actualBytes[i]) { - if (initialStream[i] === actualBytes[i]) { - throw new Error(`Bytes at index ${i} were not overwritten - expected ${newDataStream[i]} actual ${initialStream[i]}`) - } - - throw new Error(`Bytes at index ${i} not equal - expected ${newDataStream[i]} actual ${actualBytes[i]}`) - } - } - - expect(actualBytes).to.deep.equal(newDataStream) - }) - - it('shards a large directory when writing too many links to it', async () => { - const shardSplitThreshold = 10 - const dirPath = `/sharded-dir-${Math.random()}` - const newFile = `file-${Math.random()}` - const newFilePath = `/${dirPath}/${newFile}` - - await mfs.mkdir(dirPath, { - shardSplitThreshold - }) - - for (let i = 0; i < shardSplitThreshold; i++) { - await mfs.write(`/${dirPath}/file-${Math.random()}`, Buffer.from([0, 1, 2, 3]), { - create: true, - shardSplitThreshold - }) - } - - expect((await mfs.stat(dirPath)).type).to.equal('directory') - - await mfs.write(newFilePath, Buffer.from([0, 1, 2, 3]), { - create: true, - shardSplitThreshold - }) - - expect((await mfs.stat(dirPath)).type).to.equal('hamt-sharded-directory') - - const files = await all(mfs.ls(dirPath, { - long: true - })) - - // new file should be in directory - expect(files.filter(file => file.name === newFile).pop()).to.be.ok() - }) - - it('writes a file to an already sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - - const newFile = `file-${Math.random()}` - const newFilePath = `${shardedDirPath}/${newFile}` - - await mfs.write(newFilePath, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - // should still be a sharded directory - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - - const files = await all(mfs.ls(shardedDirPath, { - long: true - })) - - // new file should be in the directory - expect(files.filter(file => file.name === newFile).pop()).to.be.ok() - - // should be able to ls new file directly - expect(await all(mfs.ls(newFilePath, { - long: true - }))).to.not.be.empty() - }) - - it('overwrites a file in a sharded directory when positions do not match', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const newFile = 'file-0.6944395883502592' - const newFilePath = `${shardedDirPath}/${newFile}` - const newContent = Buffer.from([3, 2, 1, 0]) - - await mfs.write(newFilePath, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - // should still be a sharded directory - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - - // overwrite the file - await mfs.write(newFilePath, newContent, { - create: true - }) - - // read the file back - const buffer = Buffer.concat(await all(mfs.read(newFilePath))) - - expect(buffer).to.deep.equal(newContent) - - // should be able to ls new file directly - expect(await all(mfs.ls(newFilePath, { - long: true -
}))).to.not.be.empty() - }) - - it('overwrites file in a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const newFile = `file-${Math.random()}` - const newFilePath = `${shardedDirPath}/${newFile}` - const newContent = Buffer.from([3, 2, 1, 0]) - - await mfs.write(newFilePath, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - // should still be a sharded directory - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - - // overwrite the file - await mfs.write(newFilePath, newContent, { - create: true - }) - - // read the file back - const buffer = Buffer.concat(await all(mfs.read(newFilePath))) - - expect(buffer).to.deep.equal(newContent) - - // should be able to ls new file directly - expect(await all(mfs.ls(newFilePath, { - long: true - }))).to.not.be.empty() - }) - - it('overwrites a file in a subshard of a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs, 10, 75) - const newFile = 'file-1a.txt' - const newFilePath = `${shardedDirPath}/${newFile}` - const newContent = Buffer.from([3, 2, 1, 0]) - - await mfs.write(newFilePath, Buffer.from([0, 1, 2, 3]), { - create: true - }) - - // should still be a sharded directory - expect((await mfs.stat(shardedDirPath)).type).to.equal('hamt-sharded-directory') - - // overwrite the file - await mfs.write(newFilePath, newContent, { - create: true - }) - - // read the file back - const buffer = Buffer.concat(await all(mfs.read(newFilePath))) - - expect(buffer).to.deep.equal(newContent) - - // should be able to ls new file directly - expect(await all(mfs.ls(newFilePath, { - long: true - }))).to.not.be.empty() - }) - - it('writes a file with a different CID version to the parent', async () => { - const directory = `cid-versions-${Math.random()}` - const directoryPath = `/${directory}` - const fileName = `file-${Math.random()}.txt` - const filePath = `${directoryPath}/${fileName}` - const expectedBytes = Buffer.from([0, 1, 2, 3]) - - await mfs.mkdir(directoryPath, { - cidVersion: 0 - }) - - expect((await cidAtPath(directoryPath, mfs)).version).to.equal(0) - - await mfs.write(filePath, expectedBytes, { - create: true, - cidVersion: 1 - }) - - expect((await cidAtPath(filePath, mfs)).version).to.equal(1) - - const actualBytes = Buffer.concat(await all(mfs.read(filePath))) - - expect(actualBytes).to.deep.equal(expectedBytes) - }) - - it('overwrites a file with a different CID version', async () => { - const directory = `cid-versions-${Math.random()}` - const directoryPath = `/${directory}` - const fileName = `file-${Math.random()}.txt` - const filePath = `${directoryPath}/${fileName}` - const expectedBytes = Buffer.from([0, 1, 2, 3]) - - await mfs.mkdir(directoryPath, { - cidVersion: 0 - }) - - expect((await cidAtPath(directoryPath, mfs)).version).to.equal(0) - - await mfs.write(filePath, Buffer.from([5, 6]), { - create: true, - cidVersion: 0 - }) - - expect((await cidAtPath(filePath, mfs)).version).to.equal(0) - - await mfs.write(filePath, expectedBytes, { - cidVersion: 1 - }) - - expect((await cidAtPath(filePath, mfs)).version).to.equal(1) - - const actualBytes = Buffer.concat(await all(mfs.read(filePath))) - - expect(actualBytes).to.deep.equal(expectedBytes) - }) - - it('partially overwrites a file with a different CID version', async () => { - const directory = `cid-versions-${Math.random()}` - const directoryPath = `/${directory}` - const fileName = `file-${Math.random()}.txt` - const filePath = `${directoryPath}/${fileName}` - - 
await mfs.mkdir(directoryPath, { - cidVersion: 0 - }) - - expect((await cidAtPath(directoryPath, mfs)).version).to.equal(0) - - await mfs.write(filePath, Buffer.from([5, 6, 7, 8, 9, 10, 11]), { - create: true, - cidVersion: 0 - }) - - expect((await cidAtPath(filePath, mfs)).version).to.equal(0) - - await mfs.write(filePath, Buffer.from([0, 1, 2, 3]), { - cidVersion: 1, - offset: 1 - }) - - expect((await cidAtPath(filePath, mfs)).version).to.equal(1) - - const actualBytes = Buffer.concat(await all(mfs.read(filePath))) - - expect(actualBytes).to.deep.equal(Buffer.from([5, 0, 1, 2, 3, 10, 11])) - }) - - it('writes a file with a different hash function to the parent', async () => { - const directory = `cid-versions-${Math.random()}` - const directoryPath = `/${directory}` - const fileName = `file-${Math.random()}.txt` - const filePath = `${directoryPath}/${fileName}` - const expectedBytes = Buffer.from([0, 1, 2, 3]) - - await mfs.mkdir(directoryPath, { - cidVersion: 0 - }) - - expect((await cidAtPath(directoryPath, mfs)).version).to.equal(0) - - await mfs.write(filePath, expectedBytes, { - create: true, - cidVersion: 1, - hashAlg: 'sha2-512' - }) - - expect(multihash.decode((await cidAtPath(filePath, mfs)).multihash).name).to.equal('sha2-512') - - const actualBytes = Buffer.concat(await all(mfs.read(filePath))) - - expect(actualBytes).to.deep.equal(expectedBytes) - }) - - it('results in the same hash as a sharded directory created by the importer when adding a new file', async function () { - this.timeout(60000) - - const { - nextFile, - dirWithAllFiles, - dirWithSomeFiles, - dirPath - } = await createTwoShards(mfs.ipld, 75) - - await mfs.cp(`/ipfs/${dirWithSomeFiles}`, dirPath) - - await mfs.write(nextFile.path, nextFile.content, { - create: true - }) - - const stats = await mfs.stat(dirPath) - const updatedDirCid = stats.cid - - expect(stats.type).to.equal('hamt-sharded-directory') - expect(updatedDirCid.toString()).to.deep.equal(dirWithAllFiles.toString()) - }) - - it('results in the same hash as a sharded directory created by the importer when creating a new subshard', async function () { - this.timeout(60000) - - const { - nextFile, - dirWithAllFiles, - dirWithSomeFiles, - dirPath - } = await createTwoShards(mfs.ipld, 100) - - await mfs.cp(`/ipfs/${dirWithSomeFiles}`, dirPath) - - await mfs.write(nextFile.path, nextFile.content, { - create: true - }) - - const stats = await mfs.stat(dirPath) - const updatedDirCid = stats.cid - - expect(updatedDirCid.toString()).to.deep.equal(dirWithAllFiles.toString()) - }) - - it('results in the same hash as a sharded directory created by the importer when adding a file to a subshard', async function () { - this.timeout(60000) - - const { - nextFile, - dirWithAllFiles, - dirWithSomeFiles, - dirPath - } = await createTwoShards(mfs.ipld, 82) - - await mfs.cp(`/ipfs/${dirWithSomeFiles}`, dirPath) - - await mfs.write(nextFile.path, nextFile.content, { - create: true - }) - - const stats = await mfs.stat(dirPath) - const updatedDirCid = stats.cid - - expect(stats.type).to.equal('hamt-sharded-directory') - expect(updatedDirCid.toString()).to.deep.equal(dirWithAllFiles.toString()) - }) - - it('results in the same hash as a sharded directory created by the importer when adding a file to a subshard of a subshard', async function () { - this.timeout(60000) - - const { - nextFile, - dirWithAllFiles, - dirWithSomeFiles, - dirPath - } = await createTwoShards(mfs.ipld, 2187) - - await mfs.cp(`/ipfs/${dirWithSomeFiles}`, dirPath) - - await mfs.write(nextFile.path, 
nextFile.content, { - create: true - }) - - const stats = await mfs.stat(dirPath) - const updatedDirCid = stats.cid - - expect(stats.type).to.equal('hamt-sharded-directory') - expect(updatedDirCid.toString()).to.deep.equal(dirWithAllFiles.toString()) - }) - - it('results in the same hash as a sharded directory created by the importer when causing a subshard of a subshard to be created', async function () { - this.timeout(60000) - - const dir = `/some-dir-${Math.random()}` - - const nodeGrContent = Buffer.from([0, 1, 2, 3, 4]) - const superModuleContent = Buffer.from([5, 6, 7, 8, 9]) - - const dirCid = await createShard(mfs.ipld, [{ - path: `${dir}/node-gr`, - content: nodeGrContent - }, { - path: `${dir}/yanvoidmodule`, - content: crypto.randomBytes(5) - }, { - path: `${dir}/methodify`, - content: crypto.randomBytes(5) - }, { - path: `${dir}/fis-msprd-style-loader_0_13_1`, - content: crypto.randomBytes(5) - }, { - path: `${dir}/js-form`, - content: crypto.randomBytes(5) - }, { - path: `${dir}/vivanov-sliceart`, - content: crypto.randomBytes(5) - }], 1) - - await mfs.cp(`/ipfs/${dirCid}`, dir) - - await mfs.write(`${dir}/supermodule_test`, superModuleContent, { - create: true - }) - - await mfs.stat(`${dir}/supermodule_test`) - await mfs.stat(`${dir}/node-gr`) - - expect(Buffer.concat(await all(mfs.read(`${dir}/node-gr`)))).to.deep.equal(nodeGrContent) - expect(Buffer.concat(await all(mfs.read(`${dir}/supermodule_test`)))).to.deep.equal(superModuleContent) - - await mfs.rm(`${dir}/supermodule_test`) - - try { - await mfs.stat(`${dir}/supermodule_test`) - } catch (err) { - expect(err.message).to.contain('not exist') - } - }) - - it('adds files that cause sub-sub-shards to be created', async function () { - // this.timeout(60000) - - const dir = `/updated-dir-${Math.random()}` - const buf = Buffer.from([0, 1, 2, 3, 4]) - - const dirCid = await createShard(mfs.ipld, [{ - path: `${dir}/file-699.txt`, - content: buf - }], 1) - - await mfs.cp(`/ipfs/${dirCid}`, dir) - - await mfs.write(`${dir}/file-1011.txt`, buf, { - create: true - }) - - await mfs.stat(`${dir}/file-1011.txt`) - - expect(Buffer.concat(await all(mfs.read(`${dir}/file-1011.txt`)))).to.deep.equal(buf) - }) - - it('removes files that cause sub-sub-shards to be removed', async function () { - this.timeout(60000) - - const dir = `/imported-dir-${Math.random()}` - const buf = Buffer.from([0, 1, 2, 3, 4]) - - const dirCid = await createShard(mfs.ipld, [{ - path: `${dir}/file-699.txt`, - content: buf - }, { - path: `${dir}/file-1011.txt`, - content: buf - }], 1) - - await mfs.cp(`/ipfs/${dirCid}`, dir) - - await mfs.rm(`${dir}/file-1011.txt`) - - try { - await mfs.stat(`${dir}/file-1011.txt`) - } catch (err) { - expect(err.message).to.contain('not exist') - } - }) -}) diff --git a/packages/ipfs-mfs/test/helpers/cid-at-path.js b/packages/ipfs-mfs/test/helpers/cid-at-path.js deleted file mode 100644 index c0e10c8876..0000000000 --- a/packages/ipfs-mfs/test/helpers/cid-at-path.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict' - -const toPathComponents = require('../../src/core/utils/to-path-components') - -module.exports = async (path, mfs) => { - const parts = toPathComponents(path) - const fileName = parts.pop() - const directory = `/${parts.join('/')}` - const files = [] - - for await (const file of mfs.ls(directory, { - long: true - })) { - files.push(file) - } - - const file = files - .filter(file => file.name === fileName) - .pop() - - return file.cid -} diff --git a/packages/ipfs-mfs/test/helpers/cli.js 
b/packages/ipfs-mfs/test/helpers/cli.js deleted file mode 100644 index c3652acfdc..0000000000 --- a/packages/ipfs-mfs/test/helpers/cli.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const yargs = require('yargs') -const mfs = require('../../src/cli') - -module.exports = (command, { ipfs, print, getStdin }) => { - const parser = mfs(yargs) - - return new Promise((resolve, reject) => { - parser - .onFinishCommand((data) => { - resolve(data) - }) - .fail((msg, err) => { - if (msg) { - reject(new Error(msg)) - } else { - reject(err) - } - }) - .parse(command, { - ctx: { - ipfs, - print, - getStdin - } - }) - }) -} diff --git a/packages/ipfs-mfs/test/helpers/create-mfs.js b/packages/ipfs-mfs/test/helpers/create-mfs.js deleted file mode 100644 index 08bd9b4ecf..0000000000 --- a/packages/ipfs-mfs/test/helpers/create-mfs.js +++ /dev/null @@ -1,49 +0,0 @@ -'use strict' - -const core = require('../../src/core') -const isWebWorker = require('detect-webworker') -const { - MemoryDatastore -} = require('interface-datastore') -const Ipld = require('ipld') -const Repo = require('ipfs-repo') -const BlockService = require('ipfs-block-service') - -const createMfs = async () => { - const repo = new Repo(`test-repo-${Math.random()}`, { - lock: 'memory', - storageBackends: { - root: MemoryDatastore, - blocks: MemoryDatastore, - keys: MemoryDatastore, - datastore: MemoryDatastore - } - }) - - await repo.init({}) - await repo.open() - - const bs = new BlockService(repo) - - const ipld = new Ipld({ - blockService: bs - }) - - const mfs = core({ - ipld, - datastore: repo.datastore, - blocks: bs, - - // https://github.com/Joris-van-der-Wel/karma-mocha-webworker/issuses/4 - // There is no IPFS node running on the main thread so run it on the - // worker along with the tests - repoOwner: isWebWorker - }) - - mfs.ipld = ipld - mfs.repo = repo - - return mfs -} - -module.exports = createMfs diff --git a/packages/ipfs-mfs/test/helpers/create-shard.js b/packages/ipfs-mfs/test/helpers/create-shard.js deleted file mode 100644 index 91114de6f7..0000000000 --- a/packages/ipfs-mfs/test/helpers/create-shard.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -const importer = require('ipfs-unixfs-importer') -const last = require('it-last') - -const createShard = async (ipld, files, shardSplitThreshold = 10) => { - const result = await last(importer(files, ipld, { - shardSplitThreshold, - reduceSingleLeafToSelf: false, // same as go-ipfs-mfs implementation, differs from `ipfs add`(!) - leafType: 'raw' // same as go-ipfs-mfs implementation, differs from `ipfs add`(!) 
- })) - - return result.cid -} - -module.exports = createShard diff --git a/packages/ipfs-mfs/test/helpers/create-sharded-directory.js b/packages/ipfs-mfs/test/helpers/create-sharded-directory.js deleted file mode 100644 index 4343605972..0000000000 --- a/packages/ipfs-mfs/test/helpers/create-sharded-directory.js +++ /dev/null @@ -1,21 +0,0 @@ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const createShard = require('./create-shard') - -module.exports = async (mfs, shardSplitThreshold = 10, files = shardSplitThreshold) => { - const dirPath = `/sharded-dir-${Math.random()}` - const cid = await createShard(mfs.ipld, new Array(files).fill(0).map((_, index) => ({ - path: `${dirPath}/file-${index}`, - content: Buffer.from([0, 1, 2, 3, 4, 5, index]) - })), shardSplitThreshold) - - await mfs.cp(`/ipfs/${cid}`, dirPath) - - expect((await mfs.stat(`/ipfs/${cid}`)).type).to.equal('hamt-sharded-directory') - expect((await mfs.stat(dirPath)).type).to.equal('hamt-sharded-directory') - - return dirPath -} diff --git a/packages/ipfs-mfs/test/helpers/create-two-shards.js b/packages/ipfs-mfs/test/helpers/create-two-shards.js deleted file mode 100644 index 00f69a9cae..0000000000 --- a/packages/ipfs-mfs/test/helpers/create-two-shards.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -const createShard = require('./create-shard') - -const createTwoShards = async (ipld, fileCount) => { - const shardSplitThreshold = 10 - const dirPath = `/sharded-dir-${Math.random()}` - const files = new Array(fileCount).fill(0).map((_, index) => ({ - path: `${dirPath}/file-${index}`, - content: Buffer.from([0, 1, 2, 3, 4, index]) - })) - files[files.length - 1].path = `${dirPath}/file-${fileCount - 1}` - - const allFiles = files.map(file => ({ - ...file - })) - const someFiles = files.map(file => ({ - ...file - })) - const nextFile = someFiles.pop() - - const dirWithAllFiles = await createShard(ipld, allFiles, shardSplitThreshold) - const dirWithSomeFiles = await createShard(ipld, someFiles, shardSplitThreshold) - - return { - nextFile, - dirWithAllFiles, - dirWithSomeFiles, - dirPath - } -} - -module.exports = createTwoShards diff --git a/packages/ipfs-mfs/test/helpers/find-tree-with-depth.js b/packages/ipfs-mfs/test/helpers/find-tree-with-depth.js deleted file mode 100644 index 8bc54dc0e2..0000000000 --- a/packages/ipfs-mfs/test/helpers/find-tree-with-depth.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict' - -const createShard = require('./create-shard') -const printTree = require('./print-tree') - -// find specific hamt structure by brute force -const findTreeWithDepth = async (ipld, children, depth) => { - for (let i = 2550; i < 100000; i++) { - const files = new Array(i).fill(0).map((_, index) => ({ - path: `foo/file-${index}`, - content: Buffer.from([0, 1, 2, 3, 4, index]) - })) - - const cid = await createShard(ipld, files) - const hasChildrenAtDepth = await findChildrenAtDepth(ipld, cid, children, depth) - - if (hasChildrenAtDepth) { - await printTree(ipld, cid) - - return cid - } - } -} - -const load = (ipld, cid) => { - return new Promise((resolve, reject) => { - ipld.get(cid, (err, res) => { - if (err) { - return reject(err) - } - - resolve(res.value) - }) - }) -} - -const findChildrenAtDepth = async (ipld, cid, children, depth, currentDepth = 0) => { - const node = await load(ipld, cid) - const fileLinks = node.links.filter(link => link.Name) - - if (currentDepth === depth && fileLinks.length >= children) { - return true - } - - for (let i = 0; i < 
fileLinks.length; i++) { - const res = await findChildrenAtDepth(ipld, fileLinks[i].cid, children, depth, currentDepth + 1) - - if (res) { - return true - } - } - - return false -} - -module.exports = findTreeWithDepth diff --git a/packages/ipfs-mfs/test/helpers/print-tree.js b/packages/ipfs-mfs/test/helpers/print-tree.js deleted file mode 100644 index baa9d647c9..0000000000 --- a/packages/ipfs-mfs/test/helpers/print-tree.js +++ /dev/null @@ -1,15 +0,0 @@ -'use strict' - -const printTree = async (ipld, cid, indentation = '', name = '') => { - console.info(`${indentation} ${name} ${cid}`) // eslint-disable-line no-console - - const node = await ipld.get(cid) - const fileLinks = node.Links - .filter(link => link.Name) - - for (let i = 0; i < fileLinks.length; i++) { - await printTree(ipld, fileLinks[i].Hash, ` ${indentation}`, fileLinks[i].Name) - } -} - -module.exports = printTree diff --git a/packages/ipfs-mfs/test/helpers/stream-to-array.js b/packages/ipfs-mfs/test/helpers/stream-to-array.js deleted file mode 100644 index 882930ee58..0000000000 --- a/packages/ipfs-mfs/test/helpers/stream-to-array.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -module.exports = async (stream) => { - const arr = [] - - for await (const entry of stream) { - arr.push(entry) - } - - return arr -} diff --git a/packages/ipfs-mfs/test/helpers/stream-to-buffer.js b/packages/ipfs-mfs/test/helpers/stream-to-buffer.js deleted file mode 100644 index caab7c81de..0000000000 --- a/packages/ipfs-mfs/test/helpers/stream-to-buffer.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -module.exports = async (stream) => { - let buffer = Buffer.alloc(0) - - for await (const buf of stream) { - buffer = Buffer.concat([buffer, buf], buffer.length + buf.length) - } - - return buffer -} diff --git a/packages/ipfs-mfs/test/http/index.js b/packages/ipfs-mfs/test/http/index.js deleted file mode 100644 index d9bb97a741..0000000000 --- a/packages/ipfs-mfs/test/http/index.js +++ /dev/null @@ -1,16 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -describe('http', () => { - require('./chmod') - require('./cp') - require('./flush') - require('./ls') - require('./mkdir') - require('./mv') - require('./read') - require('./rm') - require('./stat') - require('./touch') - require('./write') -}) diff --git a/packages/ipfs-mfs/test/node.js b/packages/ipfs-mfs/test/node.js deleted file mode 100644 index 3d35201d39..0000000000 --- a/packages/ipfs-mfs/test/node.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict' - -require('./cli') -require('./core') -require('./http') diff --git a/packages/ipfs-mfs/test/webworker.js b/packages/ipfs-mfs/test/webworker.js deleted file mode 100644 index ed5d9913f4..0000000000 --- a/packages/ipfs-mfs/test/webworker.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -require('./core') diff --git a/packages/ipfs-multipart/CHANGELOG.md b/packages/ipfs-multipart/CHANGELOG.md deleted file mode 100644 index 1db23d866b..0000000000 --- a/packages/ipfs-multipart/CHANGELOG.md +++ /dev/null @@ -1,47 +0,0 @@ - -# [0.3.0](https://github.com/ipfs/js-ipfs-multipart/compare/v0.2.0...v0.3.0) (2020-01-09) - - -### Features - -* support UnixFSv1.5 metadata ([008e872](https://github.com/ipfs/js-ipfs-multipart/commit/008e872)) - - - - -# [0.2.0](https://github.com/ipfs/js-ipfs-multipart/compare/v0.1.1...v0.2.0) (2019-08-27) - - -### Chores - -* refactor to async/await ([#17](https://github.com/ipfs/js-ipfs-multipart/issues/17)) ([55d926e](https://github.com/ipfs/js-ipfs-multipart/commit/55d926e)) - - -### BREAKING CHANGES - -* This module used to 
export a class that extended EventEmitter, -now it exports a function that returns an async iterable. - -I also updated the deps to use the latest http api, though it's removed -the ability to add whole paths at once, along with some special logic -to handle symlinks. The `Dicer` module that this module depends on -will still emit events for when it encounters symlinks so I left the -handlers in though am unsure if we actually use them. - - - - -## [0.1.1](https://github.com/ipfs/js-ipfs-multipart/compare/v0.1.0...v0.1.1) (2019-07-12) - - - - -# [0.1.0](https://github.com/ipfs/js-ipfs-multipart/compare/v0.0.2...v0.1.0) (2016-03-14) - - - - -## 0.0.2 (2016-03-14) - - - diff --git a/packages/ipfs-multipart/LICENSE b/packages/ipfs-multipart/LICENSE deleted file mode 100644 index cd102e348f..0000000000 --- a/packages/ipfs-multipart/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Protocol Labs Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file diff --git a/packages/ipfs-multipart/README.md b/packages/ipfs-multipart/README.md deleted file mode 100644 index ce7bed180d..0000000000 --- a/packages/ipfs-multipart/README.md +++ /dev/null @@ -1,77 +0,0 @@ -ipfs-multipart -==== - -[![made by Protocol Labs](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://protocol.ai) -[![Project IPFS](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/) -[![freenode #ipfs](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) -[![Codecov branch](https://img.shields.io/codecov/c/github/ipfs/js-ipfs-multipart/master.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-multipart) -[![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-multipart)](https://travis-ci.com/ipfs/js-ipfs-multipart) -[![Dependency Status](https://david-dm.org/ipfs/js-ipfs-multipart.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-multipart) -[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) -[![standard-readme compliant](https://img.shields.io/badge/readme%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) - - - - -> A set of utilities to help dealing with [IPFS](https://ipfs.io/) multipart. 
- -## Lead Maintainer - -[Hugo Dias](https://github.com/hugomrdias) - -### Notice -> This module is moving to async iterators starting from 0.2.0. -> The last minor version to support event emitter is 0.1.1, any backports will be merged to the branch `event-emitter` and released under `>0.1.0 <0.2.0`. - -## Install -``` -npm install ipfs-multipart -``` - -## Usage -```javascript -const http = require('http') -const parser = require('ipfs-multipart') - -http.createServer(async (req, res) => { - if (req.method === 'POST' && req.headers['content-type']) { - - for await (const entry of parser(req)) { - if (entry.type === 'directory') { - console.log(`dir ${entry.name} start`) - } - - if (entry.type === 'file') { - console.log(`file ${entry.name} start`) - - for await (const data of entry.content) { - console.log(`file ${entry.name} contents:`, data.toString()) - } - - console.log(`file ${entry.name} end`) - } - } - - console.log('finished parsing') - res.writeHead(200) - res.end() - } - - res.writeHead(404) - res.end() -}).listen(5001, () => { - console.log('server listening on port 5001') -}) -``` - -## Contribute - -See [the contribute file](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) and our [code of conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md)! - -PRs accepted. - -Small note: If editing the Readme, please conform to the [standard-readme](https://github.com/RichardLitt/standard-readme) specification. - -## License - -[MIT](LICENSE) © Protocol Labs Inc. diff --git a/packages/ipfs-multipart/example.js b/packages/ipfs-multipart/example.js deleted file mode 100644 index 7335341b5a..0000000000 --- a/packages/ipfs-multipart/example.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -/* eslint-disable no-console */ - -const http = require('http') -const multipart = require('ipfs-multipart') - -http.createServer(async (req, res) => { - if (req.method === 'POST' && req.headers['content-type']) { - for await (const part of multipart(req)) { - console.log(`file ${part.name} start`) - - if (part.type === 'file') { - for await (const chunk of part.content) { - console.log(`file ${part.name} contents:`, chunk.toString()) - } - } - } - - console.log('finished parsing') - res.writeHead(200) - res.end() - } - - res.writeHead(404) - res.end() -}).listen(5001, () => { - console.log('server listening on port 5001') -}) diff --git a/packages/ipfs-multipart/package.json b/packages/ipfs-multipart/package.json deleted file mode 100644 index aeb539d345..0000000000 --- a/packages/ipfs-multipart/package.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "name": "ipfs-multipart", - "version": "0.3.0", - "description": "A set of utilities to help dealing with IPFS multipart.", - "keywords": [ - "ipfs", - "multipart", - "files" - ], - "homepage": "https://github.com/ipfs/js-ipfs", - "bugs": "https://github.com/ipfs/js-ipfs/issues", - "license": "MIT", - "leadMaintainer": "Hugo Dias ", - "files": [ - "src", - "dist" - ], - "main": "src/index.js", - "repository": { - "type": "git", - "url": "git+https://github.com/ipfs/js-ipfs.git" - }, - "scripts": { - "lint": "aegir lint", - "build": "aegir build", - "test": "aegir test -t node", - "clean": "rm -rf ./dist", - "dep-check": "aegir dep-check" - }, - "dependencies": { - "@hapi/content": "^4.1.0", - "it-multipart": "^1.0.1" - }, - "devDependencies": { - "aegir": "^21.3.0", - "chai": "^4.2.0", - "ipfs-http-client": "^42.0.0", - "it-drain": "^1.0.0", - "request": "^2.88.0" - }, - "engines": { - "node": ">=10.0.0", - "npm": ">=6.0.0" - },
"contributors": [ - "Alan Shaw ", - "Alex Potsides ", - "Francisco Baio Dias ", - "Hugo Dias ", - "Hugo Dias ", - "Maciej Krüger ", - "dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com>" - ] -} diff --git a/packages/ipfs-multipart/src/index.js b/packages/ipfs-multipart/src/index.js deleted file mode 100644 index 3a2620c33b..0000000000 --- a/packages/ipfs-multipart/src/index.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' - -const content = require('@hapi/content') -const parser = require('./parser') - -/** - * Request Parser - * - * @param {Object} req - Request - * @param {Object} options - Options passed to stream constructors - * @returns {Object} an async iterable - */ -module.exports = (req, options = {}) => { - options.boundary = content.type(req.headers['content-type']).boundary - - return parser(req.payload || req, options) -} diff --git a/packages/ipfs-multipart/test/fixtures/config b/packages/ipfs-multipart/test/fixtures/config deleted file mode 100644 index cc57f72278..0000000000 --- a/packages/ipfs-multipart/test/fixtures/config +++ /dev/null @@ -1,5 +0,0 @@ -{ - "API": { - "HTTPHeaders": null - } -} diff --git a/packages/ipfs-multipart/test/fixtures/folderlink b/packages/ipfs-multipart/test/fixtures/folderlink deleted file mode 120000 index eecca078af..0000000000 --- a/packages/ipfs-multipart/test/fixtures/folderlink +++ /dev/null @@ -1 +0,0 @@ -subfolder \ No newline at end of file diff --git a/packages/ipfs-multipart/test/fixtures/link b/packages/ipfs-multipart/test/fixtures/link deleted file mode 120000 index b63339dea3..0000000000 --- a/packages/ipfs-multipart/test/fixtures/link +++ /dev/null @@ -1 +0,0 @@ -subfolder/deepfile \ No newline at end of file diff --git a/packages/ipfs-multipart/test/fixtures/otherfile b/packages/ipfs-multipart/test/fixtures/otherfile deleted file mode 100644 index 2a6e94e3a5..0000000000 --- a/packages/ipfs-multipart/test/fixtures/otherfile +++ /dev/null @@ -1 +0,0 @@ -OTHERFILE CONTENT diff --git a/packages/ipfs-multipart/test/fixtures/subfolder/deepfile b/packages/ipfs-multipart/test/fixtures/subfolder/deepfile deleted file mode 100644 index 01767e229e..0000000000 --- a/packages/ipfs-multipart/test/fixtures/subfolder/deepfile +++ /dev/null @@ -1 +0,0 @@ -DEEPFILE CONTENT diff --git a/packages/ipfs-multipart/test/parser.spec.js b/packages/ipfs-multipart/test/parser.spec.js deleted file mode 100644 index 60e1964064..0000000000 --- a/packages/ipfs-multipart/test/parser.spec.js +++ /dev/null @@ -1,256 +0,0 @@ -'use strict' - -/* eslint-env mocha */ -/* eslint-disable no-unused-expressions */ - -const expect = require('chai').expect -const APIctl = require('ipfs-http-client') -const http = require('http') -const path = require('path') -const fs = require('fs') -const request = require('request') -const parser = require('../src') -const os = require('os') -const drain = require('it-drain') - -const isWindows = os.platform() === 'win32' - -const readDir = (path, prefix, includeMetadata, output = []) => { - const entries = fs.readdirSync(path) - - entries.forEach(entry => { - // resolves symlinks - const entryPath = fs.realpathSync(`${path}/${entry}`) - const type = fs.statSync(entryPath) - - if (type.isDirectory()) { - readDir(entryPath, `${prefix}/${entry}`, includeMetadata, output) - - output.push({ - path: `${prefix}/${entry}`, - mtime: includeMetadata ? new Date(type.mtimeMs) : undefined, - mode: includeMetadata ? 
type.mode : undefined - }) - } - - if (type.isFile()) { - output.push({ - path: `${prefix}/${entry}`, - content: fs.createReadStream(entryPath), - mtime: includeMetadata ? new Date(type.mtimeMs) : undefined, - mode: includeMetadata ? type.mode : undefined - }) - } - }) - - return output -} - -describe('parser', () => { - const PORT = 6001 - - let ctl - let handler = () => {} - - before((done) => { - http.createServer((req, res) => { - if (req.method === 'POST' && req.headers['content-type']) { - handler(req) - .then(() => { - res.writeHead(200) - }) - .catch(() => { - res.writeHead(500) - }) - .then(() => { - res.end() - }) - - return - } - - res.writeHead(404) - res.end() - }).listen(PORT, () => { - ctl = APIctl(`/ip4/127.0.0.1/tcp/${PORT}`) - done() - }) - }) - - describe('single file', () => { - const filePath = path.resolve(__dirname, 'fixtures/config') - const fileContent = fs.readFileSync(filePath, 'utf8') - const fileMtime = parseInt(Date.now() / 1000) - const fileMode = parseInt('0777', 8) - - before(() => { - handler = async (req) => { - expect(req.headers['content-type']).to.be.a('string') - - const files = [] - - for await (const entry of parser(req)) { - if (entry.type === 'file') { - const file = { ...entry, content: '' } - - for await (const data of entry.content) { - file.content += data.toString() - } - - files.push(file) - } - } - - expect(files.length).to.equal(1) - expect(JSON.parse(files[0].content)).to.deep.equal(JSON.parse(fileContent)) - } - }) - - it('parses ctl.config.replace correctly', async () => { - await ctl.config.replace(JSON.parse(fileContent)) - }) - - it('parses regular multipart requests correctly', (done) => { - const formData = { - file: fs.createReadStream(filePath) - } - - request.post({ url: `http://localhost:${PORT}`, formData: formData }, (err) => done(err)) - }) - - it('parses multipart requests with metadata correctly', (done) => { - const formData = { - file: { - value: fileContent, - options: { - header: { - mtime: fileMtime, - mode: fileMode - } - } - } - } - - request.post({ url: `http://localhost:${PORT}`, formData }, (err) => done(err)) - }) - }) - - describe('directory', () => { - const dirPath = path.resolve(__dirname, 'fixtures') - - let files = [] - - before(() => { - handler = async (req) => { - expect(req.headers['content-type']).to.be.a('string') - - for await (const entry of parser(req)) { - const file = { ...entry, content: '' } - - if (entry.content) { - for await (const data of entry.content) { - file.content += data.toString() - } - } - - files.push(file) - } - } - }) - - beforeEach(() => { - files = [] - }) - - it('parses ctl.add correctly', async () => { - const contents = readDir(dirPath, 'fixtures') - - await drain(ctl.add(contents, { recursive: true, followSymlinks: false })) - - if (isWindows) { - return - } - - expect(files).to.have.lengthOf(contents.length) - - for (let i = 0; i < contents.length; i++) { - expect(files[i].name).to.equal(contents[i].path) - expect(files[i].mode).to.be.undefined - expect(files[i].mtime).to.be.undefined - } - }) - - it('parses ctl.add with metadata correctly', async () => { - const contents = readDir(dirPath, 'fixtures', true) - - await drain(ctl.add(contents, { recursive: true, followSymlinks: false })) - - if (isWindows) { - return - } - - expect(files).to.have.lengthOf(contents.length) - - for (let i = 0; i < contents.length; i++) { - const msecs = contents[i].mtime.getTime() - const secs = Math.floor(msecs / 1000) - - expect(files[i].name).to.equal(contents[i].path) - 
expect(files[i].mode).to.equal(contents[i].mode) - expect(files[i].mtime).to.deep.equal({ - secs, - nsecs: (msecs - (secs * 1000)) * 1000 - }) - } - }) - }) - - describe('empty', () => { - before(() => { - handler = async (req) => { - expect(req.headers['content-type']).to.be.a('string') - - for await (const _ of parser(req)) { // eslint-disable-line no-unused-vars - - } - } - }) - - it('does not block', (done) => { - request.post({ url: `http://localhost:${PORT}` }, (err, httpResponse, body) => { - expect(err).not.to.exist - done() - }) - }) - }) - - describe('buffer', () => { - const files = [] - - before(() => { - handler = async (req) => { - expect(req.headers['content-type']).to.be.a('string') - - for await (const entry of parser(req)) { - if (entry.type === 'file') { - const file = { name: entry.name, content: '' } - - for await (const data of entry.content) { - file.content += data.toString() - } - - files.push(file) - } - } - } - }) - - it('parses ctl.add buffer correctly', async () => { - await drain(ctl.add(Buffer.from('hello world'))) - - expect(files.length).to.equal(1) - expect(files[0].name).to.equal('') - expect(files[0].content).to.equal('hello world') - }) - }) -}) diff --git a/packages/ipfs-utils/.aegir.js b/packages/ipfs-utils/.aegir.js new file mode 100644 index 0000000000..f897dd2a40 --- /dev/null +++ b/packages/ipfs-utils/.aegir.js @@ -0,0 +1,61 @@ +'use strict' + +const { promisify } = require('util') +const http = require('http') +const url = require('url') +const querystring = require('querystring') + +const echoServer = async (port = 3000) => { + const server = http.createServer() + + server.on('request', (request, response) => { + try { + + const uri = url.parse(request.url) + const qs = uri.query ? querystring.parse(uri.query) : {} + const status = qs.status || 200 + const contentType = qs.contentType || 'text/plain' + + const headers = { + 'Access-Control-Allow-Origin': '*' + } + + if (qs.body) { + headers['Content-Type'] = contentType + headers['Content-Length'] = qs.body.length + } + + response.writeHead(status, headers) + + if (qs.body) { + response.end(qs.body) + } else { + request.pipe(response) + } + + } catch (err) { + console.error(err) + } + }) + + await promisify(server.listen.bind(server))(port) + + return { + stop: promisify(server.close.bind(server)) + } +} + +let echo + +module.exports = { + hooks: { + pre: async () => { + console.info('starting echo server') + echo = await echoServer() + }, + post: async () => { + console.info('stopping echo server') + await echo.stop() + } + } +} diff --git a/packages/ipfs-utils/package.json b/packages/ipfs-utils/package.json index 40c465b6d4..3b0432df52 100644 --- a/packages/ipfs-utils/package.json +++ b/packages/ipfs-utils/package.json @@ -28,24 +28,26 @@ }, "license": "MIT", "dependencies": { + "abort-controller": "^3.0.0", "buffer": "^5.4.2", "err-code": "^2.0.0", "fs-extra": "^8.1.0", "is-electron": "^2.2.0", "iso-url": "^0.4.7", "it-glob": "0.0.7", - "ky": "^0.15.0", - "ky-universal": "^0.3.0", "merge-options": "^2.0.0", "node-fetch": "^2.6.0", "stream-to-it": "^0.2.0" }, "devDependencies": { - "aegir": "^21.3.0", + "aegir": "21.3.0", "chai": "^4.2.0", "chai-as-promised": "^7.1.1", + "delay": "^4.3.0", "dirty-chai": "^2.0.1", - "it-all": "^1.0.1" + "it-all": "^1.0.1", + "it-drain": "^1.0.0", + "it-to-stream": "^0.1.1" }, "contributors": [ "Alan Shaw ", diff --git a/packages/ipfs-utils/src/files/normalise-input.js b/packages/ipfs-utils/src/files/normalise-input.js index fb892692fa..a0c1249aa6 100644 --- 
a/packages/ipfs-utils/src/files/normalise-input.js +++ b/packages/ipfs-utils/src/files/normalise-input.js @@ -223,7 +223,7 @@ function toAsyncIterable (input) { })() } - throw errCode(new Error(`Unexpected input: ${input}`, 'ERR_UNEXPECTED_INPUT')) + throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT') } function toBuffer (chunk) { diff --git a/packages/ipfs-utils/src/http.js b/packages/ipfs-utils/src/http.js index 83de94edf7..25720da539 100644 --- a/packages/ipfs-utils/src/http.js +++ b/packages/ipfs-utils/src/http.js @@ -7,7 +7,7 @@ const { URL, URLSearchParams } = require('iso-url') const global = require('./globalthis') const TextDecoder = require('./text-encoder') const Request = global.Request -const AbortController = global.AbortController +const AbortController = require('abort-controller') class TimeoutError extends Error { constructor () { @@ -33,16 +33,18 @@ const timeout = (promise, ms, abortController) => { const timeoutID = setTimeout(() => { reject(new TimeoutError()) - if (AbortController) { - abortController.abort() - } + abortController.abort() }, ms) promise - .then(resolve) - .catch(reject) - .then(() => { + .then((result) => { + clearTimeout(timeoutID) + + resolve(result) + }, (err) => { clearTimeout(timeoutID) + + reject(err) }) }) } @@ -79,16 +81,15 @@ class HTTP { this.opts = merge(defaults, options) // connect internal abort to external - if (AbortController) { - this.abortController = new AbortController() - if (this.opts.signal) { - this.opts.signal.addEventListener('abort', () => { - this.abortController.abort() - }) - } + this.abortController = new AbortController() - this.opts.signal = this.abortController.signal + if (this.opts.signal) { + this.opts.signal.addEventListener('abort', () => { + this.abortController.abort() + }) } + + this.opts.signal = this.abortController.signal } /** @@ -122,6 +123,7 @@ class HTTP { // TODO: try to remove the logic above or fix URL instance input without trailing '/' const url = new URL(resource, opts.base) + if (opts.searchParams) { url.search = opts.transformSearchParams(new URLSearchParams(opts.searchParams)) } @@ -134,6 +136,7 @@ class HTTP { } throw new HTTPError(response) } + return response } @@ -291,4 +294,39 @@ HTTP.TimeoutError = TimeoutError HTTP.ndjson = ndjson HTTP.streamToAsyncIterator = streamToAsyncIterator +/** + * @param {string | URL | Request} resource + * @param {APIOptions} options + * @returns {Promise} + */ +HTTP.post = (resource, options) => new HTTP(options).post(resource, options) + +/** + * @param {string | URL | Request} resource + * @param {APIOptions} options + * @returns {Promise} + */ +HTTP.get = (resource, options) => new HTTP(options).get(resource, options) + +/** + * @param {string | URL | Request} resource + * @param {APIOptions} options + * @returns {Promise} + */ +HTTP.put = (resource, options) => new HTTP(options).put(resource, options) + +/** + * @param {string | URL | Request} resource + * @param {APIOptions} options + * @returns {Promise} + */ +HTTP.delete = (resource, options) => new HTTP(options).delete(resource, options) + +/** + * @param {string | URL | Request} resource + * @param {APIOptions} options + * @returns {Promise} + */ +HTTP.options = (resource, options) => new HTTP(options).options(resource, options) + module.exports = HTTP diff --git a/packages/ipfs-utils/test/http.spec.js b/packages/ipfs-utils/test/http.spec.js new file mode 100644 index 0000000000..73c879b095 --- /dev/null +++ b/packages/ipfs-utils/test/http.spec.js @@ -0,0 +1,75 @@ +'use 
strict' + +/* eslint-env mocha */ +const { expect } = require('./utils/chai') +const HTTP = require('../src/http') +const toStream = require('it-to-stream') +const delay = require('delay') +const AbortController = require('abort-controller') +const drain = require('it-drain') +const { isBrowser, isWebWorker } = require('../src/env') + +describe('http', function () { + it('makes a GET request', async function () { + const res = HTTP.get('http://localhost:3000') + + await expect(res).to.eventually.be.fulfilled() + }) + + it('allow async aborting', async function () { + const controller = new AbortController() + + const res = HTTP.get('http://localhost:3000', { + signal: controller.signal + }) + controller.abort() + + await expect(res).to.eventually.be.rejectedWith(/aborted/) + }) + + it.skip('should handle errors in streaming bodies', async function () { + if (isBrowser || isWebWorker) { + // streaming bodies not supported by browsers + return this.skip() + } + + const err = new Error('Should be caught') + const body = (async function * () { + yield Buffer.from('{}\n') + + await delay(100) + + throw err + }()) + + const res = await HTTP.post('http://localhost:3000', { + body: toStream.readable(body) + }) + + await expect(drain(HTTP.ndjson(res.body))).to.eventually.be.rejectedWith(/aborted/) + }) + + it.skip('should handle errors in streaming bodies when a signal is passed', async function () { + if (isBrowser || isWebWorker) { + // streaming bodies not supported by browsers + return this.skip() + } + + const controller = new AbortController() + const err = new Error('Should be caught') + const body = (async function * () { + yield Buffer.from('{}\n') + + await delay(100) + + throw err + }()) + + const res = await HTTP.post('http://localhost:3000', { + body: toStream.readable(body), + signal: controller.signal + }) + + await expect(drain(HTTP.ndjson(res.body))).to.eventually.be.rejectedWith(/aborted/) + }) +}) diff --git a/packages/ipfs-mfs/test/helpers/chai.js b/packages/ipfs-utils/test/utils/chai.js similarity index 72% rename from packages/ipfs-mfs/test/helpers/chai.js rename to packages/ipfs-utils/test/utils/chai.js index c00c40d102..2b87a01f91 100644 --- a/packages/ipfs-mfs/test/helpers/chai.js +++ b/packages/ipfs-utils/test/utils/chai.js @@ -1,7 +1,10 @@ 'use strict' const chai = require('chai') + chai.use(require('dirty-chai')) chai.use(require('chai-as-promised')) -module.exports = chai.expect +module.exports = { + expect: chai.expect +} diff --git a/packages/ipfs/README.md b/packages/ipfs/README.md index 8effcf9846..b84f47e3c9 100644 --- a/packages/ipfs/README.md +++ b/packages/ipfs/README.md @@ -1016,7 +1016,6 @@ Listing of the main packages used in the IPFS ecosystem. 
There are also three sp | [`ipfs-http-client`](//github.com/ipfs/js-ipfs) | [![npm](https://img.shields.io/npm/v/ipfs-http-client.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs/master)](https://travis-ci.com/ipfs/js-ipfs) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs) | [Alan Shaw](mailto:alan@tableflip.io) | | [`ipfs-http-response`](//github.com/ipfs/js-ipfs-http-response) | [![npm](https://img.shields.io/npm/v/ipfs-http-response.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-http-response/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-http-response.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-http-response) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-http-response/master)](https://travis-ci.com/ipfs/js-ipfs-http-response) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-http-response/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-http-response) | [Vasco Santos](mailto:vasco.santos@moxy.studio) | | [`ipfsd-ctl`](//github.com/ipfs/js-ipfsd-ctl) | [![npm](https://img.shields.io/npm/v/ipfsd-ctl.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfsd-ctl/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfsd-ctl.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfsd-ctl) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfsd-ctl/master)](https://travis-ci.com/ipfs/js-ipfsd-ctl) | [![codecov](https://codecov.io/gh/ipfs/js-ipfsd-ctl/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfsd-ctl) | [Hugo Dias](mailto:mail@hugodias.me) | -| [`ipfs-multipart`](//github.com/ipfs/js-ipfs) | [![npm](https://img.shields.io/npm/v/ipfs-multipart.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs/master)](https://travis-ci.com/ipfs/js-ipfs) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs) | [Hugo Dias](mailto:mail@hugodias.me) | | [`is-ipfs`](//github.com/ipfs/is-ipfs) | [![npm](https://img.shields.io/npm/v/is-ipfs.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/is-ipfs/releases) | [![Deps](https://david-dm.org/ipfs/is-ipfs.svg?style=flat-square)](https://david-dm.org/ipfs/is-ipfs) | [![Travis CI](https://flat.badgen.net/travis/ipfs/is-ipfs/master)](https://travis-ci.com/ipfs/is-ipfs) | [![codecov](https://codecov.io/gh/ipfs/is-ipfs/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/is-ipfs) | [Marcin Rataj](mailto:lidel@lidel.org) | | [`aegir`](//github.com/ipfs/aegir) | [![npm](https://img.shields.io/npm/v/aegir.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/aegir/releases) | [![Deps](https://david-dm.org/ipfs/aegir.svg?style=flat-square)](https://david-dm.org/ipfs/aegir) | [![Travis CI](https://flat.badgen.net/travis/ipfs/aegir/master)](https://travis-ci.com/ipfs/aegir) | [![codecov](https://codecov.io/gh/ipfs/aegir/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/aegir) | [Hugo Dias](mailto:hugomrdias@gmail.com) | | 
[`ipfs-repo-migrations`](//github.com/ipfs/js-ipfs-repo-migrations) | [![npm](https://img.shields.io/npm/v/ipfs-repo-migrations.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-repo-migrations/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-repo-migrations.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-repo-migrations) | [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo-migrations/master)](https://travis-ci.com/ipfs/js-ipfs-repo-migrations) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations/branch/master/graph/badge.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations) | N/A | diff --git a/packages/ipfs/package-list.json b/packages/ipfs/package-list.json index 049313fba5..845b9af40f 100644 --- a/packages/ipfs/package-list.json +++ b/packages/ipfs/package-list.json @@ -30,7 +30,6 @@ ["ipfs/js-ipfs", "ipfs-http-client"], ["ipfs/js-ipfs-http-response", "ipfs-http-response"], ["ipfs/js-ipfsd-ctl", "ipfsd-ctl"], - ["ipfs/js-ipfs", "ipfs-multipart"], ["ipfs/is-ipfs", "is-ipfs"], ["ipfs/aegir", "aegir"], ["ipfs/js-ipfs-repo-migrations", "ipfs-repo-migrations"], diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json index de450d34ce..3f965287de 100644 --- a/packages/ipfs/package.json +++ b/packages/ipfs/package.json @@ -43,6 +43,8 @@ "test": "cross-env ECHO_SERVER_PORT=37480 aegir test", "test:node": "cross-env ECHO_SERVER_PORT=37481 aegir test -t node", "test:browser": "cross-env ECHO_SERVER_PORT=37482 aegir test -t browser", + "test:browser:http": "cross-env ECHO_SERVER_PORT=37489 aegir test -t browser -f test/http-api/index.js", + "test:browser:interface:http": "cross-env ECHO_SERVER_PORT=37489 aegir test -t browser -f test/http-api/interface.js", "test:webworker": "cross-env ECHO_SERVER_PORT=37483 aegir test -t webworker", "test:electron": "cross-env ECHO_SERVER_PORT=37484 aegir test -t electron-main -t electron-renderer", "test:electron-main": "cross-env ECHO_SERVER_PORT=37485 aegir test -t electron-main", @@ -66,6 +68,7 @@ "dependencies": { "@hapi/ammo": "^3.1.2", "@hapi/boom": "^7.4.3", + "@hapi/content": "^4.1.0", "@hapi/hapi": "^18.4.0", "@hapi/joi": "^15.1.0", "abort-controller": "^3.0.0", @@ -89,6 +92,7 @@ "file-type": "^12.0.1", "fnv1a": "^1.0.1", "get-folder-size": "^2.0.0", + "hamt-sharding": "^1.0.0", "hapi-pino": "^6.1.0", "hashlru": "^2.3.0", "interface-datastore": "^0.8.0", @@ -97,8 +101,6 @@ "ipfs-block-service": "^0.16.0", "ipfs-http-client": "^42.0.0", "ipfs-http-response": "^0.5.0", - "ipfs-mfs": "^1.0.0", - "ipfs-multipart": "^0.3.0", "ipfs-repo": "^0.30.1", "ipfs-unixfs": "^1.0.0", "ipfs-unixfs-exporter": "^1.0.1", @@ -117,16 +119,17 @@ "is-ipfs": "^0.6.1", "it-all": "^1.0.1", "it-concat": "^1.0.0", + "it-drain": "^1.0.0", "it-glob": "0.0.7", "it-last": "^1.0.1", + "it-map": "^1.0.0", + "it-multipart": "^1.0.1", "it-pipe": "^1.1.0", "it-tar": "^1.2.1", "it-to-stream": "^0.1.1", "iterable-ndjson": "^1.1.0", "jsondiffpatch": "^0.3.11", "just-safe-set": "^2.1.0", - "ky": "^0.15.0", - "ky-universal": "^0.3.0", "libp2p": "^0.27.2", "libp2p-bootstrap": "^0.10.2", "libp2p-crypto": "^0.17.1", @@ -172,7 +175,7 @@ "yargs-promise": "^1.1.0" }, "devDependencies": { - "aegir": "^21.3.0", + "aegir": "21.3.0", "base64url": "^3.0.1", "clear-module": "^4.0.0", "cross-env": "^7.0.0", @@ -180,11 +183,12 @@ "execa": "^3.0.0", "form-data": "^3.0.0", "go-ipfs-dep": "0.4.23-3", - "hat": "0.0.3", "interface-ipfs-core": "^0.132.0", "ipfs-interop": "ipfs/interop#fix/name-pubsub", "ipfsd-ctl": "^3.0.0", + 
"iso-random-stream": "^1.1.1", "it-first": "^1.0.1", + "nanoid": "^2.1.11", "ncp": "^2.0.0", "p-event": "^4.1.0", "p-map": "^3.0.0", diff --git a/packages/ipfs-mfs/src/cli/index.js b/packages/ipfs/src/cli/commands/files.js similarity index 65% rename from packages/ipfs-mfs/src/cli/index.js rename to packages/ipfs/src/cli/commands/files.js index ae87d7ad0b..1b1a8c4c8e 100644 --- a/packages/ipfs-mfs/src/cli/index.js +++ b/packages/ipfs/src/cli/commands/files.js @@ -1,20 +1,15 @@ 'use strict' -const command = { +module.exports = { command: 'files ', description: 'Operations over mfs files (ls, mkdir, rm, etc)', builder (yargs) { - return yargs.commandDir('.') + return yargs.commandDir('files') }, handler (argv) { argv.print('Type `jsipfs files --help` for more instructions') } } - -module.exports = (yargs) => { - return yargs - .command(command) -} diff --git a/packages/ipfs-mfs/src/cli/chmod.js b/packages/ipfs/src/cli/commands/files/chmod.js similarity index 97% rename from packages/ipfs-mfs/src/cli/chmod.js rename to packages/ipfs/src/cli/commands/files/chmod.js index 21a7adf4cd..7c405d1ff5 100644 --- a/packages/ipfs-mfs/src/cli/chmod.js +++ b/packages/ipfs/src/cli/commands/files/chmod.js @@ -3,7 +3,7 @@ const { asBoolean, asOctal -} = require('./utils') +} = require('../../utils') module.exports = { command: 'chmod [mode] [path]', diff --git a/packages/ipfs-mfs/src/cli/cp.js b/packages/ipfs/src/cli/commands/files/cp.js similarity index 97% rename from packages/ipfs-mfs/src/cli/cp.js rename to packages/ipfs/src/cli/commands/files/cp.js index 82b9190716..41af91118d 100644 --- a/packages/ipfs-mfs/src/cli/cp.js +++ b/packages/ipfs/src/cli/commands/files/cp.js @@ -2,7 +2,7 @@ const { asBoolean -} = require('./utils') +} = require('../../utils') module.exports = { command: 'cp ', diff --git a/packages/ipfs-mfs/src/cli/flush.js b/packages/ipfs/src/cli/commands/files/flush.js similarity index 78% rename from packages/ipfs-mfs/src/cli/flush.js rename to packages/ipfs/src/cli/commands/files/flush.js index 95e38a8980..b80b6628f0 100644 --- a/packages/ipfs-mfs/src/cli/flush.js +++ b/packages/ipfs/src/cli/commands/files/flush.js @@ -1,9 +1,5 @@ 'use strict' -const { - FILE_SEPARATOR -} = require('../core/utils/constants') - module.exports = { command: 'flush [path]', @@ -22,7 +18,7 @@ module.exports = { cidBase } = argv - let cid = await ipfs.files.flush(path || FILE_SEPARATOR, {}) + let cid = await ipfs.files.flush(path || '/', {}) if (cidBase && cidBase !== 'base58btc' && cid.version === 0) { cid = cid.toV1() diff --git a/packages/ipfs-mfs/src/cli/ls.js b/packages/ipfs/src/cli/commands/files/ls.js similarity index 85% rename from packages/ipfs-mfs/src/cli/ls.js rename to packages/ipfs/src/cli/commands/files/ls.js index e9d7696458..53df34b9d0 100644 --- a/packages/ipfs-mfs/src/cli/ls.js +++ b/packages/ipfs/src/cli/commands/files/ls.js @@ -3,10 +3,7 @@ const all = require('it-all') const { asBoolean -} = require('./utils') -const { - FILE_SEPARATOR -} = require('../core/utils/constants') +} = require('../../utils') const formatMode = require('ipfs-utils/src/files/format-mode') const formatMtime = require('ipfs-utils/src/files/format-mtime') @@ -54,7 +51,7 @@ module.exports = { // https://github.com/ipfs/go-ipfs/issues/5181 if (sort) { - let files = await all(ipfs.files.ls(path || FILE_SEPARATOR)) + let files = await all(ipfs.files.ls(path || '/')) files = files.sort((a, b) => { return a.name.localeCompare(b.name) @@ -64,7 +61,7 @@ module.exports = { return } - for await (const file of 
ipfs.files.ls(path || FILE_SEPARATOR)) { + for await (const file of ipfs.files.ls(path || '/')) { printListing(file) } } diff --git a/packages/ipfs-mfs/src/cli/mkdir.js b/packages/ipfs/src/cli/commands/files/mkdir.js similarity index 98% rename from packages/ipfs-mfs/src/cli/mkdir.js rename to packages/ipfs/src/cli/commands/files/mkdir.js index 95b62d072f..5f156dfd16 100644 --- a/packages/ipfs-mfs/src/cli/mkdir.js +++ b/packages/ipfs/src/cli/commands/files/mkdir.js @@ -4,7 +4,7 @@ const { asBoolean, asOctal, asDateFromSeconds -} = require('./utils') +} = require('../../utils') module.exports = { command: 'mkdir ', diff --git a/packages/ipfs-mfs/src/cli/mv.js b/packages/ipfs/src/cli/commands/files/mv.js similarity index 98% rename from packages/ipfs-mfs/src/cli/mv.js rename to packages/ipfs/src/cli/commands/files/mv.js index 15740013cd..6ea6217ac6 100644 --- a/packages/ipfs-mfs/src/cli/mv.js +++ b/packages/ipfs/src/cli/commands/files/mv.js @@ -2,7 +2,7 @@ const { asBoolean -} = require('./utils') +} = require('../../utils') module.exports = { command: 'mv ', diff --git a/packages/ipfs-mfs/src/cli/read.js b/packages/ipfs/src/cli/commands/files/read.js similarity index 100% rename from packages/ipfs-mfs/src/cli/read.js rename to packages/ipfs/src/cli/commands/files/read.js diff --git a/packages/ipfs-mfs/src/cli/rm.js b/packages/ipfs/src/cli/commands/files/rm.js similarity index 94% rename from packages/ipfs-mfs/src/cli/rm.js rename to packages/ipfs/src/cli/commands/files/rm.js index 694011caea..c304c7cdc3 100644 --- a/packages/ipfs-mfs/src/cli/rm.js +++ b/packages/ipfs/src/cli/commands/files/rm.js @@ -2,7 +2,7 @@ const { asBoolean -} = require('./utils') +} = require('../../utils') module.exports = { command: 'rm ', diff --git a/packages/ipfs-mfs/src/cli/stat.js b/packages/ipfs/src/cli/commands/files/stat.js similarity index 98% rename from packages/ipfs-mfs/src/cli/stat.js rename to packages/ipfs/src/cli/commands/files/stat.js index 3fc59a42b0..2cad9e02d2 100644 --- a/packages/ipfs-mfs/src/cli/stat.js +++ b/packages/ipfs/src/cli/commands/files/stat.js @@ -2,7 +2,7 @@ const { asBoolean -} = require('./utils') +} = require('../../utils') const formatMode = require('ipfs-utils/src/files/format-mode') const formatMtime = require('ipfs-utils/src/files/format-mtime') diff --git a/packages/ipfs-mfs/src/cli/touch.js b/packages/ipfs/src/cli/commands/files/touch.js similarity index 97% rename from packages/ipfs-mfs/src/cli/touch.js rename to packages/ipfs/src/cli/commands/files/touch.js index beeb095adc..bceac05fa2 100644 --- a/packages/ipfs-mfs/src/cli/touch.js +++ b/packages/ipfs/src/cli/commands/files/touch.js @@ -3,7 +3,7 @@ const { asBoolean, asDateFromSeconds -} = require('./utils') +} = require('../../utils') module.exports = { command: 'touch [path]', diff --git a/packages/ipfs-mfs/src/cli/write.js b/packages/ipfs/src/cli/commands/files/write.js similarity index 99% rename from packages/ipfs-mfs/src/cli/write.js rename to packages/ipfs/src/cli/commands/files/write.js index 2f54bb0dd7..dc275d427f 100644 --- a/packages/ipfs-mfs/src/cli/write.js +++ b/packages/ipfs/src/cli/commands/files/write.js @@ -4,7 +4,7 @@ const { asBoolean, asOctal, asDateFromSeconds -} = require('./utils') +} = require('../../utils') module.exports = { command: 'write ', diff --git a/packages/ipfs/src/cli/parser.js b/packages/ipfs/src/cli/parser.js index 486902ee24..6d648a181e 100644 --- a/packages/ipfs/src/cli/parser.js +++ b/packages/ipfs/src/cli/parser.js @@ -1,7 +1,6 @@ 'use strict' const yargs = 
require('yargs/yargs')(process.argv.slice(2)) -const mfs = require('ipfs-mfs/cli') const utils = require('./utils') const parser = yargs @@ -32,7 +31,4 @@ const parser = yargs .strict() .completion() -// add MFS (Files API) commands -mfs(parser) - module.exports = parser diff --git a/packages/ipfs/src/cli/utils.js b/packages/ipfs/src/cli/utils.js index 67eb35aef7..06d664b095 100644 --- a/packages/ipfs/src/cli/utils.js +++ b/packages/ipfs/src/cli/utils.js @@ -116,6 +116,26 @@ async function getIpfs (argv) { } } +const asBoolean = (value) => { + if (value === false || value === true) { + return value + } + + if (value === undefined) { + return true + } + + return false +} + +const asOctal = (value) => { + return parseInt(value, 8) +} + +const asDateFromSeconds = (value) => { + return new Date(parseInt(value, 10) * 1000) +} + module.exports = { getIpfs, isDaemonOn, @@ -124,5 +144,8 @@ module.exports = { print, createProgressBar, rightpad, - ipfsPathHelp + ipfsPathHelp, + asBoolean, + asOctal, + asDateFromSeconds } diff --git a/packages/ipfs-mfs/src/core/chmod.js b/packages/ipfs/src/core/components/files/chmod.js similarity index 100% rename from packages/ipfs-mfs/src/core/chmod.js rename to packages/ipfs/src/core/components/files/chmod.js diff --git a/packages/ipfs-mfs/src/core/cp.js b/packages/ipfs/src/core/components/files/cp.js similarity index 100% rename from packages/ipfs-mfs/src/core/cp.js rename to packages/ipfs/src/core/components/files/cp.js diff --git a/packages/ipfs-mfs/src/core/flush.js b/packages/ipfs/src/core/components/files/flush.js similarity index 68% rename from packages/ipfs-mfs/src/core/flush.js rename to packages/ipfs/src/core/components/files/flush.js index a672f78678..9c4c130929 100644 --- a/packages/ipfs-mfs/src/core/flush.js +++ b/packages/ipfs/src/core/components/files/flush.js @@ -2,14 +2,11 @@ const applyDefaultOptions = require('./utils/apply-default-options') const stat = require('./stat') -const { - FILE_SEPARATOR -} = require('./utils/constants') const defaultOptions = {} module.exports = (context) => { - return async function mfsFlush (path = FILE_SEPARATOR, options = defaultOptions) { + return async function mfsFlush (path = '/', options = defaultOptions) { options = applyDefaultOptions(options, defaultOptions) const result = await stat(context)(path, options) diff --git a/packages/ipfs/src/core/components/files.js b/packages/ipfs/src/core/components/files/index.js similarity index 86% rename from packages/ipfs/src/core/components/files.js rename to packages/ipfs/src/core/components/files/index.js index 5f4e8bc6b9..c47ac8b1da 100644 --- a/packages/ipfs/src/core/components/files.js +++ b/packages/ipfs/src/core/components/files/index.js @@ -1,10 +1,83 @@ 'use strict' -const mfs = require('ipfs-mfs/core') +const createLock = require('./utils/create-lock') const isIpfs = require('is-ipfs') +// These operations are read-locked at the function level and will execute simultaneously +const readOperations = { + stat: require('./stat') +} + +// These operations are locked at the function level and will execute in series +const writeOperations = { + chmod: require('./chmod'), + cp: require('./cp'), + flush: require('./flush'), + mkdir: require('./mkdir'), + mv: require('./mv'), + rm: require('./rm'), + touch: require('./touch') +} + +// These operations are asynchronous and manage their own locking +const unwrappedOperations = { + write: require('./write'), + read: require('./read'), + ls: require('./ls') +} + +const wrap = ({ + options, mfs, operations, lock +}) => { 
+ Object.keys(operations).forEach(key => { + mfs[key] = lock(operations[key](options)) + }) +} + +const defaultOptions = { + repoOwner: true, + ipld: null, + repo: null +} + +function createMfs (options) { + const { + repoOwner + } = Object.assign({}, defaultOptions || {}, options) + + options.repo = { + blocks: options.blocks, + datastore: options.datastore + } + + const lock = createLock(repoOwner) + + const readLock = (operation) => { + return lock.readLock(operation) + } + + const writeLock = (operation) => { + return lock.writeLock(operation) + } + + const mfs = {} + + wrap({ + options, mfs, operations: readOperations, lock: readLock + }) + wrap({ + options, mfs, operations: writeOperations, lock: writeLock + }) + + Object.keys(unwrappedOperations).forEach(key => { + mfs[key] = unwrappedOperations[key](options) + }) + + return mfs +} + module.exports = ({ ipld, blockService, repo, preload, options: constructorOptions }) => { - const methods = mfs({ + const methods = createMfs({ ipld, blocks: blockService, datastore: repo.root, diff --git a/packages/ipfs-mfs/src/core/ls.js b/packages/ipfs/src/core/components/files/ls.js similarity index 83% rename from packages/ipfs-mfs/src/core/ls.js rename to packages/ipfs/src/core/components/files/ls.js index f8380ce367..840d7bdd24 100644 --- a/packages/ipfs-mfs/src/core/ls.js +++ b/packages/ipfs/src/core/components/files/ls.js @@ -4,9 +4,8 @@ const exporter = require('ipfs-unixfs-exporter') const applyDefaultOptions = require('./utils/apply-default-options') const toMfsPath = require('./utils/to-mfs-path') const { - FILE_SEPARATOR, - FILE_TYPES -} = require('./utils/constants') + MFS_FILE_TYPES +} = require('../../utils') const defaultOptions = { @@ -20,7 +19,7 @@ const toOutput = (fsEntry) => { if (fsEntry.unixfs) { size = fsEntry.unixfs.fileSize() - type = FILE_TYPES[fsEntry.unixfs.type] + type = MFS_FILE_TYPES[fsEntry.unixfs.type] mode = fsEntry.unixfs.mode mtime = fsEntry.unixfs.mtime } @@ -29,22 +28,25 @@ const toOutput = (fsEntry) => { cid: fsEntry.cid, name: fsEntry.name, type, - size, - mode + size } if (mtime !== undefined) { output.mtime = mtime } + if (mode !== undefined) { + output.mode = mode + } + return output } module.exports = (context) => { - return async function * mfsLs (path = FILE_SEPARATOR, options = {}) { + return async function * mfsLs (path = '/', options = {}) { if (typeof path === 'object' && !(path instanceof String)) { options = path - path = FILE_SEPARATOR + path = '/' } options = applyDefaultOptions(options, defaultOptions) diff --git a/packages/ipfs-mfs/src/core/mkdir.js b/packages/ipfs/src/core/components/files/mkdir.js similarity index 91% rename from packages/ipfs-mfs/src/core/mkdir.js rename to packages/ipfs/src/core/components/files/mkdir.js index 06be27d295..daed25aa74 100644 --- a/packages/ipfs-mfs/src/core/mkdir.js +++ b/packages/ipfs/src/core/components/files/mkdir.js @@ -10,9 +10,6 @@ const updateTree = require('./utils/update-tree') const addLink = require('./utils/add-link') const withMfsRoot = require('./utils/with-mfs-root') const applyDefaultOptions = require('./utils/apply-default-options') -const { - FILE_SEPARATOR -} = require('./utils/constants') const defaultOptions = { parents: false, @@ -34,16 +31,16 @@ module.exports = (context) => { path = path.trim() - if (path === FILE_SEPARATOR) { + if (path === '/') { if (options.parents) { return } - throw errCode(new Error(`cannot create directory '${FILE_SEPARATOR}': Already exists`), 'ERR_INVALID_PATH') + throw errCode(new Error('cannot create 
directory \'/\': Already exists'), 'ERR_INVALID_PATH') } - if (path.substring(0, 1) !== FILE_SEPARATOR) { - throw errCode(new Error('paths must start with a leading /'), 'ERR_INVALID_PATH') + if (path.substring(0, 1) !== '/') { + throw errCode(new Error('paths must start with a leading slash'), 'ERR_INVALID_PATH') } log(`Creating ${path}`) diff --git a/packages/ipfs-mfs/src/core/mv.js b/packages/ipfs/src/core/components/files/mv.js similarity index 100% rename from packages/ipfs-mfs/src/core/mv.js rename to packages/ipfs/src/core/components/files/mv.js diff --git a/packages/ipfs-mfs/src/core/read.js b/packages/ipfs/src/core/components/files/read.js similarity index 100% rename from packages/ipfs-mfs/src/core/read.js rename to packages/ipfs/src/core/components/files/read.js diff --git a/packages/ipfs-mfs/src/core/rm.js b/packages/ipfs/src/core/components/files/rm.js similarity index 95% rename from packages/ipfs-mfs/src/core/rm.js rename to packages/ipfs/src/core/components/files/rm.js index 198d7b20de..2e1603e4c3 100644 --- a/packages/ipfs-mfs/src/core/rm.js +++ b/packages/ipfs/src/core/components/files/rm.js @@ -8,9 +8,6 @@ const removeLink = require('./utils/remove-link') const toMfsPath = require('./utils/to-mfs-path') const toTrail = require('./utils/to-trail') const applyDefaultOptions = require('./utils/apply-default-options') -const { - FILE_SEPARATOR -} = require('./utils/constants') const defaultOptions = { recursive: false, @@ -33,7 +30,7 @@ module.exports = (context) => { } sources.forEach(source => { - if (source.path === FILE_SEPARATOR) { + if (source.path === '/') { throw errCode(new Error('Cannot delete root'), 'ERR_INVALID_PARAMS') } }) diff --git a/packages/ipfs-mfs/src/core/stat.js b/packages/ipfs/src/core/components/files/stat.js similarity index 93% rename from packages/ipfs-mfs/src/core/stat.js rename to packages/ipfs/src/core/components/files/stat.js index 2228824d1d..c772d58dc3 100644 --- a/packages/ipfs-mfs/src/core/stat.js +++ b/packages/ipfs/src/core/components/files/stat.js @@ -73,7 +73,14 @@ const statters = { if (file.unixfs) { output.size = file.unixfs.fileSize() - output.type = file.unixfs.type + + // for go-ipfs compatibility + if (file.unixfs.type === 'hamt-sharded-directory') { + output.type = 'directory' + } else { + output.type = file.unixfs.type + } + output.mode = file.unixfs.mode if (file.unixfs.isDirectory()) { diff --git a/packages/ipfs-mfs/src/core/touch.js b/packages/ipfs/src/core/components/files/touch.js similarity index 100% rename from packages/ipfs-mfs/src/core/touch.js rename to packages/ipfs/src/core/components/files/touch.js diff --git a/packages/ipfs-mfs/src/core/utils/add-link.js b/packages/ipfs/src/core/components/files/utils/add-link.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/add-link.js rename to packages/ipfs/src/core/components/files/utils/add-link.js diff --git a/packages/ipfs-mfs/src/core/utils/apply-default-options.js b/packages/ipfs/src/core/components/files/utils/apply-default-options.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/apply-default-options.js rename to packages/ipfs/src/core/components/files/utils/apply-default-options.js diff --git a/packages/ipfs-mfs/src/core/utils/create-lock.js b/packages/ipfs/src/core/components/files/utils/create-lock.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/create-lock.js rename to packages/ipfs/src/core/components/files/utils/create-lock.js diff --git a/packages/ipfs-mfs/src/core/utils/create-node.js 
b/packages/ipfs/src/core/components/files/utils/create-node.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/create-node.js rename to packages/ipfs/src/core/components/files/utils/create-node.js diff --git a/packages/ipfs-mfs/src/core/utils/hamt-utils.js b/packages/ipfs/src/core/components/files/utils/hamt-utils.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/hamt-utils.js rename to packages/ipfs/src/core/components/files/utils/hamt-utils.js diff --git a/packages/ipfs-mfs/src/core/utils/remove-link.js b/packages/ipfs/src/core/components/files/utils/remove-link.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/remove-link.js rename to packages/ipfs/src/core/components/files/utils/remove-link.js diff --git a/packages/ipfs-mfs/src/core/utils/to-async-iterator.js b/packages/ipfs/src/core/components/files/utils/to-async-iterator.js similarity index 83% rename from packages/ipfs-mfs/src/core/utils/to-async-iterator.js rename to packages/ipfs/src/core/components/files/utils/to-async-iterator.js index dac3a8d508..930e5b2450 100644 --- a/packages/ipfs-mfs/src/core/utils/to-async-iterator.js +++ b/packages/ipfs/src/core/components/files/utils/to-async-iterator.js @@ -1,22 +1,20 @@ 'use strict' const errCode = require('err-code') -const fs = require('fs') const log = require('debug')('ipfs:mfs:utils:to-async-iterator') const { - MAX_CHUNK_SIZE -} = require('./constants') + MFS_MAX_CHUNK_SIZE +} = require('../../../utils') const toAsyncIterator = (content) => { if (!content) { - throw errCode(new Error('paths must start with a leading /'), 'ERR_INVALID_PATH') + throw errCode(new Error('paths must start with a leading slash'), 'ERR_INVALID_PATH') } if (typeof content === 'string' || content instanceof String) { - // Paths, node only - log('Content was a path') + log('Content was a string') - return fs.createReadStream(content) + content = Buffer.from(content) } if (content.length) { @@ -54,8 +52,8 @@ const toAsyncIterator = (content) => { } return new Promise((resolve, reject) => { - const chunk = content.slice(index, MAX_CHUNK_SIZE) - index += MAX_CHUNK_SIZE + const chunk = content.slice(index, MFS_MAX_CHUNK_SIZE) + index += MFS_MAX_CHUNK_SIZE const reader = new global.FileReader() diff --git a/packages/ipfs-mfs/src/core/utils/to-mfs-path.js b/packages/ipfs/src/core/components/files/utils/to-mfs-path.js similarity index 69% rename from packages/ipfs-mfs/src/core/utils/to-mfs-path.js rename to packages/ipfs/src/core/components/files/utils/to-mfs-path.js index 6b176760d8..52d1f1e952 100644 --- a/packages/ipfs-mfs/src/core/utils/to-mfs-path.js +++ b/packages/ipfs/src/core/components/files/utils/to-mfs-path.js @@ -1,8 +1,5 @@ 'use strict' -const { - FILE_SEPARATOR -} = require('./constants') const loadMfsRoot = require('./with-mfs-root') const toPathComponents = require('./to-path-components') const exporter = require('ipfs-unixfs-exporter') @@ -27,12 +24,12 @@ const toMfsPath = async (context, path) => { throw errCode(new Error('paths must not be empty'), 'ERR_NO_PATH') } - if (path.substring(0, 1) !== FILE_SEPARATOR) { - throw errCode(new Error(`paths must start with a leading ${FILE_SEPARATOR}`), 'ERR_INVALID_PATH') + if (path.substring(0, 1) !== '/') { + throw errCode(new Error('paths must start with a leading slash'), 'ERR_INVALID_PATH') } - if (path.substring(path.length - FILE_SEPARATOR.length) === FILE_SEPARATOR) { - path = path.substring(0, path.length - FILE_SEPARATOR.length) + if (path.substring(path.length - 1) === '/') { + path = 
path.substring(0, path.length - 1) } const pathComponents = toPathComponents(path) @@ -42,25 +39,25 @@ const toMfsPath = async (context, path) => { let mfsDirectory if (pathComponents.length === 2) { - mfsDirectory = `${FILE_SEPARATOR}${pathComponents.join(FILE_SEPARATOR)}` + mfsDirectory = `/${pathComponents.join('/')}` } else { - mfsDirectory = `${FILE_SEPARATOR}${pathComponents.slice(0, pathComponents.length - 1).join(FILE_SEPARATOR)}` + mfsDirectory = `/${pathComponents.slice(0, pathComponents.length - 1).join('/')}` } return { type: 'ipfs', depth: pathComponents.length - 2, - mfsPath: `${FILE_SEPARATOR}${pathComponents.join(FILE_SEPARATOR)}`, + mfsPath: `/${pathComponents.join('/')}`, mfsDirectory, parts: pathComponents, - path: `${FILE_SEPARATOR}${pathComponents.join(FILE_SEPARATOR)}`, + path: `/${pathComponents.join('/')}`, name: pathComponents[pathComponents.length - 1] } } - const mfsPath = `/${IPFS_PREFIX}/${root}${pathComponents.length ? '/' + pathComponents.join(FILE_SEPARATOR) : ''}` - const mfsDirectory = `/${IPFS_PREFIX}/${root}/${pathComponents.slice(0, pathComponents.length - 1).join(FILE_SEPARATOR)}` + const mfsPath = `/${IPFS_PREFIX}/${root}${pathComponents.length ? '/' + pathComponents.join('/') : ''}` + const mfsDirectory = `/${IPFS_PREFIX}/${root}/${pathComponents.slice(0, pathComponents.length - 1).join('/')}` return { type: 'mfs', @@ -69,7 +66,7 @@ const toMfsPath = async (context, path) => { mfsDirectory, mfsPath, parts: pathComponents, - path: `${FILE_SEPARATOR}${pathComponents.join(FILE_SEPARATOR)}`, + path: `/${pathComponents.join('/')}`, name: pathComponents[pathComponents.length - 1] } }) diff --git a/packages/ipfs-mfs/src/core/utils/to-path-components.js b/packages/ipfs/src/core/components/files/utils/to-path-components.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/to-path-components.js rename to packages/ipfs/src/core/components/files/utils/to-path-components.js diff --git a/packages/ipfs-mfs/src/core/utils/to-sources-and-destination.js b/packages/ipfs/src/core/components/files/utils/to-sources-and-destination.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/to-sources-and-destination.js rename to packages/ipfs/src/core/components/files/utils/to-sources-and-destination.js diff --git a/packages/ipfs-mfs/src/core/utils/to-sources.js b/packages/ipfs/src/core/components/files/utils/to-sources.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/to-sources.js rename to packages/ipfs/src/core/components/files/utils/to-sources.js diff --git a/packages/ipfs-mfs/src/core/utils/to-trail.js b/packages/ipfs/src/core/components/files/utils/to-trail.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/to-trail.js rename to packages/ipfs/src/core/components/files/utils/to-trail.js diff --git a/packages/ipfs-mfs/src/core/utils/update-mfs-root.js b/packages/ipfs/src/core/components/files/utils/update-mfs-root.js similarity index 90% rename from packages/ipfs-mfs/src/core/utils/update-mfs-root.js rename to packages/ipfs/src/core/components/files/utils/update-mfs-root.js index f29b600769..b4ab042d34 100644 --- a/packages/ipfs-mfs/src/core/utils/update-mfs-root.js +++ b/packages/ipfs/src/core/components/files/utils/update-mfs-root.js @@ -3,7 +3,7 @@ const log = require('debug')('ipfs:mfs:utils:update-mfs-root') const { MFS_ROOT_KEY -} = require('./constants') +} = require('../../../utils') const updateMfsRoot = async (context, cid) => { log(`New MFS root will be ${cid}`) diff --git 
a/packages/ipfs-mfs/src/core/utils/update-tree.js b/packages/ipfs/src/core/components/files/utils/update-tree.js similarity index 100% rename from packages/ipfs-mfs/src/core/utils/update-tree.js rename to packages/ipfs/src/core/components/files/utils/update-tree.js diff --git a/packages/ipfs-mfs/src/core/utils/with-mfs-root.js b/packages/ipfs/src/core/components/files/utils/with-mfs-root.js similarity index 97% rename from packages/ipfs-mfs/src/core/utils/with-mfs-root.js rename to packages/ipfs/src/core/components/files/utils/with-mfs-root.js index 5cf6740e64..5694c82695 100644 --- a/packages/ipfs-mfs/src/core/utils/with-mfs-root.js +++ b/packages/ipfs/src/core/components/files/utils/with-mfs-root.js @@ -11,7 +11,7 @@ const mh = require('multihashes') const { MFS_ROOT_KEY -} = require('./constants') +} = require('../../../utils') const loadMfsRoot = async (context) => { // Open the repo if it's been closed diff --git a/packages/ipfs-mfs/src/core/write.js b/packages/ipfs/src/core/components/files/write.js similarity index 98% rename from packages/ipfs-mfs/src/core/write.js rename to packages/ipfs/src/core/components/files/write.js index 5a10e3683a..0aab61b8fd 100644 --- a/packages/ipfs-mfs/src/core/write.js +++ b/packages/ipfs/src/core/components/files/write.js @@ -15,8 +15,8 @@ const updateTree = require('./utils/update-tree') const updateMfsRoot = require('./utils/update-mfs-root') const errCode = require('err-code') const { - MAX_CHUNK_SIZE -} = require('./utils/constants') + MFS_MAX_CHUNK_SIZE +} = require('../../utils') const last = require('it-last') const defaultOptions = { @@ -240,7 +240,7 @@ const limitAsyncStreamBytes = (stream, limit) => { } } -const asyncZeroes = (count, chunkSize = MAX_CHUNK_SIZE) => { +const asyncZeroes = (count, chunkSize = MFS_MAX_CHUNK_SIZE) => { const buf = Buffer.alloc(chunkSize, 0) const stream = { diff --git a/packages/ipfs/src/core/components/repo/gc.js b/packages/ipfs/src/core/components/repo/gc.js index 3f19789f37..7807cb1176 100644 --- a/packages/ipfs/src/core/components/repo/gc.js +++ b/packages/ipfs/src/core/components/repo/gc.js @@ -3,7 +3,7 @@ const CID = require('cids') const { cidToString } = require('../../../utils/cid') const log = require('debug')('ipfs:repo:gc') -const { MFS_ROOT_KEY } = require('ipfs-mfs/src/core/utils/constants') +const { MFS_ROOT_KEY } = require('../../utils') const Repo = require('ipfs-repo') const { Errors } = require('interface-datastore') const ERR_NOT_FOUND = Errors.notFoundError().code diff --git a/packages/ipfs/src/core/runtime/dns-browser.js b/packages/ipfs/src/core/runtime/dns-browser.js index 9715f7577b..61f068aeee 100644 --- a/packages/ipfs/src/core/runtime/dns-browser.js +++ b/packages/ipfs/src/core/runtime/dns-browser.js @@ -3,7 +3,7 @@ const TLRU = require('../../utils/tlru') const { default: PQueue } = require('p-queue') -const { default: ky } = require('ky-universal') +const HTTP = require('ipfs-utils/src/http') // Avoid sending multiple queries for the same hostname by caching results const cache = new TLRU(1000) @@ -16,21 +16,6 @@ const ttl = 60 * 1000 // we don't want preload calls to exhaust the limit (~6) const httpQueue = new PQueue({ concurrency: 4 }) -// Delegated HTTP resolver sending DNSLink queries to ipfs.io -// TODO: replace hardcoded host with configurable DNS over HTTPS: https://github.com/ipfs/js-ipfs/issues/2212 -const api = ky.create({ - prefixUrl: 'https://ipfs.io/api/v0/', - hooks: { - afterResponse: [ - async (input, options, response) => { - const query = new 
URL(response.url).search.slice(1) - const json = await response.json() - cache.set(query, json, ttl) - } - ] - } -}) - const ipfsPath = (response) => { if (response.Path) return response.Path throw new Error(response.Message) @@ -51,7 +36,16 @@ module.exports = async (fqdn, opts) => { // eslint-disable-line require-await } // fallback to delegated DNS resolver - const response = await httpQueue.add(() => api.get('dns', { searchParams }).json()) + const response = await httpQueue.add(async () => { + // Delegated HTTP resolver sending DNSLink queries to ipfs.io + // TODO: replace hardcoded host with configurable DNS over HTTPS: https://github.com/ipfs/js-ipfs/issues/2212 + const res = await HTTP.get('https://ipfs.io/api/v0/dns', { searchParams }) + const query = new URL(res.url).search.slice(1) + const json = await res.json() + cache.set(query, json, ttl) + + return json + }) return ipfsPath(response) } diff --git a/packages/ipfs/src/core/runtime/ipld-nodejs.js b/packages/ipfs/src/core/runtime/ipld-nodejs.js index 2431973c8b..f6acc665f1 100644 --- a/packages/ipfs/src/core/runtime/ipld-nodejs.js +++ b/packages/ipfs/src/core/runtime/ipld-nodejs.js @@ -49,7 +49,7 @@ module.exports = (blockService, options, log) => { if (IpldFormats[codec]) { return IpldFormats[codec] } else { - throw new Error(`Missing IPLD format "${codec}"`) + throw new Error(`Missing IPLD format "${multicodec.getName(codec)}"`) } } }, options) diff --git a/packages/ipfs/src/core/runtime/preload-browser.js b/packages/ipfs/src/core/runtime/preload-browser.js index 3b83e28fc3..7544a7b004 100644 --- a/packages/ipfs/src/core/runtime/preload-browser.js +++ b/packages/ipfs/src/core/runtime/preload-browser.js @@ -2,7 +2,7 @@ 'use strict' const { default: PQueue } = require('p-queue') -const { default: ky } = require('ky-universal') +const HTTP = require('ipfs-utils/src/http') const debug = require('debug') const log = debug('ipfs:preload') @@ -17,7 +17,7 @@ module.exports = function preload (url, options) { options = options || {} return httpQueue.add(async () => { - const res = await ky.get(url, { signal: options.signal }) + const res = await HTTP.get(url, { signal: options.signal }) const reader = res.body.getReader() try { diff --git a/packages/ipfs/src/core/runtime/preload-nodejs.js b/packages/ipfs/src/core/runtime/preload-nodejs.js index 1c0d1b40df..549f0379e6 100644 --- a/packages/ipfs/src/core/runtime/preload-nodejs.js +++ b/packages/ipfs/src/core/runtime/preload-nodejs.js @@ -1,6 +1,6 @@ 'use strict' -const { default: ky } = require('ky-universal') +const HTTP = require('ipfs-utils/src/http') const debug = require('debug') const log = debug('ipfs:preload') @@ -10,7 +10,7 @@ module.exports = async function preload (url, options) { log(url) options = options || {} - const res = await ky.get(url, { signal: options.signal }) + const res = await HTTP.get(url, { signal: options.signal }) for await (const _ of res.body) { // eslint-disable-line no-unused-vars // Read to completion but do not cache diff --git a/packages/ipfs/src/core/utils.js b/packages/ipfs/src/core/utils.js index aa1ff7a9dd..725100749c 100644 --- a/packages/ipfs/src/core/utils.js +++ b/packages/ipfs/src/core/utils.js @@ -5,11 +5,21 @@ const CID = require('cids') const TimeoutController = require('timeout-abort-controller') const anySignal = require('any-signal') const parseDuration = require('parse-duration') +const Key = require('interface-datastore').Key const { TimeoutError } = require('./errors') const ERR_BAD_PATH = 'ERR_BAD_PATH' exports.OFFLINE_ERROR = 
'This command must be run in online mode. Try running \'ipfs daemon\' first.' +exports.MFS_FILE_TYPES = { + file: 0, + directory: 1, + 'hamt-sharded-directory': 1 +} +exports.MFS_ROOT_KEY = new Key('/local/filesroot') +exports.MFS_MAX_CHUNK_SIZE = 262144 +exports.MFS_MAX_LINKS = 174 + /** * Break an ipfs-path down into its hash and an array of links. * diff --git a/packages/ipfs/src/http/api/resources/block.js b/packages/ipfs/src/http/api/resources/block.js index 9eeccdfb0a..69d2cbe96d 100644 --- a/packages/ipfs/src/http/api/resources/block.js +++ b/packages/ipfs/src/http/api/resources/block.js @@ -3,7 +3,7 @@ const CID = require('cids') const multihash = require('multihashes') const codecs = require('multicodec/src/base-table.json') -const multipart = require('ipfs-multipart') +const multipart = require('../../utils/multipart-request-parser') const Joi = require('@hapi/joi') const multibase = require('multibase') const Boom = require('@hapi/boom') diff --git a/packages/ipfs/src/http/api/resources/config.js b/packages/ipfs/src/http/api/resources/config.js index 604506671b..30580e9793 100644 --- a/packages/ipfs/src/http/api/resources/config.js +++ b/packages/ipfs/src/http/api/resources/config.js @@ -5,7 +5,7 @@ const get = require('dlv') const set = require('just-safe-set') const log = debug('ipfs:http-api:config') log.error = debug('ipfs:http-api:config:error') -const multipart = require('ipfs-multipart') +const multipart = require('../../utils/multipart-request-parser') const Boom = require('@hapi/boom') const Joi = require('@hapi/joi') const { profiles } = require('../../../core/components/config') diff --git a/packages/ipfs/src/http/api/resources/dag.js b/packages/ipfs/src/http/api/resources/dag.js index 8b6ff198ce..baa44668f6 100644 --- a/packages/ipfs/src/http/api/resources/dag.js +++ b/packages/ipfs/src/http/api/resources/dag.js @@ -1,7 +1,7 @@ 'use strict' const CID = require('cids') -const multipart = require('ipfs-multipart') +const multipart = require('../../utils/multipart-request-parser') const mh = require('multihashes') const Joi = require('@hapi/joi') const multibase = require('multibase') diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index 92afdacf3f..1645f40983 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -1,6 +1,6 @@ 'use strict' -const multipart = require('ipfs-multipart') +const multipart = require('../../utils/multipart-request-parser') const debug = require('debug') const tar = require('it-tar') const log = debug('ipfs:http-api:files') @@ -123,12 +123,12 @@ exports.add = { 'wrap-with-directory': Joi.boolean(), 'file-import-concurrency': Joi.number().integer().min(0).default(50), 'block-write-concurrency': Joi.number().integer().min(0).default(10), + 'shard-split-threshold': Joi.number().integer().min(0).default(1000), chunker: Joi.string(), trickle: Joi.boolean(), preload: Joi.boolean().default(true), progress: Joi.boolean(), 'stream-channels': Joi.boolean().default(true) - }) // TODO: Necessary until validate "recursive", "stream-channels" etc. 
.options({ allowUnknown: true }) @@ -190,6 +190,7 @@ exports.add = { chunker: request.query.chunker, trickle: request.query.trickle, preload: request.query.preload, + shardSplitThreshold: request.query['shard-split-threshold'], // this has to be hardcoded to 1 because we can only read one file // at a time from an HTTP request and we have to consume it completely @@ -222,7 +223,9 @@ exports.add = { } }) .catch(err => { - if (!filesParsed) { + log.error(err) + + if (!filesParsed && output.writable) { output.write(' ') } @@ -233,7 +236,7 @@ exports.add = { }) }) }) - .then(() => { + .finally(() => { output.end() }) diff --git a/packages/ipfs-mfs/src/http/chmod.js b/packages/ipfs/src/http/api/resources/files/chmod.js similarity index 71% rename from packages/ipfs-mfs/src/http/chmod.js rename to packages/ipfs/src/http/api/resources/files/chmod.js index 11b3511cb8..41868ffd54 100644 --- a/packages/ipfs-mfs/src/http/chmod.js +++ b/packages/ipfs/src/http/api/resources/files/chmod.js @@ -3,8 +3,6 @@ const Joi = require('@hapi/joi') const mfsChmod = { - method: 'POST', - path: '/api/v0/files/chmod', async handler (request, h) { const { ipfs } = request.server.app @@ -41,6 +39,18 @@ const mfsChmod = { hashAlg: Joi.string().default('sha2-256'), shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) + .rename('shard-split-threshold', 'shardSplitThreshold', { + override: true, + ignoreUndefined: true + }) + .rename('hash-alg', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('hash', 'hashAlg', { + override: true, + ignoreUndefined: true + }) } } } diff --git a/packages/ipfs-mfs/src/http/cp.js b/packages/ipfs/src/http/api/resources/files/cp.js similarity index 55% rename from packages/ipfs-mfs/src/http/cp.js rename to packages/ipfs/src/http/api/resources/files/cp.js index c27289857e..fbd86b86de 100644 --- a/packages/ipfs-mfs/src/http/cp.js +++ b/packages/ipfs/src/http/api/resources/files/cp.js @@ -3,8 +3,6 @@ const Joi = require('@hapi/joi') const mfsCp = { - method: 'POST', - path: '/api/v0/files/cp', async handler (request, h) { const { ipfs } = request.server.app @@ -14,6 +12,7 @@ const mfsCp = { parents, flush, hashAlg, + cidVersion, shardSplitThreshold } = request.query @@ -21,6 +20,7 @@ const mfsCp = { parents, flush, hashAlg, + cidVersion, shardSplitThreshold }) @@ -35,12 +35,32 @@ const mfsCp = { stripUnknown: true }, query: Joi.object().keys({ - arg: Joi.array().items(Joi.string()).min(2), + arg: Joi.array().required().items(Joi.string()).min(2), parents: Joi.boolean().default(false), flush: Joi.boolean().default(true), hashAlg: Joi.string().default('sha2-256'), + cidVersion: Joi.number().integer().valid([ + 0, + 1 + ]).default(0), shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) + .rename('shard-split-threshold', 'shardSplitThreshold', { + override: true, + ignoreUndefined: true + }) + .rename('hash-alg', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('hash', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('cid-version', 'cidVersion', { + override: true, + ignoreUndefined: true + }) } } } diff --git a/packages/ipfs-mfs/src/http/flush.js b/packages/ipfs/src/http/api/resources/files/flush.js similarity index 77% rename from packages/ipfs-mfs/src/http/flush.js rename to packages/ipfs/src/http/api/resources/files/flush.js index dda494dfac..d178359c9a 100644 --- a/packages/ipfs-mfs/src/http/flush.js +++ b/packages/ipfs/src/http/api/resources/files/flush.js @@ -2,13 +2,7 @@ const Joi = require('@hapi/joi') -const { - FILE_SEPARATOR -} = 
require('../core/utils/constants') - const mfsFlush = { - method: 'POST', - path: '/api/v0/files/flush', async handler (request, h) { const { ipfs @@ -18,7 +12,7 @@ const mfsFlush = { cidBase } = request.query - let cid = await ipfs.files.flush(arg || FILE_SEPARATOR, {}) + let cid = await ipfs.files.flush(arg || '/', {}) if (cidBase && cidBase !== 'base58btc' && cid.version === 0) { cid = cid.toV1() diff --git a/packages/ipfs-mfs/src/http/index.js b/packages/ipfs/src/http/api/resources/files/index.js similarity index 95% rename from packages/ipfs-mfs/src/http/index.js rename to packages/ipfs/src/http/api/resources/files/index.js index 96cb1ad7af..5fa25ed83e 100644 --- a/packages/ipfs-mfs/src/http/index.js +++ b/packages/ipfs/src/http/api/resources/files/index.js @@ -12,7 +12,7 @@ const stat = require('./stat') const touch = require('./touch') const write = require('./write') -module.exports = [ +module.exports = { chmod, cp, flush, @@ -24,4 +24,4 @@ module.exports = [ stat, touch, write -] +} diff --git a/packages/ipfs-mfs/src/http/ls.js b/packages/ipfs/src/http/api/resources/files/ls.js similarity index 57% rename from packages/ipfs-mfs/src/http/ls.js rename to packages/ipfs/src/http/api/resources/files/ls.js index 3da925728d..b05907766e 100644 --- a/packages/ipfs-mfs/src/http/ls.js +++ b/packages/ipfs/src/http/api/resources/files/ls.js @@ -1,11 +1,10 @@ 'use strict' const Joi = require('@hapi/joi') -const { - PassThrough -} = require('stream') -const toStream = require('it-to-stream') const all = require('it-all') +const map = require('it-map') +const pipe = require('it-pipe') +const streamResponse = require('../../../utils/stream-response') const mapEntry = (entry, options) => { options = options || {} @@ -14,8 +13,7 @@ const mapEntry = (entry, options) => { Name: entry.name, Type: options.long ? entry.type : 0, Size: options.long ? entry.size || 0 : 0, - Hash: entry.cid.toString(options.cidBase), - Mode: entry.mode.toString(8).padStart(4, '0') + Hash: entry.cid.toString(options.cidBase) } if (entry.mtime) { @@ -26,12 +24,14 @@ const mapEntry = (entry, options) => { } } + if (entry.mode != null) { + output.Mode = entry.mode.toString(8).padStart(4, '0') + } + return output } const mfsLs = { - method: 'POST', - path: '/api/v0/files/ls', async handler (request, h) { const { ipfs @@ -44,28 +44,11 @@ const mfsLs = { } = request.query if (stream) { - const responseStream = await new Promise((resolve, reject) => { - const readableStream = toStream.readable(ipfs.files.ls(arg), { objectMode: true }) - - const passThrough = new PassThrough() - - readableStream.on('data', (entry) => { - resolve(passThrough) - passThrough.write(JSON.stringify(mapEntry(entry, { cidBase, long })) + '\n') - }) - - readableStream.once('end', (entry) => { - resolve(passThrough) - passThrough.end(entry ? 
JSON.stringify(mapEntry(entry, { cidBase, long })) + '\n' : undefined) - }) - - readableStream.once('error', (err) => { - passThrough.end() - reject(err) - }) - }) - - return h.response(responseStream).header('X-Stream-Output', '1') + return streamResponse(request, h, () => pipe( + ipfs.files.ls(arg), + source => map(source, (entry) => mapEntry(entry, { cidBase, long })), + source => map(source, (entry) => JSON.stringify(entry) + '\n') + )) } const files = await all(ipfs.files.ls(arg)) diff --git a/packages/ipfs-mfs/src/http/mkdir.js b/packages/ipfs/src/http/api/resources/files/mkdir.js similarity index 66% rename from packages/ipfs-mfs/src/http/mkdir.js rename to packages/ipfs/src/http/api/resources/files/mkdir.js index 5c4900fbe0..9fa59f0d73 100644 --- a/packages/ipfs-mfs/src/http/mkdir.js +++ b/packages/ipfs/src/http/api/resources/files/mkdir.js @@ -4,8 +4,6 @@ const Joi = require('@hapi/joi') const parseMtime = require('./utils/parse-mtime') const mfsMkdir = { - method: 'POST', - path: '/api/v0/files/mkdir', async handler (request, h) { const { ipfs @@ -41,7 +39,7 @@ const mfsMkdir = { stripUnknown: true }, query: Joi.object().keys({ - arg: Joi.string().required(), + arg: Joi.string().trim().min(1).required().error(new Error('no path given')), mode: Joi.string(), mtime: Joi.number().integer(), mtimeNsecs: Joi.number().integer().min(0), @@ -58,6 +56,26 @@ const mfsMkdir = { override: true, ignoreUndefined: true }) + .rename('shard-split-threshold', 'shardSplitThreshold', { + override: true, + ignoreUndefined: true + }) + .rename('hash-alg', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('hash', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('cid-version', 'cidVersion', { + override: true, + ignoreUndefined: true + }) + .rename('mtime-nsecs', 'mtimeNsecs', { + override: true, + ignoreUndefined: true + }) } } } diff --git a/packages/ipfs-mfs/src/http/mv.js b/packages/ipfs/src/http/api/resources/files/mv.js similarity index 66% rename from packages/ipfs-mfs/src/http/mv.js rename to packages/ipfs/src/http/api/resources/files/mv.js index 1a6f8e62c6..4d6adb60c7 100644 --- a/packages/ipfs-mfs/src/http/mv.js +++ b/packages/ipfs/src/http/api/resources/files/mv.js @@ -3,8 +3,6 @@ const Joi = require('@hapi/joi') const mfsMv = { - method: 'POST', - path: '/api/v0/files/mv', async handler (request, h) { const { ipfs @@ -39,7 +37,7 @@ const mfsMv = { stripUnknown: true }, query: Joi.object().keys({ - arg: Joi.array().items(Joi.string()).min(2), + arg: Joi.array().required().items(Joi.string()).min(2), recursive: Joi.boolean().default(false), parents: Joi.boolean().default(false), hashAlg: Joi.string().default('sha2-256'), @@ -50,6 +48,22 @@ const mfsMv = { flush: Joi.boolean().default(true), shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) + .rename('shard-split-threshold', 'shardSplitThreshold', { + override: true, + ignoreUndefined: true + }) + .rename('hash-alg', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('hash', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('cid-version', 'cidVersion', { + override: true, + ignoreUndefined: true + }) } } } diff --git a/packages/ipfs-mfs/src/http/read.js b/packages/ipfs/src/http/api/resources/files/read.js similarity index 55% rename from packages/ipfs-mfs/src/http/read.js rename to packages/ipfs/src/http/api/resources/files/read.js index c2e40d0452..7998e62dcc 100644 --- a/packages/ipfs-mfs/src/http/read.js +++ 
b/packages/ipfs/src/http/api/resources/files/read.js @@ -1,15 +1,10 @@ 'use strict' const Joi = require('@hapi/joi') -const { - PassThrough -} = require('stream') -const toStream = require('it-to-stream') +const streamResponse = require('../../../utils/stream-response') const mfsRead = { - method: 'POST', - path: '/api/v0/files/read', - async handler (request, h) { + handler (request, h) { const { ipfs } = request.server.app @@ -19,27 +14,10 @@ const mfsRead = { length } = request.query - const responseStream = await new Promise((resolve, reject) => { - const stream = toStream.readable(ipfs.files.read(arg, { - offset, - length - })) - - stream.once('data', (chunk) => { - const passThrough = new PassThrough() - - resolve(passThrough) - - passThrough.write(chunk) - stream.pipe(passThrough) - }) - - stream.once('error', (error) => { - reject(error) - }) - }) - - return h.response(responseStream).header('X-Stream-Output', '1') + return streamResponse(request, h, () => ipfs.files.read(arg, { + offset, + length + })) }, options: { validate: { diff --git a/packages/ipfs-mfs/src/http/rm.js b/packages/ipfs/src/http/api/resources/files/rm.js similarity index 50% rename from packages/ipfs-mfs/src/http/rm.js rename to packages/ipfs/src/http/api/resources/files/rm.js index 7690cd2502..b9639304b0 100644 --- a/packages/ipfs-mfs/src/http/rm.js +++ b/packages/ipfs/src/http/api/resources/files/rm.js @@ -3,20 +3,23 @@ const Joi = require('@hapi/joi') const mfsRm = { - method: 'POST', - path: '/api/v0/files/rm', + async handler (request, h) { const { ipfs } = request.server.app const { arg, - recursive + recursive, + shardSplitThreshold } = request.query - await ipfs.files.rm(arg, { - recursive - }) + const args = [...arg, { + recursive, + shardSplitThreshold + }] + + await ipfs.files.rm.apply(null, args) return h.response() }, @@ -27,13 +30,18 @@ const mfsRm = { stripUnknown: true }, query: Joi.object().keys({ - arg: Joi.string().required(), - recursive: Joi.boolean().default(false) + arg: Joi.array().required().items(Joi.string()).min(1).single(), + recursive: Joi.boolean().default(false), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) .rename('r', 'recursive', { override: true, ignoreUndefined: true }) + .rename('shard-split-threshold', 'shardSplitThreshold', { + override: true, + ignoreUndefined: true + }) } } } diff --git a/packages/ipfs-mfs/src/http/stat.js b/packages/ipfs/src/http/api/resources/files/stat.js similarity index 73% rename from packages/ipfs-mfs/src/http/stat.js rename to packages/ipfs/src/http/api/resources/files/stat.js index 08aee3bdb9..9875fde248 100644 --- a/packages/ipfs-mfs/src/http/stat.js +++ b/packages/ipfs/src/http/api/resources/files/stat.js @@ -3,8 +3,6 @@ const Joi = require('@hapi/joi') const mfsStat = { - method: 'POST', - path: '/api/v0/files/stat', async handler (request, h) { const { ipfs @@ -23,7 +21,7 @@ const mfsStat = { withLocal }) - return h.response({ + const output = { Type: stats.type, Blocks: stats.blocks, Size: stats.size, @@ -31,11 +29,22 @@ const mfsStat = { CumulativeSize: stats.cumulativeSize, WithLocality: stats.withLocality, Local: stats.local, - SizeLocal: stats.sizeLocal, - Mtime: stats.mtime ? stats.mtime.secs : undefined, - MtimeNsecs: stats.mtime ? 
stats.mtime.nsecs : undefined, - Mode: stats.mode.toString(8).padStart(4, '0') - }) + SizeLocal: stats.sizeLocal + } + + if (stats.mtime) { + output.Mtime = stats.mtime.secs + + if (stats.mtime.nsecs) { + output.MtimeNsecs = stats.mtime.nsecs + } + } + + if (stats.mode != null) { + output.Mode = stats.mode.toString(8).padStart(4, '0') + } + + return h.response(output) }, options: { validate: { diff --git a/packages/ipfs-mfs/src/http/touch.js b/packages/ipfs/src/http/api/resources/files/touch.js similarity index 65% rename from packages/ipfs-mfs/src/http/touch.js rename to packages/ipfs/src/http/api/resources/files/touch.js index 1154385033..5f9ba067db 100644 --- a/packages/ipfs-mfs/src/http/touch.js +++ b/packages/ipfs/src/http/api/resources/files/touch.js @@ -4,8 +4,6 @@ const Joi = require('@hapi/joi') const parseMtime = require('./utils/parse-mtime') const mfsTouch = { - method: 'POST', - path: '/api/v0/files/touch', async handler (request, h) { const { ipfs @@ -48,6 +46,26 @@ const mfsTouch = { flush: Joi.boolean().default(true), shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) + .rename('shard-split-threshold', 'shardSplitThreshold', { + override: true, + ignoreUndefined: true + }) + .rename('mtime-nsecs', 'mtimeNsecs', { + override: true, + ignoreUndefined: true + }) + .rename('hash-alg', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('hash', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('cid-version', 'cidVersion', { + override: true, + ignoreUndefined: true + }) } } } diff --git a/packages/ipfs-mfs/src/http/utils/parse-mtime.js b/packages/ipfs/src/http/api/resources/files/utils/parse-mtime.js similarity index 100% rename from packages/ipfs-mfs/src/http/utils/parse-mtime.js rename to packages/ipfs/src/http/api/resources/files/utils/parse-mtime.js diff --git a/packages/ipfs-mfs/src/http/write.js b/packages/ipfs/src/http/api/resources/files/write.js similarity index 68% rename from packages/ipfs-mfs/src/http/write.js rename to packages/ipfs/src/http/api/resources/files/write.js index 542955f19a..50763655d2 100644 --- a/packages/ipfs-mfs/src/http/write.js +++ b/packages/ipfs/src/http/api/resources/files/write.js @@ -1,12 +1,11 @@ 'use strict' const Joi = require('@hapi/joi') -const multipart = require('ipfs-multipart') +const multipart = require('../../../utils/multipart-request-parser') const Boom = require('@hapi/boom') +const drain = require('it-drain') const mfsWrite = { - method: 'POST', - path: '/api/v0/files/write', async handler (request, h) { const { ipfs @@ -55,6 +54,10 @@ const mfsWrite = { mode: entry.mode, mtime: entry.mtime }) + + // if we didn't read the whole body, read it and discard the remainder + // otherwise the request will never end + await drain(entry.content) } } @@ -63,7 +66,8 @@ const mfsWrite = { options: { payload: { parse: false, - output: 'stream' + output: 'stream', + maxBytes: Number.MAX_SAFE_INTEGER }, validate: { options: { @@ -71,7 +75,7 @@ const mfsWrite = { stripUnknown: true }, query: Joi.object().keys({ - arg: Joi.string().required(), + arg: Joi.string().regex(/^\/.+/).required(), offset: Joi.number().integer().min(0), length: Joi.number().integer().min(0), create: Joi.boolean().default(false), @@ -105,6 +109,30 @@ const mfsWrite = { override: true, ignoreUndefined: true }) + .rename('shard-split-threshold', 'shardSplitThreshold', { + override: true, + ignoreUndefined: true + }) + .rename('hash-alg', 'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('hash', 
'hashAlg', { + override: true, + ignoreUndefined: true + }) + .rename('cid-version', 'cidVersion', { + override: true, + ignoreUndefined: true + }) + .rename('raw-leaves', 'rawLeaves', { + override: true, + ignoreUndefined: true + }) + .rename('reduce-single-leaf-to-self', 'reduceSingleLeafToSelf', { + override: true, + ignoreUndefined: true + }) } } } diff --git a/packages/ipfs/src/http/api/resources/index.js b/packages/ipfs/src/http/api/resources/index.js index ebf0f6aa35..f54e59c903 100644 --- a/packages/ipfs/src/http/api/resources/index.js +++ b/packages/ipfs/src/http/api/resources/index.js @@ -21,3 +21,4 @@ exports.stats = require('./stats') exports.resolve = require('./resolve') exports.name = require('./name') exports.dht = require('./dht') +exports.files = require('./files') diff --git a/packages/ipfs/src/http/api/resources/object.js b/packages/ipfs/src/http/api/resources/object.js index ce354618b6..1c79942cf4 100644 --- a/packages/ipfs/src/http/api/resources/object.js +++ b/packages/ipfs/src/http/api/resources/object.js @@ -1,7 +1,7 @@ 'use strict' const CID = require('cids') -const multipart = require('ipfs-multipart') +const multipart = require('../../utils/multipart-request-parser') const all = require('it-all') const dagPB = require('ipld-dag-pb') const { DAGNode, DAGLink } = dagPB diff --git a/packages/ipfs/src/http/api/routes/files.js b/packages/ipfs/src/http/api/routes/files.js new file mode 100644 index 0000000000..5b73d1cb28 --- /dev/null +++ b/packages/ipfs/src/http/api/routes/files.js @@ -0,0 +1,72 @@ +'use strict' + +const resources = require('../resources') + +module.exports = [ + { + method: 'POST', + path: '/api/v0/files/chmod', + options: resources.files.chmod.options, + handler: resources.files.chmod.handler + }, + { + method: 'POST', + path: '/api/v0/files/cp', + options: resources.files.cp.options, + handler: resources.files.cp.handler + }, + { + method: 'POST', + path: '/api/v0/files/flush', + options: resources.files.flush.options, + handler: resources.files.flush.handler + }, + { + method: 'POST', + path: '/api/v0/files/ls', + options: resources.files.ls.options, + handler: resources.files.ls.handler + }, + { + method: 'POST', + path: '/api/v0/files/mkdir', + options: resources.files.mkdir.options, + handler: resources.files.mkdir.handler + }, + { + method: 'POST', + path: '/api/v0/files/mv', + options: resources.files.mv.options, + handler: resources.files.mv.handler + }, + { + method: 'POST', + path: '/api/v0/files/read', + options: resources.files.read.options, + handler: resources.files.read.handler + }, + { + method: 'POST', + path: '/api/v0/files/rm', + options: resources.files.rm.options, + handler: resources.files.rm.handler + }, + { + method: 'POST', + path: '/api/v0/files/stat', + options: resources.files.stat.options, + handler: resources.files.stat.handler + }, + { + method: 'POST', + path: '/api/v0/files/touch', + options: resources.files.touch.options, + handler: resources.files.touch.handler + }, + { + method: 'POST', + path: '/api/v0/files/write', + options: resources.files.write.options, + handler: resources.files.write.handler + } +] diff --git a/packages/ipfs/src/http/api/routes/index.js b/packages/ipfs/src/http/api/routes/index.js index 6e523c6214..94ceb86e10 100644 --- a/packages/ipfs/src/http/api/routes/index.js +++ b/packages/ipfs/src/http/api/routes/index.js @@ -14,7 +14,7 @@ module.exports = [ ...require('./swarm'), ...require('./bitswap'), ...require('./files-regular'), - ...require('ipfs-mfs/http'), + ...require('./files'), 
...require('./pubsub'), require('./debug'), ...require('./webui'), diff --git a/packages/ipfs-multipart/src/parser.js b/packages/ipfs/src/http/utils/multipart-request-parser.js similarity index 90% rename from packages/ipfs-multipart/src/parser.js rename to packages/ipfs/src/http/utils/multipart-request-parser.js index 922ee33256..b63ce6e776 100644 --- a/packages/ipfs-multipart/src/parser.js +++ b/packages/ipfs/src/http/utils/multipart-request-parser.js @@ -134,4 +134,15 @@ async function * parser (stream, options) { } } -module.exports = parser +/** + * Request Parser + * + * @param {Object} req - Request + * @param {Object} options - Options passed to stream constructors + * @returns {AsyncIterable} an async iterable of parsed multipart entries + */ +module.exports = (req, options = {}) => { + options.boundary = Content.type(req.headers['content-type']).boundary + + return parser(req.payload || req, options) +} diff --git a/packages/ipfs/test/cli/commands.js b/packages/ipfs/test/cli/commands.js index e75caf7fe5..48137f9982 100644 --- a/packages/ipfs/test/cli/commands.js +++ b/packages/ipfs/test/cli/commands.js @@ -4,7 +4,7 @@ const { expect } = require('interface-ipfs-core/src/utils/mocha') const cli = require('../utils/cli') -const commandCount = 98 +const commandCount = 110 describe('commands', () => { it('list the commands', async () => { diff --git a/packages/ipfs/test/cli/daemon.js b/packages/ipfs/test/cli/daemon.js index 6ff510b179..4ef68f4378 100644 --- a/packages/ipfs/test/cli/daemon.js +++ b/packages/ipfs/test/cli/daemon.js @@ -7,7 +7,7 @@ const ipfsCmd = require('../utils/ipfs-exec') const { isWindows } = require('../utils/platforms') const os = require('os') const path = require('path') -const hat = require('hat') +const nanoid = require('nanoid') const fs = require('fs') const tempWrite = require('temp-write') const pkg = require('../../package.json') @@ -96,7 +96,7 @@ describe('daemon', () => { let ipfs beforeEach(() => { - repoPath = path.join(os.tmpdir(), 'ipfs-test-not-found-' + hat()) + repoPath = path.join(os.tmpdir(), 'ipfs-test-not-found-' + nanoid()) ipfs = ipfsCmd(repoPath) }) diff --git a/packages/ipfs/test/cli/files.js b/packages/ipfs/test/cli/files-regular.js similarity index 100% rename from packages/ipfs/test/cli/files.js rename to packages/ipfs/test/cli/files-regular.js diff --git a/packages/ipfs-mfs/test/cli/chmod.js b/packages/ipfs/test/cli/files/chmod.js similarity index 96% rename from packages/ipfs-mfs/test/cli/chmod.js rename to packages/ipfs/test/cli/files/chmod.js index b459f3a408..b7310dcc8f 100644 --- a/packages/ipfs-mfs/test/cli/chmod.js +++ b/packages/ipfs/test/cli/files/chmod.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') function defaultOptions (modification = {}) { diff --git a/packages/ipfs-mfs/test/cli/cp.js b/packages/ipfs/test/cli/files/cp.js similarity index 96% rename from packages/ipfs-mfs/test/cli/cp.js rename to packages/ipfs/test/cli/files/cp.js index a86910eeda..cf76dafb31 100644 --- a/packages/ipfs-mfs/test/cli/cp.js +++ b/packages/ipfs/test/cli/files/cp.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') function 
defaultOptions (modification = {}) { diff --git a/packages/ipfs-mfs/test/cli/flush.js b/packages/ipfs/test/cli/files/flush.js similarity index 92% rename from packages/ipfs-mfs/test/cli/flush.js rename to packages/ipfs/test/cli/files/flush.js index 83c40953fc..8f029df480 100644 --- a/packages/ipfs-mfs/test/cli/flush.js +++ b/packages/ipfs/test/cli/files/flush.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') const CID = require('cids') const cid = new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') diff --git a/packages/ipfs-mfs/test/core/index.js b/packages/ipfs/test/cli/files/index.js similarity index 90% rename from packages/ipfs-mfs/test/core/index.js rename to packages/ipfs/test/cli/files/index.js index f0baeb2458..53d4c3c955 100644 --- a/packages/ipfs-mfs/test/core/index.js +++ b/packages/ipfs/test/cli/files/index.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -describe('core', () => { +describe('files', () => { require('./chmod') require('./cp') require('./flush') diff --git a/packages/ipfs-mfs/test/cli/ls.js b/packages/ipfs/test/cli/files/ls.js similarity index 97% rename from packages/ipfs-mfs/test/cli/ls.js rename to packages/ipfs/test/cli/files/ls.js index cfdedf2726..0660951295 100644 --- a/packages/ipfs-mfs/test/cli/ls.js +++ b/packages/ipfs/test/cli/files/ls.js @@ -1,10 +1,10 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') const CID = require('cids') const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') diff --git a/packages/ipfs-mfs/test/cli/mkdir.js b/packages/ipfs/test/cli/files/mkdir.js similarity index 96% rename from packages/ipfs-mfs/test/cli/mkdir.js rename to packages/ipfs/test/cli/files/mkdir.js index af041b97f0..3d15322889 100644 --- a/packages/ipfs-mfs/test/cli/mkdir.js +++ b/packages/ipfs/test/cli/files/mkdir.js @@ -1,10 +1,10 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') function defaultOptions (modification = {}) { const options = { diff --git a/packages/ipfs-mfs/test/cli/mv.js b/packages/ipfs/test/cli/files/mv.js similarity index 96% rename from packages/ipfs-mfs/test/cli/mv.js rename to packages/ipfs/test/cli/files/mv.js index 860405556d..c80f61ad61 100644 --- a/packages/ipfs-mfs/test/cli/mv.js +++ b/packages/ipfs/test/cli/files/mv.js @@ -1,10 +1,10 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') function defaultOptions (modification = {}) { const options = { diff --git a/packages/ipfs-mfs/test/cli/read.js 
b/packages/ipfs/test/cli/files/read.js similarity index 94% rename from packages/ipfs-mfs/test/cli/read.js rename to packages/ipfs/test/cli/files/read.js index 34aa0e4daf..525ab50ca5 100644 --- a/packages/ipfs-mfs/test/cli/read.js +++ b/packages/ipfs/test/cli/files/read.js @@ -1,10 +1,10 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') function defaultOptions (modification = {}) { const options = { diff --git a/packages/ipfs-mfs/test/cli/rm.js b/packages/ipfs/test/cli/files/rm.js similarity index 89% rename from packages/ipfs-mfs/test/cli/rm.js rename to packages/ipfs/test/cli/files/rm.js index 1370e4b310..2fce2b664d 100644 --- a/packages/ipfs-mfs/test/cli/rm.js +++ b/packages/ipfs/test/cli/files/rm.js @@ -1,10 +1,10 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') function defaultOptions (modification = {}) { const options = { diff --git a/packages/ipfs-mfs/test/cli/stat.js b/packages/ipfs/test/cli/files/stat.js similarity index 95% rename from packages/ipfs-mfs/test/cli/stat.js rename to packages/ipfs/test/cli/files/stat.js index 20cded1199..213784812e 100644 --- a/packages/ipfs-mfs/test/cli/stat.js +++ b/packages/ipfs/test/cli/files/stat.js @@ -1,10 +1,10 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') const CID = require('cids') const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') diff --git a/packages/ipfs-mfs/test/cli/touch.js b/packages/ipfs/test/cli/files/touch.js similarity index 94% rename from packages/ipfs-mfs/test/cli/touch.js rename to packages/ipfs/test/cli/files/touch.js index 8f9d29439e..ff263cf81f 100644 --- a/packages/ipfs-mfs/test/cli/touch.js +++ b/packages/ipfs/test/cli/files/touch.js @@ -1,10 +1,10 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = require('../../utils/cli') const sinon = require('sinon') -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') function defaultOptions (modification = {}) { const options = { diff --git a/packages/ipfs-mfs/test/cli/write.js b/packages/ipfs/test/cli/files/write.js similarity index 98% rename from packages/ipfs-mfs/test/cli/write.js rename to packages/ipfs/test/cli/files/write.js index 935d7c8197..d6b2c4c55b 100644 --- a/packages/ipfs-mfs/test/cli/write.js +++ b/packages/ipfs/test/cli/files/write.js @@ -1,10 +1,10 @@ /* eslint-env mocha */ 'use strict' -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const cli = 
require('../../utils/cli') const sinon = require('sinon') -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') function defaultOptions (modification = {}) { const options = { diff --git a/packages/ipfs/test/cli/general.js b/packages/ipfs/test/cli/general.js index 59c2392a0d..f475fed85c 100644 --- a/packages/ipfs/test/cli/general.js +++ b/packages/ipfs/test/cli/general.js @@ -4,7 +4,7 @@ const os = require('os') const fs = require('fs').promises const path = require('path') -const hat = require('hat') +const nanoid = require('nanoid') const { expect } = require('interface-ipfs-core/src/utils/mocha') const { repoVersion } = require('ipfs-repo') const { promisify } = require('util') @@ -42,7 +42,7 @@ describe('--migrate', () => { } beforeEach(async () => { - repoPath = path.join(os.tmpdir(), `ipfs-${hat()}`) + repoPath = path.join(os.tmpdir(), `ipfs-${nanoid()}`) const v7RepoPath = path.join(__dirname, '../fixtures/v7-repo') await ncp(v7RepoPath, repoPath) ipfs = ipfsExec(repoPath) diff --git a/packages/ipfs/test/cli/init.js b/packages/ipfs/test/cli/init.js index 85a689d538..32fba29736 100644 --- a/packages/ipfs/test/cli/init.js +++ b/packages/ipfs/test/cli/init.js @@ -5,7 +5,7 @@ const { expect } = require('interface-ipfs-core/src/utils/mocha') const path = require('path') const fs = require('fs') const clean = require('../utils/clean') -const hat = require('hat') +const nanoid = require('nanoid') const ipfsExec = require('../utils/ipfs-exec') const os = require('os') const tempWrite = require('temp-write') @@ -32,7 +32,7 @@ describe('init', function () { } beforeEach(() => { - repoPath = os.tmpdir() + '/ipfs-' + hat() + repoPath = os.tmpdir() + '/ipfs-' + nanoid() ipfs = ipfsExec(repoPath) }) diff --git a/packages/ipfs/test/core/bitswap.spec.js b/packages/ipfs/test/core/bitswap.spec.js index d2421901ee..9d40b17473 100644 --- a/packages/ipfs/test/core/bitswap.spec.js +++ b/packages/ipfs/test/core/bitswap.spec.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const hat = require('hat') +const nanoid = require('nanoid') const pmap = require('p-map') const { expect } = require('interface-ipfs-core/src/utils/mocha') const Block = require('ipfs-block') @@ -12,7 +12,7 @@ const concat = require('it-concat') const factory = require('../utils/factory') const makeBlock = async () => { - const d = Buffer.from(`IPFS is awesome ${hat()}`) + const d = Buffer.from(`IPFS is awesome ${nanoid()}`) const h = await multihashing(d, 'sha2-256') return new Block(d, new CID(h)) @@ -65,7 +65,7 @@ describe('bitswap', function () { it('2 peers', async () => { // TODO make this test more interesting (10Mb file) // TODO remove randomness from the test - const file = Buffer.from(`I love IPFS <3 ${hat()}`) + const file = Buffer.from(`I love IPFS <3 ${nanoid()}`) const remote = (await df.spawn({ type: 'js' })).api const proc = (await df.spawn({ type: 'proc' })).api proc.swarm.connect(remote.peerId.addresses[0]) diff --git a/packages/ipfs/test/core/block.spec.js b/packages/ipfs/test/core/block.spec.js index 7721340289..dd971b0c85 100644 --- a/packages/ipfs/test/core/block.spec.js +++ b/packages/ipfs/test/core/block.spec.js @@ -3,7 +3,7 @@ 'use strict' const { expect } = require('interface-ipfs-core/src/utils/mocha') -const hat = require('hat') +const nanoid = require('nanoid') const all = require('it-all') const factory = require('../utils/factory') @@ -27,7 +27,7 @@ describe('block', () => { describe('put', () => { it('should not error when passed null options', () => { - return 
ipfs.block.put(Buffer.from(hat()), null) + return ipfs.block.put(Buffer.from(nanoid()), null) }) }) @@ -47,7 +47,7 @@ describe('block', () => { }) it('should not error when passed null options', async () => { - const block = await ipfs.block.put(Buffer.from(hat())) + const block = await ipfs.block.put(Buffer.from(nanoid())) return ipfs.block.stat(block.cid, null) }) }) diff --git a/packages/ipfs/test/core/circuit-relay.spec.js b/packages/ipfs/test/core/circuit-relay.spec.js index 5e65b3cbdd..39dbc96d24 100644 --- a/packages/ipfs/test/core/circuit-relay.spec.js +++ b/packages/ipfs/test/core/circuit-relay.spec.js @@ -4,7 +4,7 @@ const { expect } = require('interface-ipfs-core/src/utils/mocha') const all = require('it-all') const concat = require('it-concat') -const crypto = require('crypto') +const randomBytes = require('iso-random-stream/src/random') const factory = require('../utils/factory') const df = factory() @@ -60,7 +60,7 @@ describe('circuit relay', () => { after(() => df.clean()) it('should transfer via relay', async () => { - const data = crypto.randomBytes(128) + const data = randomBytes(128) const res = await all(nodeA.add(data)) const buffer = await concat(nodeB.cat(res[0].cid)) expect(buffer.slice()).to.deep.equal(data) diff --git a/packages/ipfs/test/core/create-node.spec.js b/packages/ipfs/test/core/create-node.spec.js index 8b63ee1ae0..2e3681a468 100644 --- a/packages/ipfs/test/core/create-node.spec.js +++ b/packages/ipfs/test/core/create-node.spec.js @@ -6,7 +6,7 @@ const { expect } = require('interface-ipfs-core/src/utils/mocha') const sinon = require('sinon') const os = require('os') const path = require('path') -const hat = require('hat') +const nanoid = require('nanoid') const { isNode } = require('ipfs-utils/src/env') const IPFS = require('../../src/core') @@ -26,7 +26,7 @@ describe('create node', function () { this.timeout(80 * 1000) const node = await IPFS.create({ - repo: path.join(os.tmpdir(), 'ipfs-repo-' + hat()), + repo: path.join(os.tmpdir(), 'ipfs-repo-' + nanoid()), init: { bits: 512 }, config: { Addresses: { diff --git a/packages/ipfs/test/core/files-sharding.spec.js b/packages/ipfs/test/core/files-sharding.spec.js index 26b3dec066..0e722b0aac 100644 --- a/packages/ipfs/test/core/files-sharding.spec.js +++ b/packages/ipfs/test/core/files-sharding.spec.js @@ -20,7 +20,9 @@ describe('files directory (sharding tests)', function () { let ipfsd before(async function () { - ipfsd = await df.spawn() + ipfsd = await df.spawn({ + ipfsOptions: { EXPERIMENTAL: { sharding: false } } + }) ipfs = ipfsd.api }) diff --git a/packages/ipfs/test/core/files.spec.js b/packages/ipfs/test/core/files.spec.js index 91b8dcb5dd..ba72b90466 100644 --- a/packages/ipfs/test/core/files.spec.js +++ b/packages/ipfs/test/core/files.spec.js @@ -3,7 +3,7 @@ 'use strict' const { expect } = require('interface-ipfs-core/src/utils/mocha') -const hat = require('hat') +const nanoid = require('nanoid') const all = require('it-all') const factory = require('../utils/factory') @@ -30,7 +30,7 @@ describe('files', function () { describe('add', () => { it('should not error when passed null options', async () => { - await all(ipfs.add(Buffer.from(hat()), null)) + await all(ipfs.add(Buffer.from(nanoid()), null)) }) it('should add a file with a v1 CID', async () => { diff --git a/packages/ipfs/test/core/init.spec.js b/packages/ipfs/test/core/init.spec.js index b0835dc738..72ce000c09 100644 --- a/packages/ipfs/test/core/init.spec.js +++ b/packages/ipfs/test/core/init.spec.js @@ -4,7 +4,7 @@ const { expect } 
= require('interface-ipfs-core/src/utils/mocha') const { isNode } = require('ipfs-utils/src/env') -const hat = require('hat') +const nanoid = require('nanoid') const IPFS = require('../../src/core') const privateKey = 'CAASqAkwggSkAgEAAoIBAQChVmiObYo6pkKrMSd3OzW1cTL+RDmX1rkETYGKWV9TPXMNgElFTYoYHqT9QZomj5RI8iUmHccjzqr4J0mV+E0NpvHHOLlmDZ82lAw2Zx7saUkeQWvC0S9Z0o3aTx2sSubZV53rSomkZgQH4fYTs4RERejV4ltzLFdzQQBwWrBvlagpPHUCxKDUCnE5oIzdbD26ltWViPBWr7TfotzC8Lyi/tceqCpHMUJGMbsVgypnlgpey07MBvs71dVh5LcRen/ztsQO6Yju4D3QgWoyD0SIUdJFvBzEwL9bSiA3QjUc/fkGd7EcdN5bebYOqAi4ZIiAMLp3i4+B8Tzq/acull43AgMBAAECggEBAIDgZE75o4SsEO9tKWht7L5OeXxxBUyMImkUfJkGQUZd/MzZIC5y/Q+9UvBW+gs5gCsw+onTGaM50Iq/32Ej4nE4XURVxIuH8BmJ86N1hlc010qK2cjajqeCsPulXT+m6XbOLYCpnv+q2idt0cL1EH/1FEPeOEztK8ION4qIdw36SoykfTx/RqtkKHtS01AwN82EOPbWk7huyQT5R5MsCZmRJXBFkpNtiL+8619BH2aVlghHO4NouF9wQjdz/ysVuyYg+3rX2cpGjuHDTZ6hVQiJD1lF6D+dua7UPyHYAG2iRQiKZmCjitt9ywzPxiRaYF/aZ02FEMWckZulR09axskCgYEAzjl6ER8WwxYHn4tHse+CrIIF2z5cscdrh7KSwd3Rse9hIIBDJ/0KkvoYd1IcWrS8ywLrRfSLIjEU9u7IN1m+IRVWJ61fXNqOHm9clAu6qNhCN6W2+JfxDkUygTwmsq0v3huO+qkiMQz+a4nAXJe8Utd36ywgPhVGxFa/7x1v1N0CgYEAyEdiYRFf1aQZcO7+B2FH+tkGJsB30VIBhcpG9EukuQUUulLHhScc/KRj+EFAACLdkTqlVI0xVYIWaaCXwoQCWKixjZ5mYPC+bBLgn4IoDS6XTdHtR7Vn3UUvGTKsM0/z4e8/0eSzGNCHoYez9IoBlPNic0sQuST4jzgS2RYnFCMCgYASWSzSLyjwTJp7CIJlg4Dl5l+tBRxsOOkJVssV8q2AnmLO6HqRKUNylkvs+eJJ88DEc0sJm1txvFo4KkCoJBT1jpduyk8szMlOTew3w99kvHEP0G+6KJKrCV8X/okW5q/WnC8ZgEjpglV0rfnugxWfbUpfIzrvKydzuqAzHzRfBQKBgQDANtKSeoxRjEbmfljLWHAure8bbgkQmfXgI7xpZdfXwqqcECpw/pLxXgycDHOSLeQcJ/7Y4RGCEXHVOk2sX+mokW6mjmmPjD4VlyCBtfcef6KzC1EBS3c9g9KqCln+fTOBmY7UsPu6SxiAzK7HeVP/Un8gS+Dm8DalrZlZQ8uJpQKBgF6mL/Xo/XUOiz2jAD18l8Y6s49bA9H2CoLpBGTV1LfY5yTFxRy4R3qnX/IzsKy567sbtkEFKJxplc/RzCQfrgbdj7k26SbKtHR3yERaFGRYq8UeAHeYC1/N19LF5BMQL4y5R4PJ1SFPeJCL/wXiMqs1maTqvKqtc4bbegNdwlxn' @@ -32,7 +32,7 @@ describe('init', function () { afterEach(() => repo.teardown()) it('should init successfully', async () => { - await ipfs.init({ bits: 512, pass: hat() }) + await ipfs.init({ bits: 512, pass: nanoid() }) const res = await repo.exists() expect(res).to.equal(true) @@ -46,7 +46,7 @@ describe('init', function () { it('should set # of bits in key', async function () { this.timeout(40 * 1000) - await ipfs.init({ bits: 1024, pass: hat() }) + await ipfs.init({ bits: 1024, pass: nanoid() }) const config = await repo.config.get() expect(config.Identity.PrivKey.length).is.above(256) @@ -60,7 +60,7 @@ describe('init', function () { }) it('should write init docs', async () => { - await ipfs.init({ bits: 512, pass: hat() }) + await ipfs.init({ bits: 512, pass: nanoid() }) const multihash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB' const node = await ipfs.object.get(multihash, { enc: 'base58' }) diff --git a/packages/ipfs/test/core/interface.spec.js b/packages/ipfs/test/core/interface.spec.js index 837e40a36c..6abef6e66f 100644 --- a/packages/ipfs/test/core/interface.spec.js +++ b/packages/ipfs/test/core/interface.spec.js @@ -33,7 +33,9 @@ describe('interface-ipfs-core tests', function () { } }) - tests.files(commonFactory, { + tests.files(factory({ + ipfsOptions: { EXPERIMENTAL: { sharding: true } } + }), { skip: isNode ? 
diff --git a/packages/ipfs/test/core/interface.spec.js b/packages/ipfs/test/core/interface.spec.js
index 837e40a36c..6abef6e66f 100644
--- a/packages/ipfs/test/core/interface.spec.js
+++ b/packages/ipfs/test/core/interface.spec.js
@@ -33,7 +33,9 @@ describe('interface-ipfs-core tests', function () {
     }
   })
-  tests.files(commonFactory, {
+  tests.files(factory({
+    ipfsOptions: { EXPERIMENTAL: { sharding: true } }
+  }), {
     skip: isNode ? null : [{
       name: 'should make directory and specify mtime as hrtime',
       reason: 'Not designed to run in the browser'
diff --git a/packages/ipfs/test/core/key-exchange.spec.js b/packages/ipfs/test/core/key-exchange.spec.js
index ad090250f0..55c40d7e93 100644
--- a/packages/ipfs/test/core/key-exchange.spec.js
+++ b/packages/ipfs/test/core/key-exchange.spec.js
@@ -3,7 +3,7 @@
 'use strict'
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
-const hat = require('hat')
+const nanoid = require('nanoid')
 const factory = require('../utils/factory')
 describe('key exchange', function () {
@@ -11,12 +11,12 @@
   const df = factory()
   let ipfs
   let selfPem
-  const passwordPem = hat()
+  const passwordPem = nanoid()
   before(async () => {
     ipfs = (await df.spawn({
       ipfsOptions: {
-        pass: hat()
+        pass: nanoid()
       }
     })).api
   })
diff --git a/packages/ipfs/test/core/mfs-preload.spec.js b/packages/ipfs/test/core/mfs-preload.spec.js
index db0da28b5a..4c9cf068c6 100644
--- a/packages/ipfs/test/core/mfs-preload.spec.js
+++ b/packages/ipfs/test/core/mfs-preload.spec.js
@@ -4,14 +4,14 @@
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
 const delay = require('delay')
 const multihashing = require('multihashing-async')
-const hat = require('hat')
+const nanoid = require('nanoid')
 const { Buffer } = require('buffer')
 const CID = require('cids')
 const waitFor = require('../utils/wait-for')
 const mfsPreload = require('../../src/core/mfs-preload')
 const fakeCid = async () => {
-  const mh = await multihashing(Buffer.from(hat()), 'sha2-256')
+  const mh = await multihashing(Buffer.from(nanoid()), 'sha2-256')
   return new CID(mh)
 }
diff --git a/packages/ipfs/test/core/name-pubsub.js b/packages/ipfs/test/core/name-pubsub.js
index ed34a2991a..46059ecd1f 100644
--- a/packages/ipfs/test/core/name-pubsub.js
+++ b/packages/ipfs/test/core/name-pubsub.js
@@ -2,7 +2,7 @@
 /* eslint-env mocha */
 'use strict'
-const hat = require('hat')
+const nanoid = require('nanoid')
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
 const base64url = require('base64url')
 const { fromB58String } = require('multihashes')
@@ -32,8 +32,8 @@ describe('name-pubsub', function () {
     this.timeout(40 * 1000)
     nodes = await Promise.all([
-      df.spawn({ type: 'proc', ipfsOptions: { pass: hat(), EXPERIMENTAL: { ipnsPubsub: true } } }),
-      df.spawn({ type: 'proc', ipfsOptions: { pass: hat(), EXPERIMENTAL: { ipnsPubsub: true } } })
+      df.spawn({ type: 'proc', ipfsOptions: { pass: nanoid(), EXPERIMENTAL: { ipnsPubsub: true } } }),
+      df.spawn({ type: 'proc', ipfsOptions: { pass: nanoid(), EXPERIMENTAL: { ipnsPubsub: true } } })
     ])
     nodeA = nodes[0].api
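The name-pubsub hunk above shows the pattern most of these specs share: spawn nodes through the shared test factory, passing experimental flags through `ipfsOptions`. A usage sketch (the factory itself is defined in `test/utils/factory.js`, further down in this diff; assumes an async mocha context):

```js
// Sketch mirroring the name-pubsub setup above.
const nanoid = require('nanoid')
const factory = require('./utils/factory')
const df = factory()

let ipfs

before(async () => {
  const ipfsd = await df.spawn({
    type: 'proc', // run the node in-process instead of spawning a daemon
    ipfsOptions: { pass: nanoid(), EXPERIMENTAL: { ipnsPubsub: true } }
  })
  ipfs = ipfsd.api
})

after(() => df.clean()) // tears down every controller the factory spawned
```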
diff --git a/packages/ipfs/test/core/name.spec.js b/packages/ipfs/test/core/name.spec.js
index 10678c428c..76218472bb 100644
--- a/packages/ipfs/test/core/name.spec.js
+++ b/packages/ipfs/test/core/name.spec.js
@@ -1,7 +1,7 @@
 /* eslint-env mocha */
 'use strict'
-const hat = require('hat')
+const nanoid = require('nanoid')
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
 const sinon = require('sinon')
 const delay = require('delay')
@@ -81,7 +81,7 @@ describe('name', function () {
     let nodeB
     let nodeC
-    const createNode = () => df.spawn({ ipfsOptions: { pass: hat() } })
+    const createNode = () => df.spawn({ ipfsOptions: { pass: nanoid() } })
     before(async function () {
       this.timeout(70 * 1000)
@@ -104,7 +104,7 @@
     it('should recursively resolve to an IPFS hash', async function () {
       this.timeout(360 * 1000)
-      const keyName = hat()
+      const keyName = nanoid()
       const key = await nodeA.key.gen(keyName, { type: 'rsa', size: 2048 })
diff --git a/packages/ipfs/test/core/object.spec.js b/packages/ipfs/test/core/object.spec.js
index d7438753a3..153245d73d 100644
--- a/packages/ipfs/test/core/object.spec.js
+++ b/packages/ipfs/test/core/object.spec.js
@@ -3,7 +3,7 @@
 'use strict'
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
-const hat = require('hat')
+const nanoid = require('nanoid')
 const factory = require('../utils/factory')
 describe('object', function () {
@@ -26,21 +26,21 @@
     })
     it('should not error when passed null options', async () => {
-      const cid = await ipfs.object.put(Buffer.from(hat()))
+      const cid = await ipfs.object.put(Buffer.from(nanoid()))
       await ipfs.object.get(cid)
     })
   })
   describe('put', () => {
     it('should not error when passed null options', () => {
-      return ipfs.object.put(Buffer.from(hat()), null)
+      return ipfs.object.put(Buffer.from(nanoid()), null)
     })
   })
   describe('patch.addLink', () => {
     it('should not error when passed null options', async () => {
-      const aCid = await ipfs.object.put(Buffer.from(hat()))
-      const bCid = await ipfs.object.put(Buffer.from(hat()))
+      const aCid = await ipfs.object.put(Buffer.from(nanoid()))
+      const bCid = await ipfs.object.put(Buffer.from(nanoid()))
       const bNode = await ipfs.object.get(bCid)
       const link = {
@@ -55,8 +55,8 @@ describe('object', function () {
   describe('patch.rmLink', () => {
     it('should not error when passed null options', async () => {
-      const aCid = await ipfs.object.put(Buffer.from(hat()))
-      const bCid = await ipfs.object.put(Buffer.from(hat()))
+      const aCid = await ipfs.object.put(Buffer.from(nanoid()))
+      const bCid = await ipfs.object.put(Buffer.from(nanoid()))
       const bNode = await ipfs.object.get(bCid)
       const cCid = await ipfs.object.patch.addLink(aCid, {
@@ -72,15 +72,15 @@
   describe('patch.appendData', () => {
     it('should not error when passed null options', async () => {
-      const cid = await ipfs.object.put(Buffer.from(hat()), null)
-      await ipfs.object.patch.appendData(cid, Buffer.from(hat()), null)
+      const cid = await ipfs.object.put(Buffer.from(nanoid()), null)
+      await ipfs.object.patch.appendData(cid, Buffer.from(nanoid()), null)
     })
   })
   describe('patch.setData', () => {
     it('should not error when passed null options', async () => {
-      const cid = await ipfs.object.put(Buffer.from(hat()), null)
-      await ipfs.object.patch.setData(cid, Buffer.from(hat()), null)
+      const cid = await ipfs.object.put(Buffer.from(nanoid()), null)
+      await ipfs.object.patch.setData(cid, Buffer.from(nanoid()), null)
     })
   })
 })
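For orientation, the DAG-PB object API these specs exercise, in one round trip. This is a sketch inside an async context: the link shape follows the spec above, but the `{ name }` argument to `rmLink` is an assumption based on the interface-ipfs-core tests of this era rather than something shown in the hunk:

```js
// Round-trip sketch of ipfs.object and its patch helpers.
const aCid = await ipfs.object.put(Buffer.from(nanoid()))
const bCid = await ipfs.object.put(Buffer.from(nanoid()))
const bNode = await ipfs.object.get(bCid)

const cCid = await ipfs.object.patch.addLink(aCid, {
  name: 'child',          // link name
  cid: bCid,              // target node
  size: bNode.size        // cumulative size of the target
})
await ipfs.object.patch.rmLink(cCid, { name: 'child' }) // assumed argument shape
await ipfs.object.patch.appendData(aCid, Buffer.from('appended'))
await ipfs.object.patch.setData(aCid, Buffer.from('replaced'))
```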
diff --git a/packages/ipfs/test/core/preload.spec.js b/packages/ipfs/test/core/preload.spec.js
index 13e2d165a5..e60e6ed529 100644
--- a/packages/ipfs/test/core/preload.spec.js
+++ b/packages/ipfs/test/core/preload.spec.js
@@ -1,7 +1,7 @@
 /* eslint-env mocha */
 'use strict'
-const hat = require('hat')
+const nanoid = require('nanoid')
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
 const all = require('it-all')
 const MockPreloadNode = require('../utils/mock-preload-node')
@@ -36,7 +36,7 @@ describe('preload', () => {
   it('should preload content added with add', async function () {
     this.timeout(50 * 1000)
-    const res = await all(ipfs.add(Buffer.from(hat())))
+    const res = await all(ipfs.add(Buffer.from(nanoid())))
     await MockPreloadNode.waitForCids(res[0].cid)
   })
@@ -44,11 +44,11 @@
     this.timeout(50 * 1000)
     const res = await all(ipfs.add([{
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }, {
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }, {
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }]))
     await MockPreloadNode.waitForCids(res.map(file => file.cid))
@@ -59,13 +59,13 @@
     const res = await all(ipfs.add([{
       path: 'dir0/dir1/file0',
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }, {
       path: 'dir0/dir1/file1',
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }, {
       path: 'dir0/file2',
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }]))
     const rootDir = res.find(file => file.path === 'dir0')
@@ -79,13 +79,13 @@
     const res = await all(ipfs.add([{
       path: 'dir0/dir1/file0',
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }, {
       path: 'dir0/dir1/file1',
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }, {
       path: 'dir0/file2',
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }], { wrapWithDirectory: true }))
     const wrappingDir = res.find(file => file.path === '')
@@ -96,14 +96,14 @@
   it('should preload content retrieved with cat', async function () {
     this.timeout(50 * 1000)
-    const res = await all(ipfs.add(Buffer.from(hat()), { preload: false }))
+    const res = await all(ipfs.add(Buffer.from(nanoid()), { preload: false }))
     await all(ipfs.cat(res[0].cid))
     await MockPreloadNode.waitForCids(res[0].cid)
   })
   it('should preload content retrieved with get', async function () {
     this.timeout(50 * 1000)
-    const res = await all(ipfs.add(Buffer.from(hat()), { preload: false }))
+    const res = await all(ipfs.add(Buffer.from(nanoid()), { preload: false }))
     await all(ipfs.get(res[0].cid))
     await MockPreloadNode.waitForCids(res[0].cid)
   })
@@ -113,13 +113,13 @@
     const res = await all(ipfs.add([{
       path: 'dir0/dir1/file0',
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }, {
       path: 'dir0/dir1/file1',
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }, {
       path: 'dir0/file2',
-      content: Buffer.from(hat())
+      content: Buffer.from(nanoid())
     }], { wrapWithDirectory: true }))
     const wrappingDir = res.find(file => file.path === '')
@@ -140,7 +140,7 @@
   it('should preload content added with object.put', async function () {
     this.timeout(50 * 1000)
-    const cid = await ipfs.object.put({ Data: Buffer.from(hat()), Links: [] })
+    const cid = await ipfs.object.put({ Data: Buffer.from(nanoid()), Links: [] })
     await MockPreloadNode.waitForCids(cid)
   })
@@ -148,7 +148,7 @@
     this.timeout(50 * 1000)
     const createNode = async () => {
-      const cid = await ipfs.object.put({ Data: Buffer.from(hat()), Links: [] })
+      const cid = await ipfs.object.put({ Data: Buffer.from(nanoid()), Links: [] })
       const node = await ipfs.object.get(cid)
       return { cid, node }
     }
@@ -167,11 +167,11 @@
   it('should preload content added with object.patch.rmLink', async function () {
     this.timeout(50 * 1000)
-    const linkCid = await ipfs.object.put({ Data: Buffer.from(hat()), Links: [] })
+    const linkCid = await ipfs.object.put({ Data: Buffer.from(nanoid()), Links: [] })
     const linkNode = await ipfs.object.get(linkCid)
     const parentCid = await ipfs.object.put({
-      Data: Buffer.from(hat()),
+      Data: Buffer.from(nanoid()),
       Links: [{
         name: 'link',
         cid: linkCid,
@@ -185,15 +185,15 @@
   it('should preload content added with object.patch.setData', async function () {
     this.timeout(50 * 1000)
-    const originalCid = await ipfs.object.put({ Data: Buffer.from(hat()), Links: [] })
-    const patchedCid = await ipfs.object.patch.setData(originalCid, Buffer.from(hat()))
+    const originalCid = await ipfs.object.put({ Data: Buffer.from(nanoid()), Links: [] })
+    const patchedCid = await ipfs.object.patch.setData(originalCid, Buffer.from(nanoid()))
     await MockPreloadNode.waitForCids(patchedCid)
   })
   it('should preload content added with object.patch.appendData', async function () {
     this.timeout(50 * 1000)
-    const originalCid = await ipfs.object.put({ Data: Buffer.from(hat()), Links: [] })
-    const patchedCid = await ipfs.object.patch.appendData(originalCid, Buffer.from(hat()))
+    const originalCid = await ipfs.object.put({ Data: Buffer.from(nanoid()), Links: [] })
+    const patchedCid = await ipfs.object.patch.appendData(originalCid, Buffer.from(nanoid()))
     await MockPreloadNode.waitForCids(patchedCid)
   })
@@ -206,34 +206,34 @@
   it('should preload content added with block.put', async function () {
     this.timeout(50 * 1000)
-    const block = await ipfs.block.put(Buffer.from(hat()))
+    const block = await ipfs.block.put(Buffer.from(nanoid()))
     await MockPreloadNode.waitForCids(block.cid)
   })
   it('should preload content retrieved with block.get', async function () {
     this.timeout(50 * 1000)
-    const block = await ipfs.block.put(Buffer.from(hat()), { preload: false })
+    const block = await ipfs.block.put(Buffer.from(nanoid()), { preload: false })
     await ipfs.block.get(block.cid)
     await MockPreloadNode.waitForCids(block.cid)
   })
   it('should preload content retrieved with block.stat', async function () {
     this.timeout(50 * 1000)
-    const block = await ipfs.block.put(Buffer.from(hat()), { preload: false })
+    const block = await ipfs.block.put(Buffer.from(nanoid()), { preload: false })
     await ipfs.block.stat(block.cid)
     await MockPreloadNode.waitForCids(block.cid)
   })
   it('should preload content added with dag.put', async function () {
     this.timeout(50 * 1000)
-    const obj = { test: hat() }
+    const obj = { test: nanoid() }
     const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' })
     await MockPreloadNode.waitForCids(cid)
   })
   it('should preload content retrieved with dag.get', async function () {
     this.timeout(50 * 1000)
-    const obj = { test: hat() }
+    const obj = { test: nanoid() }
     const opts = { format: 'dag-cbor', hashAlg: 'sha2-256', preload: false }
     const cid = await ipfs.dag.put(obj, opts)
     await ipfs.dag.get(cid)
@@ -241,7 +241,7 @@
   })
   it('should preload content retrieved with files.ls', async () => {
-    const res = await all(ipfs.add({ path: `/t/${hat()}`, content: Buffer.from(hat()) }))
+    const res = await all(ipfs.add({ path: `/t/${nanoid()}`, content: Buffer.from(nanoid()) }))
     const dirCid = res[res.length - 1].cid
     await MockPreloadNode.waitForCids(dirCid)
     await MockPreloadNode.clearPreloadCids()
@@ -250,7 +250,7 @@
   })
   it('should preload content retrieved with files.ls by CID', async () => {
-    const res = await all(ipfs.add({ path: `/t/${hat()}`, content: Buffer.from(hat()) }))
+    const res = await all(ipfs.add({ path: `/t/${nanoid()}`, content: Buffer.from(nanoid()) }))
     const dirCid = res[res.length - 1].cid
     await MockPreloadNode.waitForCids(dirCid)
     await MockPreloadNode.clearPreloadCids()
@@ -259,7 +259,7 @@
   })
   it('should preload content retrieved with files.read', async () => {
-    const fileCid = (await all(ipfs.add(Buffer.from(hat()))))[0].cid
+    const fileCid = (await all(ipfs.add(Buffer.from(nanoid()))))[0].cid
     await MockPreloadNode.waitForCids(fileCid)
     await MockPreloadNode.clearPreloadCids()
     await ipfs.files.read(`/ipfs/${fileCid}`)
@@ -267,7 +267,7 @@
   })
   it('should preload content retrieved with files.stat', async () => {
-    const fileCid = (await all(ipfs.add(Buffer.from(hat()))))[0].cid
+    const fileCid = (await all(ipfs.add(Buffer.from(nanoid()))))[0].cid
     await MockPreloadNode.waitForCids(fileCid)
     await MockPreloadNode.clearPreloadCids()
     await ipfs.files.stat(`/ipfs/${fileCid}`)
@@ -303,7 +303,7 @@ describe('preload disabled', function () {
   after(() => repo.teardown())
   it('should not preload if disabled', async () => {
-    const res = await all(ipfs.add(Buffer.from(hat())))
+    const res = await all(ipfs.add(Buffer.from(nanoid())))
     return expect(MockPreloadNode.waitForCids(res[0].cid))
       .to.eventually.be.rejected()
diff --git a/packages/ipfs/test/core/pubsub.spec.js b/packages/ipfs/test/core/pubsub.spec.js
index 0d28302bba..7895156413 100644
--- a/packages/ipfs/test/core/pubsub.spec.js
+++ b/packages/ipfs/test/core/pubsub.spec.js
@@ -2,7 +2,7 @@
 /* eslint-env mocha */
 'use strict'
-const hat = require('hat')
+const nanoid = require('nanoid')
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
 const IPFS = require('../../src')
 const createTempRepo = require('../utils/create-repo-nodejs')
@@ -37,7 +37,7 @@ describe('pubsub disabled', () => {
   after(() => repo.teardown())
   it('should not allow subscribe if disabled', async () => {
-    const topic = hat()
+    const topic = nanoid()
     const handler = () => { throw new Error('unexpected message') }
     await expect(ipfs.pubsub.subscribe(topic, handler))
@@ -46,7 +46,7 @@
   })
   it('should not allow unsubscribe if disabled', async () => {
-    const topic = hat()
+    const topic = nanoid()
     const handler = () => { throw new Error('unexpected message') }
     await expect(ipfs.pubsub.unsubscribe(topic, handler))
@@ -55,8 +55,8 @@
   })
   it('should not allow publish if disabled', async () => {
-    const topic = hat()
-    const msg = Buffer.from(hat())
+    const topic = nanoid()
+    const msg = Buffer.from(nanoid())
     await expect(ipfs.pubsub.publish(topic, msg))
       .to.eventually.be.rejected()
@@ -70,7 +70,7 @@
   })
   it('should not allow peers if disabled', async () => {
-    const topic = hat()
+    const topic = nanoid()
     await expect(ipfs.pubsub.peers(topic))
       .to.eventually.be.rejected()
diff --git a/packages/ipfs/test/gateway/index.js b/packages/ipfs/test/gateway/index.js
index 416b6c7eec..d302c3823e 100644
--- a/packages/ipfs/test/gateway/index.js
+++ b/packages/ipfs/test/gateway/index.js
@@ -7,7 +7,7 @@
 const Daemon = require('../../src/cli/daemon')
 const loadFixture = require('aegir/fixtures')
 const os = require('os')
 const path = require('path')
-const hat = require('hat')
+const nanoid = require('nanoid')
 const fileType = require('file-type')
 const CID = require('cids')
 const all = require('it-all')
@@ -32,7 +32,7 @@ describe('HTTP Gateway', function () {
   before(async () => {
     this.timeout(60 * 1000)
-    const repoPath = path.join(os.tmpdir(), '/ipfs-' + hat())
+    const repoPath = path.join(os.tmpdir(), '/ipfs-' + nanoid())
     http.api = new Daemon({
       repo: repoPath,
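The preload specs above all follow one shape, worth stating once: create content with preloading disabled, touch it through the API under test, then require the mock preload server to have seen the CID. A condensed sketch, using the same `all`, `nanoid` and `MockPreloadNode` bindings as the spec (inside an async test):

```js
// The assertion pattern shared by the preload specs.
const res = await all(ipfs.add(Buffer.from(nanoid()), { preload: false }))
await all(ipfs.cat(res[0].cid))               // the call expected to trigger a preload
await MockPreloadNode.waitForCids(res[0].cid) // rejects if the mock node never saw the CID
```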
diff --git a/packages/ipfs/test/http-api/index.js b/packages/ipfs/test/http-api/index.js
index 0c4cb6d793..8ee36db00d 100644
--- a/packages/ipfs/test/http-api/index.js
+++ b/packages/ipfs/test/http-api/index.js
@@ -1,4 +1,9 @@
 'use strict'
 
-require('./routes')
+const { isNode } = require('ipfs-utils/src/env')
+
+if (isNode) {
+  require('./routes')
+}
+
 require('./interface')
diff --git a/packages/ipfs/test/http-api/inject/dag.js b/packages/ipfs/test/http-api/inject/dag.js
index 2240c2c5d8..ed282f7deb 100644
--- a/packages/ipfs/test/http-api/inject/dag.js
+++ b/packages/ipfs/test/http-api/inject/dag.js
@@ -2,7 +2,7 @@
 /* eslint-env mocha */
 'use strict'
-const hat = require('hat')
+const nanoid = require('nanoid')
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
 const DAGNode = require('ipld-dag-pb').DAGNode
 const Readable = require('stream').Readable
@@ -259,7 +259,7 @@ module.exports = (http) => {
     it('pins a node after adding', async () => {
       const node = {
         foo: 'bar',
-        disambiguator: hat()
+        disambiguator: nanoid()
       }
       const res = await api.inject({
@@ -279,7 +279,7 @@
     it('does not pin a node after adding', async () => {
       const node = {
         foo: 'bar',
-        disambiguator: hat()
+        disambiguator: nanoid()
       }
       const res = await api.inject({
diff --git a/packages/ipfs/test/http-api/inject/files.js b/packages/ipfs/test/http-api/inject/files.js
index 5efa877328..a90930f354 100644
--- a/packages/ipfs/test/http-api/inject/files.js
+++ b/packages/ipfs/test/http-api/inject/files.js
@@ -2,7 +2,7 @@
 /* eslint-env mocha */
 'use strict'
-const crypto = require('crypto')
+const randomBytes = require('iso-random-stream/src/random')
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
 const FormData = require('form-data')
 const streamToPromise = require('stream-to-promise')
@@ -24,7 +24,7 @@ module.exports = (http) => {
       'Content-Disposition: form-data; name="test"; filename="test.txt"',
       'Content-Type: text/plain',
       '',
-      crypto.randomBytes(1024 * 1024 * 2).toString('hex'),
+      randomBytes(1024 * 1024 * 2).toString('hex'),
       '------------287032381131322--'
     ].join('\r\n'))
diff --git a/packages/ipfs-mfs/test/http/chmod.js b/packages/ipfs/test/http-api/inject/files/chmod.js
similarity index 95%
rename from packages/ipfs-mfs/test/http/chmod.js
rename to packages/ipfs/test/http-api/inject/files/chmod.js
index d9f44dbd5b..f5090b139e 100644
--- a/packages/ipfs-mfs/test/http/chmod.js
+++ b/packages/ipfs/test/http-api/inject/files/chmod.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 function defaultOptions (modification = {}) {
diff --git a/packages/ipfs-mfs/test/http/cp.js b/packages/ipfs/test/http-api/inject/files/cp.js
similarity index 94%
rename from packages/ipfs-mfs/test/http/cp.js
rename to packages/ipfs/test/http-api/inject/files/cp.js
index abbd55a152..ac5a1c7309 100644
--- a/packages/ipfs-mfs/test/http/cp.js
+++ b/packages/ipfs/test/http-api/inject/files/cp.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 function defaultOptions (modification = {}) {
diff --git a/packages/ipfs-mfs/test/http/flush.js b/packages/ipfs/test/http-api/inject/files/flush.js
similarity index 93%
rename from packages/ipfs-mfs/test/http/flush.js
rename to packages/ipfs/test/http-api/inject/files/flush.js
index 264677e6f8..b340c34399 100644
--- a/packages/ipfs-mfs/test/http/flush.js
+++ b/packages/ipfs/test/http-api/inject/files/flush.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 const CID = require('cids')
 const cid = new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
diff --git a/packages/ipfs-mfs/test/cli/index.js b/packages/ipfs/test/http-api/inject/files/index.js
similarity index 90%
rename from packages/ipfs-mfs/test/cli/index.js
rename to packages/ipfs/test/http-api/inject/files/index.js
index 0f1fff2a99..53d4c3c955 100644
--- a/packages/ipfs-mfs/test/cli/index.js
+++ b/packages/ipfs/test/http-api/inject/files/index.js
@@ -1,7 +1,7 @@
 /* eslint-env mocha */
 'use strict'
-describe('cli', () => {
+describe('files', () => {
   require('./chmod')
   require('./cp')
   require('./flush')
diff --git a/packages/ipfs-mfs/test/http/ls.js b/packages/ipfs/test/http-api/inject/files/ls.js
similarity index 96%
rename from packages/ipfs-mfs/test/http/ls.js
rename to packages/ipfs/test/http-api/inject/files/ls.js
index 0d68d56e69..a8a7f4fac0 100644
--- a/packages/ipfs-mfs/test/http/ls.js
+++ b/packages/ipfs/test/http-api/inject/files/ls.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 const CID = require('cids')
 const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu')
diff --git a/packages/ipfs-mfs/test/http/mkdir.js b/packages/ipfs/test/http-api/inject/files/mkdir.js
similarity index 97%
rename from packages/ipfs-mfs/test/http/mkdir.js
rename to packages/ipfs/test/http-api/inject/files/mkdir.js
index 586b358651..34f29aee2d 100644
--- a/packages/ipfs-mfs/test/http/mkdir.js
+++ b/packages/ipfs/test/http-api/inject/files/mkdir.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 function defaultOptions (modification = {}) {
diff --git a/packages/ipfs-mfs/test/http/mv.js b/packages/ipfs/test/http-api/inject/files/mv.js
similarity index 96%
rename from packages/ipfs-mfs/test/http/mv.js
rename to packages/ipfs/test/http-api/inject/files/mv.js
index 22fd5f243e..ed2d42d775 100644
--- a/packages/ipfs-mfs/test/http/mv.js
+++ b/packages/ipfs/test/http-api/inject/files/mv.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 function defaultOptions (modification = {}) {
diff --git a/packages/ipfs-mfs/test/http/read.js b/packages/ipfs/test/http-api/inject/files/read.js
similarity index 95%
rename from packages/ipfs-mfs/test/http/read.js
rename to packages/ipfs/test/http-api/inject/files/read.js
index 4f255253b3..e9588ba32a 100644
--- a/packages/ipfs-mfs/test/http/read.js
+++ b/packages/ipfs/test/http-api/inject/files/read.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 function defaultOptions (modification = {}) {
diff --git a/packages/ipfs-mfs/test/http/rm.js b/packages/ipfs/test/http-api/inject/files/rm.js
similarity index 90%
rename from packages/ipfs-mfs/test/http/rm.js
rename to packages/ipfs/test/http-api/inject/files/rm.js
index 6bc877ef6a..b799eb50ce 100644
--- a/packages/ipfs-mfs/test/http/rm.js
+++ b/packages/ipfs/test/http-api/inject/files/rm.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 function defaultOptions (modification = {}) {
diff --git a/packages/ipfs-mfs/test/http/stat.js b/packages/ipfs/test/http-api/inject/files/stat.js
similarity index 96%
rename from packages/ipfs-mfs/test/http/stat.js
rename to packages/ipfs/test/http-api/inject/files/stat.js
index d302895ac0..ca5e3c14b3 100644
--- a/packages/ipfs-mfs/test/http/stat.js
+++ b/packages/ipfs/test/http-api/inject/files/stat.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 const CID = require('cids')
 const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu')
diff --git a/packages/ipfs-mfs/test/http/touch.js b/packages/ipfs/test/http-api/inject/files/touch.js
similarity index 96%
rename from packages/ipfs-mfs/test/http/touch.js
rename to packages/ipfs/test/http-api/inject/files/touch.js
index 5f30c66d2d..c8d3c7de66 100644
--- a/packages/ipfs-mfs/test/http/touch.js
+++ b/packages/ipfs/test/http-api/inject/files/touch.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 function defaultOptions (modification = {}) {
diff --git a/packages/ipfs-mfs/test/http/write.js b/packages/ipfs/test/http-api/inject/files/write.js
similarity index 98%
rename from packages/ipfs-mfs/test/http/write.js
rename to packages/ipfs/test/http-api/inject/files/write.js
index 6d7cc2e635..104b93a6ff 100644
--- a/packages/ipfs-mfs/test/http/write.js
+++ b/packages/ipfs/test/http-api/inject/files/write.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'
-const expect = require('../helpers/chai')
-const http = require('../helpers/http')
+const { expect } = require('interface-ipfs-core/src/utils/mocha')
+const http = require('../../../utils/http')
 const sinon = require('sinon')
 const FormData = require('form-data')
 const streamToPromise = require('stream-to-promise')
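All of the renamed specs above now drive routes through one helper, added as `packages/ipfs/test/utils/http.js` (its `(request, { ipfs })` signature appears later in this diff): build a fake request, inject it into a throwaway Hapi server wired to a stubbed `ipfs`, and assert on the stub. A sketch of the pattern; the route, query string and stubbed method here are illustrative, not copied from a real spec:

```js
// Inject-style route test, modelled on the relocated mfs specs.
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const http = require('../../../utils/http')
const sinon = require('sinon')
const CID = require('cids')

it('calls ipfs.files.flush for /api/v0/files/flush', async () => {
  const cid = new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
  // stub only what the route under test touches
  const ipfs = { files: { flush: sinon.stub().resolves(cid) } }

  await http({
    method: 'POST',
    url: '/api/v0/files/flush?arg=/app' // hypothetical path
  }, { ipfs })

  expect(ipfs.files.flush.calledWith('/app')).to.be.true()
})
```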
diff --git a/packages/ipfs/test/http-api/interface.js b/packages/ipfs/test/http-api/interface.js
index 1cd7b52b6e..0905467848 100644
--- a/packages/ipfs/test/http-api/interface.js
+++ b/packages/ipfs/test/http-api/interface.js
@@ -2,32 +2,31 @@
 'use strict'
 const tests = require('interface-ipfs-core')
-const merge = require('merge-options')
-const { createFactory } = require('ipfsd-ctl')
-const IPFS = require('../../src')
+const factory = require('../utils/factory')
+const { isNode } = require('ipfs-utils/src/env')
 /** @typedef { import("ipfsd-ctl").ControllerOptions } ControllerOptions */
-
 describe('interface-ipfs-core over ipfs-http-client tests', function () {
   this.timeout(20000)
-  /** @type ControllerOptions */
-  const commonOptions = {
-    test: true,
+
+  const commonFactory = factory({
     type: 'js',
-    ipfsModule: IPFS,
-    ipfsHttpModule: require('ipfs-http-client'),
-    ipfsOptions: {
-      pass: 'ipfs-is-awesome-software'
-    }
-  }
-  const overrides = {
-    js: {
-      ipfsBin: './src/cli/bin.js'
-    }
-  }
-  const commonFactory = createFactory(commonOptions, overrides)
+    ipfsBin: './src/cli/bin.js',
+    ipfsModule: false
   })
-  tests.root(commonFactory)
+  tests.root(commonFactory, {
+    skip: isNode ? [{
+      name: 'should fail when passed invalid input',
+      reason: 'node-fetch cannot detect errors in streaming bodies - https://github.com/node-fetch/node-fetch/issues/753'
+    }, {
+      name: 'should not add from an invalid url',
+      reason: 'node-fetch cannot detect errors in streaming bodies - https://github.com/node-fetch/node-fetch/issues/753'
+    }] : [{
+      name: 'should add with mtime as hrtime',
+      reason: 'Not designed to run in the browser'
+    }]
+  })
   tests.bitswap(commonFactory)
@@ -53,48 +52,35 @@ describe('interface-ipfs-core over ipfs-http-client tests', function () {
     }
   })
-  tests.files(commonFactory, {
-    skip: [
-      {
-        name: 'should make directory and specify mtime as hrtime',
-        reason: 'FIXME: use kebab case in joi validation'
-      },
-      {
-        name: 'should respect metadata when copying directories',
-        reason: 'FIXME: use kebab case in joi validation'
-      },
-      {
-        name: 'should stat sharded dir with mode',
-        reason: 'FIXME: expected: hamt-sharded-directory, actual: directory'
-      },
-      {
-        name: 'should stat sharded dir with mtime',
-        reason: 'FIXME: expected: hamt-sharded-directory, actual: directory'
-      },
-      {
-        name: 'should set mtime as hrtime',
-        reason: 'FIXME: use kebab case in joi validation'
+  tests.files(factory({
+    type: 'js',
+    ipfsBin: './src/cli/bin.js',
+    ipfsOptions: {
+      EXPERIMENTAL: {
+        sharding: true
       }
-    ]
-  })
+    }
+  }))
   tests.key(commonFactory)
   tests.miscellaneous(commonFactory)
-  tests.name(createFactory(merge(commonOptions, {
+  tests.name(factory({
     ipfsOptions: {
       offline: true
     }
-  }), overrides))
+  }))
-  tests.namePubsub(createFactory(merge(commonOptions, {
+  tests.namePubsub(factory({
+    type: 'js',
+    ipfsBin: './src/cli/bin.js',
     ipfsOptions: {
       EXPERIMENTAL: {
         ipnsPubsub: true
       }
     }
-  }), overrides))
+  }))
   tests.object(commonFactory)
@@ -115,11 +101,13 @@
     }]
   })
-  tests.pubsub(createFactory(commonOptions, merge(overrides, {
+  tests.pubsub(factory({
+    type: 'js',
+    ipfsBin: './src/cli/bin.js',
     go: {
       args: ['--enable-pubsub-experiment']
     }
-  })))
+  }))
   tests.repo(commonFactory)
diff --git a/packages/ipfs/test/http-api/routes.js b/packages/ipfs/test/http-api/routes.js
index de626d345a..1409753a03 100644
--- a/packages/ipfs/test/http-api/routes.js
+++ b/packages/ipfs/test/http-api/routes.js
@@ -2,7 +2,7 @@
 'use strict'
 const fs = require('fs')
-const hat = require('hat')
+const nanoid = require('nanoid')
 const Daemon = require('../../src/cli/daemon')
 const { promisify } = require('util')
 const ncp = promisify(require('ncp').ncp)
@@ -21,7 +21,7 @@ describe('HTTP API', () => {
   const startHttpAPI = async (config) => {
     http.api = new Daemon({
       repo: repoTests,
-      pass: hat(),
+      pass: nanoid(),
       config,
       preload: { enabled: false }
     })
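The next two files are the temp-repo helpers the specs above rely on. Each call yields an ipfs-repo instance at a unique nanoid-suffixed path, so parallel test runs never collide, and the returned object carries the `teardown()` the specs call in `after` hooks. A usage sketch inside an async context (the `IPFS` require path and the `preload` option are illustrative):

```js
// Sketch of the temp-repo lifecycle used throughout these specs.
const createTempRepo = require('./utils/create-repo-nodejs')
const IPFS = require('../src')

const repo = createTempRepo() // e.g. <os.tmpdir()>/ipfs-test-V1StGXR8_Z5jdHi6B-myT
const node = await IPFS.create({ repo, preload: { enabled: false } })
// ... exercise the node ...
await node.stop()
await repo.teardown() // removes the repo directory
```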
diff --git a/packages/ipfs/test/utils/create-repo-browser.js b/packages/ipfs/test/utils/create-repo-browser.js
index 7b719c1882..34d324e6e1 100644
--- a/packages/ipfs/test/utils/create-repo-browser.js
+++ b/packages/ipfs/test/utils/create-repo-browser.js
@@ -2,7 +2,7 @@
 'use strict'
 const IPFSRepo = require('ipfs-repo')
-const hat = require('hat')
+const nanoid = require('nanoid')
 const idb = self.indexedDB ||
   self.mozIndexedDB ||
@@ -10,7 +10,7 @@ const idb = self.indexedDB ||
   self.msIndexedDB
 module.exports = function createTempRepo (repoPath) {
-  repoPath = repoPath || '/ipfs-' + hat()
+  repoPath = repoPath || '/ipfs-' + nanoid()
   const repo = new IPFSRepo(repoPath)
diff --git a/packages/ipfs/test/utils/create-repo-nodejs.js b/packages/ipfs/test/utils/create-repo-nodejs.js
index 688ee972b6..81591e2819 100644
--- a/packages/ipfs/test/utils/create-repo-nodejs.js
+++ b/packages/ipfs/test/utils/create-repo-nodejs.js
@@ -4,10 +4,10 @@
 const IPFSRepo = require('ipfs-repo')
 const clean = require('./clean')
 const os = require('os')
 const path = require('path')
-const hat = require('hat')
+const nanoid = require('nanoid')
 module.exports = function createTempRepo (repoPath) {
-  repoPath = repoPath || path.join(os.tmpdir(), '/ipfs-test-' + hat())
+  repoPath = repoPath || path.join(os.tmpdir(), '/ipfs-test-' + nanoid())
   const repo = new IPFSRepo(repoPath)
diff --git a/packages/ipfs/test/utils/factory.js b/packages/ipfs/test/utils/factory.js
index 17bfbef854..8966d043ce 100644
--- a/packages/ipfs/test/utils/factory.js
+++ b/packages/ipfs/test/utils/factory.js
@@ -43,9 +43,11 @@ const commonOverrides = {
   }
 }
-const factory = (options = {}, overrides = {}) => createFactory(
-  merge(commonOptions, options),
-  merge(commonOverrides, overrides)
-)
+const factory = (options = {}, overrides = {}) => {
+  return createFactory(
+    merge(commonOptions, options),
+    merge(commonOverrides, overrides)
+  )
+}
 module.exports = factory
diff --git a/packages/ipfs-mfs/test/helpers/http.js b/packages/ipfs/test/utils/http.js
similarity index 51%
rename from packages/ipfs-mfs/test/helpers/http.js
rename to packages/ipfs/test/utils/http.js
index 4d474fd5ef..e3af496e60 100644
--- a/packages/ipfs-mfs/test/helpers/http.js
+++ b/packages/ipfs/test/utils/http.js
@@ -1,17 +1,15 @@
 'use strict'
 const Hapi = require('@hapi/hapi')
-const routes = require('../../src/http')
+const routes = require('../../src/http/api/routes')
 module.exports = (request, { ipfs }) => {
   const server = Hapi.server()
   server.app.ipfs = ipfs
-  for (const key in routes) {
-    if (Object.prototype.hasOwnProperty.call(routes, key)) {
-      server.route(routes[key])
-    }
-  }
+  routes.forEach(route => {
+    server.route(route)
+  })
   return server.inject(request)
 }
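The last file swaps ky for the HTTP wrapper in ipfs-utils. The call sites stay nearly identical: `HTTP.get`/`HTTP.delete` return a fetch-style Response, so only the `.json()` call moves out of the chain. A sketch of the resulting usage (port 1138 is the mock node's default from the same file):

```js
// ipfs-utils HTTP in place of ky, as in the mock-preload-node hunk below.
const HTTP = require('ipfs-utils/src/http')

const res = await HTTP.get('http://127.0.0.1:1138/cids')
const cids = await res.json() // previously: ky.get(url).json()
await HTTP.delete('http://127.0.0.1:1138/cids')
```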
diff --git a/packages/ipfs/test/utils/mock-preload-node.js b/packages/ipfs/test/utils/mock-preload-node.js
index 6193ac3b95..db0eb1c356 100644
--- a/packages/ipfs/test/utils/mock-preload-node.js
+++ b/packages/ipfs/test/utils/mock-preload-node.js
@@ -5,7 +5,7 @@
 const http = require('http')
 const toUri = require('multiaddr-to-uri')
 const URL = require('url').URL || self.URL
 const errCode = require('err-code')
-const { default: ky } = require('ky-universal')
+const HTTP = require('ipfs-utils/src/http')
 const waitFor = require('../utils/wait-for')
 const defaultPort = 1138
@@ -55,12 +55,18 @@ module.exports.createNode = () => {
 }
 // Get the stored preload CIDs for the server at `addr`
-const getPreloadCids = addr => ky.get(`${toUri(addr || defaultAddr)}/cids`).json()
+const getPreloadCids = async (addr) => {
+  const res = await HTTP.get(`${toUri(addr || defaultAddr)}/cids`)
+  return res.json()
+}
 module.exports.getPreloadCids = getPreloadCids
 // Clear the stored preload URLs for the server at `addr`
-module.exports.clearPreloadCids = addr => ky.delete(`${toUri(addr || defaultAddr)}/cids`)
+
+module.exports.clearPreloadCids = addr => {
+  return HTTP.delete(`${toUri(addr || defaultAddr)}/cids`)
+}
 // Wait for the passed CIDs to appear in the CID list from the preload node
 module.exports.waitForCids = async (cids, opts) => {