diff --git a/.aegir.js b/.aegir.js index 65a309618a..d9a7271617 100644 --- a/.aegir.js +++ b/.aegir.js @@ -22,7 +22,9 @@ module.exports = { singleRun: true }, hooks: { - pre: server.start.bind(server), - post: server.stop.bind(server) + browser: { + pre: () => server.start(), + post: () => server.stop() + } } } diff --git a/package.json b/package.json index b055227e53..cc37fe15e3 100644 --- a/package.json +++ b/package.json @@ -26,6 +26,10 @@ "test:node": "aegir test -t node", "test:browser": "aegir test -t browser", "test:webworker": "aegir test -t webworker", + "test:electron-main": "aegir test -t electron-main", + "test:electron-renderer": "aegir test -t electron-renderer", + "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless", + "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless", "lint": "aegir lint", "build": "aegir build", "release": "aegir release ", @@ -90,11 +94,12 @@ "aegir": "^20.0.0", "browser-process-platform": "~0.1.1", "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", "cross-env": "^5.2.0", "dirty-chai": "^2.0.1", "go-ipfs-dep": "^0.4.22", "interface-ipfs-core": "^0.111.0", - "ipfsd-ctl": "~0.43.0", + "ipfsd-ctl": "~0.45.0", "nock": "^10.0.2", "stream-equal": "^1.1.1" }, diff --git a/src/utils/send-files-stream.js b/src/utils/send-files-stream.js index 91bd90fb45..09c55d3769 100644 --- a/src/utils/send-files-stream.js +++ b/src/utils/send-files-stream.js @@ -78,8 +78,8 @@ module.exports = (send, path) => { qs['raw-leaves'] = propOrProp(options, 'raw-leaves', 'rawLeaves') qs['only-hash'] = propOrProp(options, 'only-hash', 'onlyHash') qs['wrap-with-directory'] = propOrProp(options, 'wrap-with-directory', 'wrapWithDirectory') - qs['pin'] = propOrProp(options, 'pin') - qs['preload'] = propOrProp(options, 'preload') + qs.pin = propOrProp(options, 'pin') + qs.preload = propOrProp(options, 'preload') qs.hash = propOrProp(options, 'hash', 'hashAlg') if (options.strategy === 'trickle' || options.trickle) { diff --git a/test/commands.spec.js b/test/commands.spec.js index 3fb4f02e06..66072c5f92 100644 --- a/test/commands.spec.js +++ b/test/commands.spec.js @@ -16,34 +16,25 @@ describe('.commands', function () { let ipfsd let ipfs - before((done) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - done() + before(async () => { + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } }) + ipfs = ipfsClient(ipfsd.apiAddr) }) - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - it('lists commands', (done) => { - ipfs.commands((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) - }) + it('lists commands', async () => { + const res = await ipfs.commands() - describe('promise', () => { - it('lists commands', () => { - return ipfs.commands() - .then((res) => { - expect(res).to.exist() - }) - }) + expect(res).to.exist() }) }) diff --git a/test/constructor.spec.js b/test/constructor.spec.js index c138b7fb06..cc6a250c26 100644 --- a/test/constructor.spec.js +++ b/test/constructor.spec.js @@ -109,36 +109,30 @@ describe('ipfs-http-client constructor tests', () => { let apiAddr let ipfsd - before(function (done) { + before(async function () { this.timeout(60 * 1000) // slow CI - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, node) => { - expect(err).to.not.exist() - ipfsd = 
node - apiAddr = node.apiAddr.toString() - done() - }) + ipfsd = await f.spawn({ initOptions: { bits: 1024, profile: 'test' } }) + apiAddr = ipfsd.apiAddr.toString() }) - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - it('can connect to an ipfs http api', (done) => { - clientWorks(ipfsClient(apiAddr), done) + it('can connect to an ipfs http api', async () => { + await clientWorks(ipfsClient(apiAddr)) }) }) }) -function clientWorks (client, done) { - client.id((err, id) => { - expect(err).to.not.exist() +async function clientWorks (client) { + const id = await client.id() - expect(id).to.have.a.property('id') - expect(id).to.have.a.property('publicKey') - done() - }) + expect(id).to.have.a.property('id') + expect(id).to.have.a.property('publicKey') } function expectConfig (ipfs, { host, port, protocol, apiPath }) { diff --git a/test/custom-headers.spec.js b/test/custom-headers.spec.js index 1e17485153..8d05897a6a 100644 --- a/test/custom-headers.spec.js +++ b/test/custom-headers.spec.js @@ -17,23 +17,25 @@ describe('custom headers', function () { let ipfs let ipfsd // initialize ipfs with custom headers - before(done => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient({ - host: 'localhost', - port: 6001, - protocol: 'http', - headers: { - authorization: 'Bearer ' + 'YOLO' - } - }) - done() + before(async () => { + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } + }) + + ipfs = ipfsClient({ + host: 'localhost', + port: 6001, + protocol: 'http', + headers: { + authorization: 'Bearer ' + 'YOLO' + } }) }) - it('are supported', done => { + it('are supported', (done) => { // spin up a test http server to inspect the requests made by the library const server = require('http').createServer((req, res) => { req.on('data', () => {}) @@ -57,5 +59,9 @@ describe('custom headers', function () { }) }) - after(done => ipfsd.stop(done)) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } + }) }) diff --git a/test/dag.spec.js b/test/dag.spec.js index eb69a959ca..8796ba72b6 100644 --- a/test/dag.spec.js +++ b/test/dag.spec.js @@ -5,9 +5,10 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') +const chaiAsPromised = require('chai-as-promised') const expect = chai.expect chai.use(dirtyChai) -const series = require('async/series') +chai.use(chaiAsPromised) const { DAGNode } = require('ipld-dag-pb') const CID = require('cids') const ipfsClient = require('../src') @@ -18,67 +19,56 @@ let ipfs describe('.dag', function () { this.timeout(20 * 1000) - before(function (done) { - series([ - (cb) => f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - cb() - }) - ], done) + before(async function () { + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } + }) + ipfs = ipfsClient(ipfsd.apiAddr) }) - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - it('should be able to put and get a DAG node with format dag-pb', (done) => { + it('should be able to put and get a DAG node with format dag-pb', async () => { const data = Buffer.from('some data') const node = DAGNode.create(data) - ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }, (err, cid) => { - 
expect(err).to.not.exist() - cid = cid.toV0() - expect(cid.codec).to.equal('dag-pb') - cid = cid.toBaseEncodedString('base58btc') - // expect(cid).to.equal('bafybeig3t3eugdchignsgkou3ly2mmy4ic4gtfor7inftnqn3yq4ws3a5u') - expect(cid).to.equal('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - expect(result.value.Data).to.deep.equal(data) - done() - }) - }) + let cid = await ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }) + cid = cid.toV0() + expect(cid.codec).to.equal('dag-pb') + cid = cid.toBaseEncodedString('base58btc') + // expect(cid).to.equal('bafybeig3t3eugdchignsgkou3ly2mmy4ic4gtfor7inftnqn3yq4ws3a5u') + expect(cid).to.equal('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') + + const result = await ipfs.dag.get(cid) + + expect(result.value.Data).to.deep.equal(data) }) - it('should be able to put and get a DAG node with format dag-cbor', (done) => { + it('should be able to put and get a DAG node with format dag-cbor', async () => { const cbor = { foo: 'dag-cbor-bar' } - ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }, (err, cid) => { - expect(err).to.not.exist() - expect(cid.codec).to.equal('dag-cbor') - cid = cid.toBaseEncodedString('base32') - expect(cid).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.deep.equal(cbor) - done() - }) - }) + let cid = await ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + + expect(cid.codec).to.equal('dag-cbor') + cid = cid.toBaseEncodedString('base32') + expect(cid).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') + + const result = await ipfs.dag.get(cid) + + expect(result.value).to.deep.equal(cbor) }) - it('should callback with error when missing DAG resolver for multicodec from requested CID', (done) => { - ipfs.block.put(Buffer.from([0, 1, 2, 3]), { + it('should callback with error when missing DAG resolver for multicodec from requested CID', async () => { + const block = await ipfs.block.put(Buffer.from([0, 1, 2, 3]), { cid: new CID('z8mWaJ1dZ9fH5EetPuRsj8jj26pXsgpsr') - }, (err, block) => { - expect(err).to.not.exist() - - ipfs.dag.get(block.cid, (err, result) => { - expect(result).to.not.exist() - expect(err.message).to.equal('Missing IPLD format "git-raw"') - done() - }) }) + + await expect(ipfs.dag.get(block.cid)).to.be.rejectedWith('Missing IPLD format "git-raw"') }) }) diff --git a/test/diag.spec.js b/test/diag.spec.js index 3f12106e0b..73efbb2de2 100644 --- a/test/diag.spec.js +++ b/test/diag.spec.js @@ -19,68 +19,42 @@ describe('.diag', function () { let ipfsd let ipfs - before((done) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - done() + before(async () => { + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } }) + ipfs = ipfsClient(ipfsd.apiAddr) }) - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - describe('Callback API', () => { + describe('api API', () => { // Disabled in go-ipfs 0.4.10 - it.skip('.diag.net', (done) => { - ipfs.diag.net((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) - }) + it.skip('.diag.net', async () => { + const res = await ipfs.diag.net() - it('.diag.sys', (done) => { - ipfs.diag.sys((err, res) => 
{ - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.a.property('memory') - expect(res).to.have.a.property('diskinfo') - done() - }) + expect(res).to.exist() }) - it('.diag.cmds', (done) => { - ipfs.diag.cmds((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) - }) - }) + it('.diag.sys', async () => { + const res = await ipfs.diag.sys() - describe('Promise API', () => { - // Disabled in go-ipfs 0.4.10 - it.skip('.diag.net', () => { - return ipfs.diag.net() - .then((res) => expect(res).to.exist()) + expect(res).to.exist() + expect(res).to.have.a.property('memory') + expect(res).to.have.a.property('diskinfo') }) - it('.diag.sys', () => { - return ipfs.diag.sys() - .then((res) => { - expect(res).to.exist() - expect(res).to.have.a.property('memory') - expect(res).to.have.a.property('diskinfo') - }) - }) + it('.diag.cmds', async () => { + const res = await ipfs.diag.cmds() - it('.diag.cmds', () => { - return ipfs.diag.cmds() - .then((res) => expect(res).to.exist()) + expect(res).to.exist() }) }) }) diff --git a/test/endpoint-config.spec.js b/test/endpoint-config.spec.js index 8ff6532c7b..4747df2743 100644 --- a/test/endpoint-config.spec.js +++ b/test/endpoint-config.spec.js @@ -17,21 +17,24 @@ describe('.getEndpointConfig', () => { let ipfsd let ipfs - before(function (done) { + before(async function () { this.timeout(20 * 1000) // slow CI - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - done() + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } }) + ipfs = ipfsClient(ipfsd.apiAddr) }) - after(function (done) { + after(async function () { this.timeout(10 * 1000) - if (!ipfsd) return done() - ipfsd.stop(done) + + if (ipfsd) { + await ipfsd.stop() + } }) it('should return the endpoint configuration', function () { diff --git a/test/files-mfs.spec.js b/test/files-mfs.spec.js index beda9b24af..3116592000 100644 --- a/test/files-mfs.spec.js +++ b/test/files-mfs.spec.js @@ -4,8 +4,10 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') +const chaiAsPromised = require('chai-as-promised') const expect = chai.expect chai.use(dirtyChai) +chai.use(chaiAsPromised) const loadFixture = require('aegir/fixtures') const mh = require('multihashes') const CID = require('cids') @@ -39,136 +41,109 @@ describe('.files (the MFS API part)', function () { const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - before((done) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - done() + before(async () => { + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } }) + ipfs = ipfsClient(ipfsd.apiAddr) }) - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - it('.add file for testing', (done) => { - ipfs.add(testfile, (err, res) => { - expect(err).to.not.exist() + it('.add file for testing', async () => { + const res = await ipfs.add(testfile) - expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedMultihash) - expect(res[0].path).to.equal(expectedMultihash) - done() - }) + expect(res).to.have.length(1) + expect(res[0].hash).to.equal(expectedMultihash) + expect(res[0].path).to.equal(expectedMultihash) }) - it('.add with Buffer module', (done) => { + it('.add 
with Buffer module', async () => { const { Buffer } = require('buffer') const expectedBufferMultihash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX' const file = Buffer.from('hello') - ipfs.add(file, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(file) - expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedBufferMultihash) - expect(res[0].path).to.equal(expectedBufferMultihash) - done() - }) + expect(res).to.have.length(1) + expect(res[0].hash).to.equal(expectedBufferMultihash) + expect(res[0].path).to.equal(expectedBufferMultihash) }) - it('.add with empty path and buffer content', (done) => { + it('.add with empty path and buffer content', async () => { const expectedHash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX' const content = Buffer.from('hello') - ipfs.add([{ path: '', content }], (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add([{ path: '', content }]) - expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedHash) - expect(res[0].path).to.equal(expectedHash) - done() - }) + expect(res).to.have.length(1) + expect(res[0].hash).to.equal(expectedHash) + expect(res[0].path).to.equal(expectedHash) }) - it('.add with cid-version=1 and raw-leaves=false', (done) => { + it('.add with cid-version=1 and raw-leaves=false', async () => { const expectedCid = 'bafybeifogzovjqrcxvgt7g36y7g63hvwvoakledwk4b2fr2dl4wzawpnny' const options = { 'cid-version': 1, 'raw-leaves': false } - ipfs.add(testfile, options, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(testfile, options) - expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedCid) - expect(res[0].path).to.equal(expectedCid) - done() - }) + expect(res).to.have.length(1) + expect(res[0].hash).to.equal(expectedCid) + expect(res[0].path).to.equal(expectedCid) }) - it('.add with only-hash=true', function () { + it('.add with only-hash=true', async () => { const content = String(Math.random() + Date.now()) - return ipfs.add(Buffer.from(content), { onlyHash: true }) - .then(files => { - expect(files).to.have.length(1) + const files = await ipfs.add(Buffer.from(content), { onlyHash: true }) + expect(files).to.have.length(1) - // 'ipfs.object.get()' should timeout because content wasn't actually added - return expectTimeout(ipfs.object.get(files[0].hash), 4000) - }) + // 'ipfs.object.get()' should timeout because content wasn't actually added + await expectTimeout(ipfs.object.get(files[0].hash), 4000) }) - it('.add with options', (done) => { - ipfs.add(testfile, { pin: false }, (err, res) => { - expect(err).to.not.exist() + it('.add with options', async () => { + const res = await ipfs.add(testfile, { pin: false }) - expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedMultihash) - expect(res[0].path).to.equal(expectedMultihash) - done() - }) + expect(res).to.have.length(1) + expect(res[0].hash).to.equal(expectedMultihash) + expect(res[0].path).to.equal(expectedMultihash) }) - it('.add pins by default', (done) => { + it('.add pins by default', async () => { const newContent = Buffer.from(String(Math.random())) - ipfs.pin.ls((err, pins) => { - expect(err).to.not.exist() - const initialPinCount = pins.length - ipfs.add(newContent, (err, res) => { - expect(err).to.not.exist() + const initialPins = await ipfs.pin.ls() - ipfs.pin.ls((err, pins) => { - expect(err).to.not.exist() - expect(pins.length).to.eql(initialPinCount + 1) - done() - }) - }) - }) + await ipfs.add(newContent) + + const pinsAfterAdd = await 
ipfs.pin.ls() + + expect(pinsAfterAdd.length).to.eql(initialPins.length + 1) }) - it('.add with pin=false', (done) => { + it('.add with pin=false', async () => { const newContent = Buffer.from(String(Math.random())) - ipfs.pin.ls((err, pins) => { - expect(err).to.not.exist() - const initialPinCount = pins.length - ipfs.add(newContent, { pin: false }, (err, res) => { - expect(err).to.not.exist() + const initialPins = await ipfs.pin.ls() - ipfs.pin.ls((err, pins) => { - expect(err).to.not.exist() - expect(pins.length).to.eql(initialPinCount) - done() - }) - }) - }) + await ipfs.add(newContent, { pin: false }) + + const pinsAfterAdd = await ipfs.pin.ls() + + expect(pinsAfterAdd.length).to.eql(initialPins.length) }) HASH_ALGS.forEach((name) => { - it(`.add with hash=${name} and raw-leaves=false`, (done) => { + it(`.add with hash=${name} and raw-leaves=false`, async () => { const content = String(Math.random() + Date.now()) const file = { path: content + '.txt', @@ -176,17 +151,15 @@ describe('.files (the MFS API part)', function () { } const options = { hash: name, 'raw-leaves': false } - ipfs.add([file], options, (err, res) => { - if (err) return done(err) - expect(res).to.have.length(1) - const cid = new CID(res[0].hash) - expect(mh.decode(cid.multihash).name).to.equal(name) - done() - }) + const res = await ipfs.add([file], options) + + expect(res).to.have.length(1) + const cid = new CID(res[0].hash) + expect(mh.decode(cid.multihash).name).to.equal(name) }) }) - it('.add file with progress option', (done) => { + it('.add file with progress option', async () => { let progress let progressCount = 0 @@ -195,18 +168,14 @@ describe('.files (the MFS API part)', function () { progress = p } - ipfs.add(testfile, { progress: progressHandler }, (err, res) => { - expect(err).to.not.exist() - - expect(res).to.have.length(1) - expect(progress).to.be.equal(testfile.byteLength) - expect(progressCount).to.be.equal(1) + const res = await ipfs.add(testfile, { progress: progressHandler }) - done() - }) + expect(res).to.have.length(1) + expect(progress).to.be.equal(testfile.byteLength) + expect(progressCount).to.be.equal(1) }) - it('.add big file with progress option', (done) => { + it('.add big file with progress option', async () => { let progress = 0 let progressCount = 0 @@ -216,18 +185,14 @@ describe('.files (the MFS API part)', function () { } // TODO: needs to be using a big file - ipfs.add(testfile, { progress: progressHandler }, (err, res) => { - expect(err).to.not.exist() - - expect(res).to.have.length(1) - expect(progress).to.be.equal(testfile.byteLength) - expect(progressCount).to.be.equal(1) + const res = await ipfs.add(testfile, { progress: progressHandler }) - done() - }) + expect(res).to.have.length(1) + expect(progress).to.be.equal(testfile.byteLength) + expect(progressCount).to.be.equal(1) }) - it('.add directory with progress option', (done) => { + it('.add directory with progress option', async () => { let progress = 0 let progressCount = 0 @@ -237,28 +202,21 @@ describe('.files (the MFS API part)', function () { } // TODO: needs to be using a directory - ipfs.add(testfile, { progress: progressHandler }, (err, res) => { - expect(err).to.not.exist() - - expect(res).to.have.length(1) - expect(progress).to.be.equal(testfile.byteLength) - expect(progressCount).to.be.equal(1) + const res = await ipfs.add(testfile, { progress: progressHandler }) - done() - }) + expect(res).to.have.length(1) + expect(progress).to.be.equal(testfile.byteLength) + expect(progressCount).to.be.equal(1) }) - it('.add 
without progress options', (done) => { - ipfs.add(testfile, (err, res) => { - expect(err).to.not.exist() + it('.add without progress options', async () => { + const res = await ipfs.add(testfile) - expect(res).to.have.length(1) - done() - }) + expect(res).to.have.length(1) }) HASH_ALGS.forEach((name) => { - it(`.add with hash=${name} and raw-leaves=false`, (done) => { + it(`.add with hash=${name} and raw-leaves=false`, async () => { const content = String(Math.random() + Date.now()) const file = { path: content + '.txt', @@ -266,14 +224,11 @@ describe('.files (the MFS API part)', function () { } const options = { hash: name, 'raw-leaves': false } - ipfs.add([file], options, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add([file], options) - expect(res).to.have.length(1) - const cid = new CID(res[0].hash) - expect(mh.decode(cid.multihash).name).to.equal(name) - done() - }) + expect(res).to.have.length(1) + const cid = new CID(res[0].hash) + expect(mh.decode(cid.multihash).name).to.equal(name) }) }) @@ -293,200 +248,170 @@ describe('.files (the MFS API part)', function () { ) }) - it('.add with pull stream (callback)', (done) => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - - ipfs.add(values([Buffer.from('test')]), (err, res) => { - expect(err).to.not.exist() - - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - done() - }) - }) - - it('.add with pull stream (promise)', () => { + it('.add with pull stream', async () => { const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' + const res = await ipfs.add(values([Buffer.from('test')])) - return ipfs.add(values([Buffer.from('test')])) - .then((res) => { - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - }) + expect(res).to.have.length(1) + expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) }) - it('.add with array of objects with pull stream content', () => { + it('.add with array of objects with pull stream content', async () => { const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' + const res = await ipfs.add([{ content: values([Buffer.from('test')]) }]) - return ipfs.add([{ content: values([Buffer.from('test')]) }]) - .then((res) => { - expect(res).to.have.length(1) - expect(res[0]).to.eql({ path: expectedCid, hash: expectedCid, size: 12 }) - }) + expect(res).to.have.length(1) + expect(res[0]).to.eql({ path: expectedCid, hash: expectedCid, size: 12 }) }) - it('files.mkdir', (done) => { - ipfs.files.mkdir('/test-folder', done) + it('files.mkdir', async () => { + await ipfs.files.mkdir('/test-folder') }) - it('files.flush', (done) => { - ipfs.files.flush('/', done) + it('files.flush', async () => { + await ipfs.files.flush('/') }) - it('files.cp', () => { + it('files.cp', async () => { const folder = `/test-folder-${Math.random()}` - return ipfs.files.mkdir(folder) - .then(() => ipfs.files.cp([ - '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - `${folder}/test-file-${Math.random()}` - ])) + await ipfs.files.mkdir(folder) + await ipfs.files.cp([ + '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + `${folder}/test-file-${Math.random()}` + ]) }) - it('files.cp with non-array arguments', () => { + it('files.cp with non-array arguments', async () => { const folder = `/test-folder-${Math.random()}` - return ipfs.files.mkdir(folder) - .then(() => ipfs.files.cp( - 
'/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - `${folder}/test-file-${Math.random()}` - )) + await ipfs.files.mkdir(folder) + await ipfs.files.cp( + '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + `${folder}/test-file-${Math.random()}` + ) }) - it('files.mv', () => { + it('files.mv', async () => { const folder = `/test-folder-${Math.random()}` const source = `${folder}/test-file-${Math.random()}` const dest = `${folder}/test-file-${Math.random()}` - return ipfs.files.mkdir(folder) - .then(() => ipfs.files.cp( - '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - source - )) - .then(() => ipfs.files.mv([ - source, - dest - ])) + await ipfs.files.mkdir(folder) + await ipfs.files.cp( + '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + source + ) + await ipfs.files.mv([ + source, + dest + ]) }) - it('files.mv with non-array arguments', () => { + it('files.mv with non-array arguments', async () => { const folder = `/test-folder-${Math.random()}` const source = `${folder}/test-file-${Math.random()}` const dest = `${folder}/test-file-${Math.random()}` - return ipfs.files.mkdir(folder) - .then(() => ipfs.files.cp( - '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - source - )) - .then(() => ipfs.files.mv( - source, - dest - )) + await ipfs.files.mkdir(folder) + await ipfs.files.cp( + '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + source + ) + await ipfs.files.mv( + source, + dest + ) }) - it('files.ls', () => { + it('files.ls', async () => { const folder = `/test-folder-${Math.random()}` const file = `${folder}/test-file-${Math.random()}` - return ipfs.files.mkdir(folder) - .then(() => ipfs.files.write(file, Buffer.from('Hello, world'), { - create: true - })) - .then(() => ipfs.files.ls(folder)) - .then(files => { - expect(files.length).to.equal(1) - }) + await ipfs.files.mkdir(folder) + await ipfs.files.write(file, Buffer.from('Hello, world'), { + create: true + }) + const files = await ipfs.files.ls(folder) + + expect(files.length).to.equal(1) }) - it('files.ls mfs root by default', () => { + it('files.ls mfs root by default', async () => { const folder = `test-folder-${Math.random()}` - return ipfs.files.mkdir(`/${folder}`) - .then(() => ipfs.files.ls()) - .then(files => { - expect(files.find(file => file.name === folder)).to.be.ok() - }) + await ipfs.files.mkdir(`/${folder}`) + const files = await ipfs.files.ls() + + expect(files.find(file => file.name === folder)).to.be.ok() }) - it('files.write', (done) => { - ipfs.files - .write('/test-folder/test-file-2.txt', Buffer.from('hello world'), { create: true }, (err) => { - expect(err).to.not.exist() + it('files.write', async () => { + await ipfs.files.write('/test-folder/test-file-2.txt', Buffer.from('hello world'), { + create: true + }) - ipfs.files.read('/test-folder/test-file-2.txt', (err, buf) => { - expect(err).to.not.exist() - expect(buf.toString()).to.be.equal('hello world') - done() - }) - }) + const buf = await ipfs.files.read('/test-folder/test-file-2.txt') + + expect(buf.toString()).to.be.equal('hello world') }) - it('files.write without options', (done) => { - ipfs.files - .write('/test-folder/test-file-2.txt', Buffer.from('hello world'), (err) => { - expect(err).to.not.exist() + it('files.write without options', async () => { + await ipfs.files.write('/test-folder/test-file-2.txt', Buffer.from('hello world')) - ipfs.files.read('/test-folder/test-file-2.txt', (err, buf) => { - expect(err).to.not.exist() - expect(buf.toString()).to.be.equal('hello world') - done() - }) - }) + const 
buf = await ipfs.files.read('/test-folder/test-file-2.txt') + + expect(buf.toString()).to.be.equal('hello world') }) - it('files.stat', () => { + it('files.stat', async () => { const folder = `/test-folder-${Math.random()}` const file = `${folder}/test-file-${Math.random()}` - return ipfs.files.mkdir(folder) - .then(() => ipfs.files.write(file, testfile, { - create: true - })) - .then(() => ipfs.files.stat(file)) - .then((stats) => { - expect(stats).to.deep.equal({ - hash: 'QmQhouoDPAnzhVM148yCa9CbUXK65wSEAZBtgrLGHtmdmP', - size: 12, - cumulativeSize: 70, - blocks: 1, - type: 'file', - withLocality: false, - local: undefined, - sizeLocal: undefined - }) - }) - }) + await ipfs.files.mkdir(folder) + await ipfs.files.write(file, testfile, { + create: true + }) - it('files.stat file that does not exist()', (done) => { - ipfs.files.stat('/test-folder/does-not-exist()', (err, res) => { - expect(err).to.exist() - expect(err.code).to.equal(0) - expect(err.type).to.equal('error') + const stats = await ipfs.files.stat(file) + + expect(stats).to.deep.equal({ + hash: 'QmQhouoDPAnzhVM148yCa9CbUXK65wSEAZBtgrLGHtmdmP', + size: 12, + cumulativeSize: 70, + blocks: 1, + type: 'file', + withLocality: false, + local: undefined, + sizeLocal: undefined + }) + }) - done() + it('files.stat file that does not exist()', async () => { + await expect(ipfs.files.stat('/test-folder/does-not-exist()')).to.be.rejectedWith({ + code: 0, + type: 'error' }) }) - it('files.read', () => { + it('files.read', async () => { const folder = `/test-folder-${Math.random()}` const file = `${folder}/test-file-${Math.random()}` - return ipfs.files.mkdir(folder) - .then(() => ipfs.files.write(file, testfile, { - create: true - })) - .then(() => ipfs.files.read(file)) - .then((buf) => { - expect(Buffer.from(buf)).to.deep.equal(testfile) - }) + await ipfs.files.mkdir(folder) + await ipfs.files.write(file, testfile, { + create: true + }) + const buf = await ipfs.files.read(file) + + expect(Buffer.from(buf)).to.deep.equal(testfile) }) - it('files.rm without options', (done) => { - ipfs.files.rm('/test-folder/test-file-2.txt', done) + it('files.rm without options', async () => { + await ipfs.files.rm('/test-folder/test-file-2.txt') }) - it('files.rm', (done) => { - ipfs.files.rm('/test-folder', { recursive: true }, done) + it('files.rm', async () => { + await ipfs.files.rm('/test-folder', { recursive: true }) }) }) diff --git a/test/get.spec.js b/test/get.spec.js index 2c5adb6d98..acdcf833b7 100644 --- a/test/get.spec.js +++ b/test/get.spec.js @@ -5,17 +5,18 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') +const chaiAsPromised = require('chai-as-promised') const expect = chai.expect chai.use(dirtyChai) +chai.use(chaiAsPromised) const isNode = require('detect-node') -const series = require('async/series') const loadFixture = require('aegir/fixtures') const ipfsClient = require('../src') const f = require('./utils/factory') describe('.get (specific go-ipfs features)', function () { - this.timeout(20 * 1000) + this.timeout(60 * 1000) function fixture (path) { return loadFixture(path, 'interface-ipfs-core') @@ -29,89 +30,84 @@ describe('.get (specific go-ipfs features)', function () { let ipfsd let ipfs - before(function (done) { - series([ - (cb) => f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - cb() - }), - (cb) => ipfs.add(smallFile.data, cb) - ], done) + before(async () => { + ipfsd = await f.spawn({ + 
initOptions: { + bits: 1024, + profile: 'test' + } + }) + ipfs = ipfsClient(ipfsd.apiAddr) + + await ipfs.add(smallFile.data) }) - after((done) => { - if (!ipfsd) { return done() } - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - it('no compression args', (done) => { - ipfs.get(smallFile.cid, (err, files) => { - expect(err).to.not.exist() + it('no compression args', async () => { + const files = await ipfs.get(smallFile.cid) - expect(files).to.be.length(1) - expect(files[0].content.toString()).to.contain(smallFile.data.toString()) - done() - }) + expect(files).to.be.length(1) + expect(files[0].content.toString()).to.contain(smallFile.data.toString()) }) - it('archive true', (done) => { - ipfs.get(smallFile.cid, { archive: true }, (err, files) => { - expect(err).to.not.exist() + it('archive true', async () => { + const files = await ipfs.get(smallFile.cid, { archive: true }) - expect(files).to.be.length(1) - expect(files[0].content.toString()).to.contain(smallFile.data.toString()) - done() - }) + expect(files).to.be.length(1) + expect(files[0].content.toString()).to.contain(smallFile.data.toString()) }) - it('err with out of range compression level', (done) => { - ipfs.get(smallFile.cid, { + it('err with out of range compression level', async () => { + await expect(ipfs.get(smallFile.cid, { compress: true, 'compression-level': 10 - }, (err, files) => { - expect(err).to.exist() - expect(err.toString()).to.equal('Error: compression level must be between 1 and 9') - done() - }) + })).to.be.rejectedWith('compression level must be between 1 and 9') }) // TODO Understand why this test started failing - it.skip('with compression level', (done) => { - ipfs.get(smallFile.cid, { compress: true, 'compression-level': 1 }, done) + it.skip('with compression level', async () => { + await ipfs.get(smallFile.cid, { compress: true, 'compression-level': 1 }) }) - it('add path containing "+"s (for testing get)', (done) => { - if (!isNode) { return done() } + it('add path containing "+"s (for testing get)', async () => { + if (!isNode) { + return + } const filename = 'ti,c64x+mega++mod-pic.txt' const subdir = 'tmp/c++files' const expectedCid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff' - ipfs.add([{ + const files = await ipfs.add([{ path: subdir + '/' + filename, content: Buffer.from(subdir + '/' + filename, 'utf-8') - }], (err, files) => { - expect(err).to.not.exist() - expect(files[2].hash).to.equal(expectedCid) - done() - }) + }]) + + expect(files[2].hash).to.equal(expectedCid) }) - it('get path containing "+"s', (done) => { - if (!isNode) { return done() } + it('get path containing "+"s', async () => { + if (!isNode) { + return + } const cid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff' let count = 0 - ipfs.get(cid, (err, files) => { - expect(err).to.not.exist() - files.forEach((file) => { - if (file.path !== cid) { - count++ - expect(file.path).to.contain('+') - if (count === 2) done() + const files = await ipfs.get(cid) + + files.forEach((file) => { + if (file.path !== cid) { + count++ + expect(file.path).to.contain('+') + + if (count === 2) { + // done() } - }) + } }) }) }) diff --git a/test/interface.spec.js b/test/interface.spec.js index 86ffac21d1..fb54c422b4 100644 --- a/test/interface.spec.js +++ b/test/interface.spec.js @@ -304,14 +304,11 @@ describe('interface-ipfs-core tests', () => { const spawnOptions = { repoPath, config, initOptions: { bits: 1024, profile: 'test' } } - ipfsFactory.spawn(spawnOptions, (err, _ipfsd) => { - if (err) { - return 
cb(err) - } - - nodes.push(_ipfsd) - cb(null, ipfsClient(_ipfsd.apiAddr)) - }) + ipfsFactory.spawn(spawnOptions) + .then(ipfsd => { + nodes.push(ipfsd) + cb(null, ipfsClient(ipfsd.apiAddr)) + }, cb) } }) } diff --git a/test/key.spec.js b/test/key.spec.js index 8ed35d42bb..b332b91bac 100644 --- a/test/key.spec.js +++ b/test/key.spec.js @@ -16,73 +16,42 @@ describe('.key', function () { let ipfsd let ipfs - before((done) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - done() + before(async () => { + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } }) + ipfs = ipfsClient(ipfsd.apiAddr) }) - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - describe('Callback API', () => { - describe('.gen', () => { - it('create a new rsa key', (done) => { - ipfs.key.gen('foobarsa', { type: 'rsa', size: 2048 }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) - }) + describe('.gen', () => { + it('create a new rsa key', async () => { + const res = await ipfs.key.gen('foobarsa', { type: 'rsa', size: 2048 }) - it('create a new ed25519 key', (done) => { - ipfs.key.gen('bazed', { type: 'ed25519' }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) - }) + expect(res).to.exist() }) - describe('.list', () => { - it('both keys show up + self', (done) => { - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res.length).to.equal(3) - done() - }) - }) + it('create a new ed25519 key', async () => { + const res = await ipfs.key.gen('bazed', { type: 'ed25519' }) + + expect(res).to.exist() }) }) - describe('Promise API', () => { - describe('.gen', () => { - it('create a new rsa key', () => { - return ipfs.key.gen('foobarsa2', { type: 'rsa', size: 2048 }).then((res) => { - expect(res).to.exist() - }) - }) - - it('create a new ed25519 key', () => { - return ipfs.key.gen('bazed2', { type: 'ed25519' }).then((res) => { - expect(res).to.exist() - }) - }) - }) + describe('.list', () => { + it('both keys show up + self', async () => { + const res = await ipfs.key.list() - describe('.list', () => { - it('4 keys to show up + self', () => { - return ipfs.key.list().then((res) => { - expect(res).to.exist() - expect(res.length).to.equal(5) - }) - }) + expect(res).to.exist() + expect(res.length).to.equal(3) }) }) }) diff --git a/test/log.spec.js b/test/log.spec.js index 1e800f0751..73ff556e2b 100644 --- a/test/log.spec.js +++ b/test/log.spec.js @@ -16,58 +16,60 @@ describe('.log', function () { let ipfsd let ipfs - before((done) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - done() + before(async () => { + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } }) + ipfs = ipfsClient(ipfsd.apiAddr) }) - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - it('.log.tail', (done) => { - const i = setInterval(() => { - ipfs.add(Buffer.from('just adding some data to generate logs')) + it('.log.tail', async () => { + const i = setInterval(async () => { + try { + await ipfs.add(Buffer.from('just adding some data to generate logs')) + } catch (_) { + // this can error if the test has finished 
and we're shutting down the node + } }, 1000) - const req = ipfs.log.tail((err, res) => { - expect(err).to.not.exist() - expect(req).to.exist() + const res = await ipfs.log.tail() + + return new Promise((resolve, reject) => { + res.on('error', (err) => { + reject(err) + }) res.once('data', (obj) => { clearInterval(i) expect(obj).to.be.an('object') - done() + res.end() + resolve() }) }) }) - it('.log.ls', (done) => { - ipfs.log.ls((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - - expect(res).to.be.an('array') + it('.log.ls', async () => { + const res = await ipfs.log.ls() - done() - }) + expect(res).to.exist() + expect(res).to.be.an('array') }) - it('.log.level', (done) => { - ipfs.log.level('all', 'error', (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - - expect(res).to.be.an('object') - expect(res).to.not.have.property('Error') - expect(res).to.have.property('Message') + it('.log.level', async () => { + const res = await ipfs.log.level('all', 'error') - done() - }) + expect(res).to.exist() + expect(res).to.be.an('object') + expect(res).to.not.have.property('Error') + expect(res).to.have.property('Message') }) }) diff --git a/test/node/swarm.js b/test/node/swarm.js index 0ba0fe73f6..3ec634aeac 100644 --- a/test/node/swarm.js +++ b/test/node/swarm.js @@ -4,8 +4,10 @@ const nock = require('nock') const chai = require('chai') const dirtyChai = require('dirty-chai') +const chaiAsPromised = require('chai-as-promised') const expect = chai.expect chai.use(dirtyChai) +chai.use(chaiAsPromised) const ipfsClient = require('../../src') @@ -15,7 +17,7 @@ describe('.swarm.peers', function () { const ipfs = ipfsClient('/ip4/127.0.0.1/tcp/5001') const apiUrl = 'http://127.0.0.1:5001' - it('handles a peer response', (done) => { + it('handles a peer response', async () => { const response = { Peers: [{ Addr: '/ip4/104.131.131.82/tcp/4001', Peer: 'QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ', Latency: '', Muxer: '', Streams: null }] } const scope = nock(apiUrl) @@ -23,19 +25,17 @@ describe('.swarm.peers', function () { .query(true) .reply(200, response) - ipfs.swarm.peers((err, res) => { - expect(err).to.not.exist() - expect(res).to.be.a('array') - expect(res.length).to.equal(1) - expect(res[0].error).to.not.exist() - expect(res[0].addr.toString()).to.equal(response.Peers[0].Addr) - expect(res[0].peer.toB58String()).to.equal(response.Peers[0].Peer) - expect(scope.isDone()).to.equal(true) - done() - }) + const res = await ipfs.swarm.peers() + + expect(res).to.be.a('array') + expect(res.length).to.equal(1) + expect(res[0].error).to.not.exist() + expect(res[0].addr.toString()).to.equal(response.Peers[0].Addr) + expect(res[0].peer.toB58String()).to.equal(response.Peers[0].Peer) + expect(scope.isDone()).to.equal(true) }) - it('handles a go-ipfs <= 0.4.4 peer response', (done) => { + it('handles a go-ipfs <= 0.4.4 peer response', async () => { const response = { Strings: ['/ip4/73.109.217.59/tcp/49311/ipfs/QmWjxEGC7BthJrCf7QTModrcsRweHbupdPTY4oGMVoDZXm'] } const scope = nock(apiUrl) @@ -43,19 +43,17 @@ describe('.swarm.peers', function () { .query(true) .reply(200, response) - ipfs.swarm.peers((err, res) => { - expect(err).to.not.exist() - expect(res).to.be.a('array') - expect(res.length).to.equal(1) - expect(res[0].error).to.not.exist() - expect(res[0].addr.toString()).to.equal('/ip4/73.109.217.59/tcp/49311/ipfs/QmWjxEGC7BthJrCf7QTModrcsRweHbupdPTY4oGMVoDZXm') - expect(res[0].peer.toB58String()).to.equal('QmWjxEGC7BthJrCf7QTModrcsRweHbupdPTY4oGMVoDZXm') - 
expect(scope.isDone()).to.equal(true) - done() - }) + const res = await ipfs.swarm.peers() + + expect(res).to.be.a('array') + expect(res.length).to.equal(1) + expect(res[0].error).to.not.exist() + expect(res[0].addr.toString()).to.equal('/ip4/73.109.217.59/tcp/49311/ipfs/QmWjxEGC7BthJrCf7QTModrcsRweHbupdPTY4oGMVoDZXm') + expect(res[0].peer.toB58String()).to.equal('QmWjxEGC7BthJrCf7QTModrcsRweHbupdPTY4oGMVoDZXm') + expect(scope.isDone()).to.equal(true) }) - it('handles an ip6 quic peer', (done) => { + it('handles an ip6 quic peer', async () => { const response = { Peers: [{ Addr: '/ip6/2001:8a0:7ac5:4201:3ac9:86ff:fe31:7095/udp/4001/quic', Peer: 'QmcgpsyWgH8Y8ajJz1Cu72KnS5uo2Aa2LpzU7kinSupNKC', Latency: '', Muxer: '', Streams: null }] } const scope = nock(apiUrl) @@ -63,19 +61,17 @@ describe('.swarm.peers', function () { .query(true) .reply(200, response) - ipfs.swarm.peers((err, res) => { - expect(err).to.not.exist() - expect(res).to.be.a('array') - expect(res.length).to.equal(1) - expect(res[0].error).to.not.exist() - expect(res[0].addr.toString()).to.equal(response.Peers[0].Addr) - expect(res[0].peer.toB58String()).to.equal(response.Peers[0].Peer) - expect(scope.isDone()).to.equal(true) - done() - }) + const res = await ipfs.swarm.peers() + + expect(res).to.be.a('array') + expect(res.length).to.equal(1) + expect(res[0].error).to.not.exist() + expect(res[0].addr.toString()).to.equal(response.Peers[0].Addr) + expect(res[0].peer.toB58String()).to.equal(response.Peers[0].Peer) + expect(scope.isDone()).to.equal(true) }) - it('handles unvalidatable peer addr', (done) => { + it('handles unvalidatable peer addr', async () => { const response = { Peers: [{ Addr: '/ip4/104.131.131.82/future-tech', Peer: 'QmcgpsyWgH8Y8ajJz1Cu72KnS5uo2Aa2LpzU7kinSupNKC', Latency: '', Muxer: '', Streams: null }] } const scope = nock(apiUrl) @@ -83,28 +79,23 @@ describe('.swarm.peers', function () { .query(true) .reply(200, response) - ipfs.swarm.peers((err, res) => { - expect(err).to.not.exist() - expect(res).to.be.a('array') - expect(res.length).to.equal(1) - expect(res[0].error).to.exist() - expect(res[0].rawPeerInfo).to.deep.equal(response.Peers[0]) - expect(scope.isDone()).to.equal(true) - done() - }) + const res = await ipfs.swarm.peers() + + expect(res).to.be.a('array') + expect(res.length).to.equal(1) + expect(res[0].error).to.exist() + expect(res[0].rawPeerInfo).to.deep.equal(response.Peers[0]) + expect(scope.isDone()).to.equal(true) }) - it('handles an error response', (done) => { + it('handles an error response', async () => { const scope = nock(apiUrl) .post('/api/v0/swarm/peers') .query(true) .replyWithError('something awful happened') - ipfs.swarm.peers((err, res) => { - expect(err.message).to.equal('something awful happened') - expect(res).to.not.exist() - expect(scope.isDone()).to.equal(true) - done() - }) + await expect(ipfs.swarm.peers()).to.be.rejectedWith('something awful happened') + + expect(scope.isDone()).to.equal(true) }) }) diff --git a/test/ping.spec.js b/test/ping.spec.js index 5f6ecd3eae..da141702cd 100644 --- a/test/ping.spec.js +++ b/test/ping.spec.js @@ -3,13 +3,12 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') -const pull = require('pull-stream/pull') -const collect = require('pull-stream/sinks/collect') +const chaiAsPromised = require('chai-as-promised') const expect = chai.expect chai.use(dirtyChai) - -const parallel = require('async/parallel') -const series = require('async/series') +chai.use(chaiAsPromised) +const pull = require('pull-stream/pull') +const 
collect = require('pull-stream/sinks/collect') const ipfsClient = require('../src') const PingMessageStream = require('../src/utils/ping-message-stream') @@ -29,126 +28,97 @@ describe('.ping', function () { let otherd let otherId - before(function (done) { + before(async function () { this.timeout(30 * 1000) // slow CI - series([ - (cb) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - cb() - }) - }, - (cb) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, node) => { - expect(err).to.not.exist() - other = node.api - otherd = node - cb() - }) - }, - (cb) => { - parallel([ - (cb) => { - ipfs.id((err, id) => { - expect(err).to.not.exist() - const ma = id.addresses[0] - other.swarm.connect(ma, cb) - }) - }, - (cb) => { - other.id((err, id) => { - expect(err).to.not.exist() - otherId = id.id - cb() - }) - } - ], cb) + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' } - ], done) - }) + }) + ipfs = ipfsClient(ipfsd.apiAddr) - after((done) => { - parallel([ - (cb) => { - if (!ipfsd) return cb() - ipfsd.stop(cb) - }, - (cb) => { - if (!otherd) return cb() - otherd.stop(cb) + otherd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' } - ], done) + }) + other = otherd.api + + const ma = (await ipfs.id()).addresses[0] + await other.swarm.connect(ma) + + otherId = (await other.id()).id }) - it('.ping with default n', (done) => { - ipfs.ping(otherId, (err, res) => { - expect(err).to.not.exist() - expect(res).to.be.an('array') - expect(res.filter(isPong)).to.have.lengthOf(1) - res.forEach(packet => { - expect(packet).to.have.keys('success', 'time', 'text') - expect(packet.time).to.be.a('number') - }) - const resultMsg = res.find(packet => packet.text.includes('Average latency')) - expect(resultMsg).to.exist() - done() + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } + + if (otherd) { + await otherd.stop() + } + }) + + it('.ping with default n', async () => { + const res = await ipfs.ping(otherId) + + expect(res).to.be.an('array') + expect(res.filter(isPong)).to.have.lengthOf(1) + res.forEach(packet => { + expect(packet).to.have.keys('success', 'time', 'text') + expect(packet.time).to.be.a('number') }) + + const resultMsg = res.find(packet => packet.text.includes('Average latency')) + expect(resultMsg).to.exist() }) - it('.ping with count = 2', (done) => { - ipfs.ping(otherId, { count: 2 }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.be.an('array') - expect(res.filter(isPong)).to.have.lengthOf(2) - res.forEach(packet => { - expect(packet).to.have.keys('success', 'time', 'text') - expect(packet.time).to.be.a('number') - }) - const resultMsg = res.find(packet => packet.text.includes('Average latency')) - expect(resultMsg).to.exist() - done() + it('.ping with count = 2', async () => { + const res = await ipfs.ping(otherId, { count: 2 }) + + expect(res).to.be.an('array') + expect(res.filter(isPong)).to.have.lengthOf(2) + res.forEach(packet => { + expect(packet).to.have.keys('success', 'time', 'text') + expect(packet.time).to.be.a('number') }) + const resultMsg = res.find(packet => packet.text.includes('Average latency')) + expect(resultMsg).to.exist() }) - it('.ping with n = 2', (done) => { - ipfs.ping(otherId, { n: 2 }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.be.an('array') - expect(res.filter(isPong)).to.have.lengthOf(2) - res.forEach(packet => { - 
expect(packet).to.have.keys('success', 'time', 'text') - expect(packet.time).to.be.a('number') - }) - const resultMsg = res.find(packet => packet.text.includes('Average latency')) - expect(resultMsg).to.exist() - done() + it('.ping with n = 2', async () => { + const res = await ipfs.ping(otherId, { n: 2 }) + + expect(res).to.be.an('array') + expect(res.filter(isPong)).to.have.lengthOf(2) + res.forEach(packet => { + expect(packet).to.have.keys('success', 'time', 'text') + expect(packet.time).to.be.a('number') }) + const resultMsg = res.find(packet => packet.text.includes('Average latency')) + expect(resultMsg).to.exist() }) - it('.ping fails with count & n', function (done) { + it('.ping fails with count & n', async function () { this.timeout(20 * 1000) - ipfs.ping(otherId, { count: 2, n: 2 }, (err, res) => { - expect(err).to.exist() - done() - }) + await expect(ipfs.ping(otherId, { count: 2, n: 2 })).to.be.rejected() }) - it('.ping with Promises', () => { - return ipfs.ping(otherId) - .then((res) => { - expect(res).to.be.an('array') - expect(res.filter(isPong)).to.have.lengthOf(1) - res.forEach(packet => { - expect(packet).to.have.keys('success', 'time', 'text') - expect(packet.time).to.be.a('number') - }) - const resultMsg = res.find(packet => packet.text.includes('Average latency')) - expect(resultMsg).to.exist() - }) + it('.ping with Promises', async () => { + const res = await ipfs.ping(otherId) + expect(res).to.be.an('array') + expect(res.filter(isPong)).to.have.lengthOf(1) + res.forEach(packet => { + expect(packet).to.have.keys('success', 'time', 'text') + expect(packet.time).to.be.a('number') + }) + const resultMsg = res.find(packet => packet.text.includes('Average latency')) + expect(resultMsg).to.exist() }) it('.pingPullStream', (done) => { diff --git a/test/repo.spec.js b/test/repo.spec.js index e018688107..c6306f5af4 100644 --- a/test/repo.spec.js +++ b/test/repo.spec.js @@ -15,43 +15,39 @@ describe('.repo', function () { let ipfs let ipfsd - before((done) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - done() + before(async () => { + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } }) + ipfs = ipfsClient(ipfsd.apiAddr) }) - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - it('.repo.gc', (done) => { - ipfs.repo.gc((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) + it('.repo.gc', async () => { + const res = await ipfs.repo.gc() + + expect(res).to.exist() }) - it('.repo.stat', (done) => { - ipfs.repo.stat((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.a.property('numObjects') - expect(res).to.have.a.property('repoSize') - done() - }) + it('.repo.stat', async () => { + const res = await ipfs.repo.stat() + + expect(res).to.exist() + expect(res).to.have.a.property('numObjects') + expect(res).to.have.a.property('repoSize') }) - it('.repo.version', (done) => { - ipfs.repo.version((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) + it('.repo.version', async () => { + const res = await ipfs.repo.version() + + expect(res).to.exist() }) }) diff --git a/test/stats.spec.js b/test/stats.spec.js index 4597e03971..66b76b2707 100644 --- a/test/stats.spec.js +++ b/test/stats.spec.js @@ -15,59 +15,55 @@ describe('stats', function () { let ipfs let ipfsd - 
before((done) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - done() + before(async () => { + ipfsd = await f.spawn({ + initOptions: { + bits: 1024, + profile: 'test' + } }) + ipfs = ipfsClient(ipfsd.apiAddr) }) - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) + after(async () => { + if (ipfsd) { + await ipfsd.stop() + } }) - it('.stats.bitswap', (done) => { - ipfs.stats.bitswap((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.a.property('provideBufLen') - expect(res).to.have.a.property('wantlist') - expect(res).to.have.a.property('peers') - expect(res).to.have.a.property('blocksReceived') - expect(res).to.have.a.property('dataReceived') - expect(res).to.have.a.property('blocksSent') - expect(res).to.have.a.property('dataSent') - expect(res).to.have.a.property('dupBlksReceived') - expect(res).to.have.a.property('dupDataReceived') - done() - }) + it('.stats.bitswap', async () => { + const res = await ipfs.stats.bitswap() + + expect(res).to.exist() + expect(res).to.have.a.property('provideBufLen') + expect(res).to.have.a.property('wantlist') + expect(res).to.have.a.property('peers') + expect(res).to.have.a.property('blocksReceived') + expect(res).to.have.a.property('dataReceived') + expect(res).to.have.a.property('blocksSent') + expect(res).to.have.a.property('dataSent') + expect(res).to.have.a.property('dupBlksReceived') + expect(res).to.have.a.property('dupDataReceived') }) - it('.stats.bw', (done) => { - ipfs.stats.bw((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.a.property('totalIn') - expect(res).to.have.a.property('totalOut') - expect(res).to.have.a.property('rateIn') - expect(res).to.have.a.property('rateOut') - done() - }) + it('.stats.bw', async () => { + const res = await ipfs.stats.bw() + + expect(res).to.exist() + expect(res).to.have.a.property('totalIn') + expect(res).to.have.a.property('totalOut') + expect(res).to.have.a.property('rateIn') + expect(res).to.have.a.property('rateOut') }) - it('.stats.repo', (done) => { - ipfs.stats.repo((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.a.property('numObjects') - expect(res).to.have.a.property('repoSize') - expect(res).to.have.a.property('repoPath') - expect(res).to.have.a.property('version') - expect(res).to.have.a.property('storageMax') - done() - }) + it('.stats.repo', async () => { + const res = await ipfs.stats.repo() + + expect(res).to.exist() + expect(res).to.have.a.property('numObjects') + expect(res).to.have.a.property('repoSize') + expect(res).to.have.a.property('repoPath') + expect(res).to.have.a.property('version') + expect(res).to.have.a.property('storageMax') }) }) diff --git a/test/utils/interface-common-factory.js b/test/utils/interface-common-factory.js index ad50100802..6f1cad7ca1 100644 --- a/test/utils/interface-common-factory.js +++ b/test/utils/interface-common-factory.js @@ -23,14 +23,11 @@ function createFactory (options) { setup = (callback) => { callback(null, { spawnNode (cb) { - ipfsFactory.spawn(options.spawnOptions, (err, _ipfsd) => { - if (err) { - return cb(err) - } - - nodes.push(_ipfsd) - cb(null, ipfsClient(_ipfsd.apiAddr)) - }) + ipfsFactory.spawn(options.spawnOptions) + .then((ipfsd) => { + nodes.push(ipfsd) + cb(null, ipfsClient(ipfsd.apiAddr)) + }, cb) } }) } @@ -39,7 +36,7 @@ function createFactory (options) { if 
(options.createTeardown) { teardown = options.createTeardown({ ipfsFactory, nodes }, options) } else { - teardown = callback => each(nodes, (node, cb) => node.stop(cb), callback) + teardown = callback => each(nodes, (node, cb) => node.stop().then(() => cb(), cb), callback) } return { setup, teardown }
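
The whole diff applies one migration pattern: callback-style `before`/`after`/`it` blocks become `async` functions that `await` the promise-returning `f.spawn()` and `ipfsd.stop()` from the bumped ipfsd-ctl (~0.45.0) as well as the client methods themselves, and error expectations move from `(err, res)` callback checks to chai-as-promised rejection assertions. The sketch below condenses that pattern into one self-contained spec for reference; it is illustrative only, not part of the diff, and the describe title, test names, and the `/does-not-exist` path are invented for the example.

```js
'use strict'

const chai = require('chai')
const dirtyChai = require('dirty-chai')
const chaiAsPromised = require('chai-as-promised')
const expect = chai.expect
chai.use(dirtyChai)
chai.use(chaiAsPromised)
const ipfsClient = require('../src')
const f = require('./utils/factory')

describe('async/await migration pattern (illustrative)', function () {
  this.timeout(60 * 1000) // slow CI

  let ipfsd
  let ipfs

  before(async () => {
    // ipfsd-ctl ~0.45.0: spawn() returns a promise that resolves with the daemon
    ipfsd = await f.spawn({ initOptions: { bits: 1024, profile: 'test' } })
    ipfs = ipfsClient(ipfsd.apiAddr)
  })

  after(async () => {
    // stop() is promise-returning too, so the teardown guard becomes a plain if
    if (ipfsd) {
      await ipfsd.stop()
    }
  })

  it('awaits results instead of passing a callback', async () => {
    const res = await ipfs.id()

    expect(res).to.have.a.property('id')
  })

  it('asserts errors as rejected promises', async () => {
    // replaces the old `(err, res) => { expect(err).to.exist() ... }` shape
    await expect(ipfs.files.stat('/does-not-exist')).to.be.rejected()
  })
})
```

As in the specs above, dirty-chai turns chai's property assertions into callable ones, which is why `exist()` and `rejected()` end with parentheses.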