7 changes: 6 additions & 1 deletion karma.conf.js
@@ -19,13 +19,18 @@ module.exports = function (config) {

     webpack: {
       resolve: {
-        extensions: ['', '.js']
+        extensions: ['', '.js', '.json']
       },
       externals: {
         fs: '{}'
       },
       node: {
         Buffer: true
       },
+      module: {
+        loaders: [
+          { test: /\.json$/, loader: 'json' }
+        ]
+      }
     },

6 changes: 5 additions & 1 deletion package.json
@@ -38,6 +38,7 @@
"chai": "^3.4.1",
"fs-blob-store": "^5.2.1",
"istanbul": "^0.4.1",
"json-loader": "^0.5.4",
"karma": "^0.13.19",
"karma-chrome-launcher": "^0.2.2",
"karma-cli": "^0.1.2",
@@ -54,13 +55,16 @@
"raw-loader": "^0.5.1",
"rimraf": "^2.4.4",
"standard": "^5.4.1",
"webpack": "^1.12.11"
"webpack": "diasdavid/webpack#81f5994"
},
"dependencies": {
"bl": "^1.0.0",
"boom": "^3.1.1",
"bs58": "^3.0.0",
"debug": "^2.2.0",
"hapi": "^12.0.0",
"ipfs-repo": "^0.4.1",
"ipfs-merkle-dag": "vijayee/js-ipfs-merkle-dag",
"lodash.get": "^4.0.0",
"lodash.set": "^4.0.0",
"ronin": "^0.3.11"
2 changes: 1 addition & 1 deletion src/http-api/index.js
@@ -22,7 +22,7 @@ exports.start = callback => {
   })

   server.connection({
-    port: 9000
+    port: 9001
   })

   // load routes
30 changes: 30 additions & 0 deletions src/ipfs-core/index.js
@@ -1,6 +1,10 @@
 'use strict'

 const defaultRepo = require('./default-repo')
+// const bl = require('bl')
+const MerkleDAG = require('ipfs-merkle-dag')
+const BlockService = MerkleDAG.BlockService
+// const Block = MerkleDAG.Block

 exports = module.exports = IPFS

@@ -12,6 +16,7 @@ function IPFS (repo) {
   if (!repo) {
     repo = defaultRepo()
   }
+  const bs = new BlockService(repo)

   this.daemon = callback => {
     // 1. read repo to get peer data
@@ -124,4 +129,29 @@ function IPFS (repo) {
       })
     }
   }
+
+  this.block = {
+    get: (multihash, callback) => {
+      bs.getBlock(multihash, callback)
+    },
+    put: (block, callback) => {
+      bs.addBlock(block, callback)
+    },
+    del: (multihash, callback) => {
+      bs.deleteBlock(multihash, callback)
+    },
+    stat: (multihash, callback) => {
+      bs.getBlock(multihash, (err, block) => {
+        if (err) {
+          return callback(err)
+        }
+        console.log('->', block.data.length)
+        console.log('->', block.data)
+        callback(null, {
+          Key: multihash,
+          Size: block.data.length
+        })
+      })
+    }
+  }
 }
73 changes: 73 additions & 0 deletions tests/test-core/test-block.js
@@ -0,0 +1,73 @@
/* globals describe, it */

'use strict'

const expect = require('chai').expect
const base58 = require('bs58')
const fs = require('fs')
const IPFS = require('../../src/ipfs-core')
const Block = require('ipfs-merkle-dag').Block

const isNode = !global.window

const fileA = isNode
? fs.readFileSync(process.cwd() + '/tests/repo-example/blocks/12207028/122070286b9afa6620a66f715c7020d68af3d10e1a497971629c07606bfdb812303d.data')
: new Buffer(require('raw!./../repo-example/blocks/12207028/122070286b9afa6620a66f715c7020d68af3d10e1a497971629c07606bfdb812303d.data'))
Member Author commented:

bug found (this took me a bit to figure out :))

When raw-loader is used, the contents of the file are returned as a string. That would be fine if the file were plain text, but this file is an encoded protobuf, so the UTF-8 conversion adds extra bytes to the Buffer, inflating the size of the Block and making the test fail. We need another way to load the blocks into localStorage.
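A minimal sketch of the failure mode described above; illustrative only and not part of the PR. The file path is hypothetical and the sizes are examples; it only assumes Node's Buffer behaviour of the era (new Buffer(str) encodes as UTF-8):

// Illustration only: reading a binary block through a UTF-8 string inflates it,
// because byte sequences that are not valid UTF-8 get re-encoded on the way back.
const fs = require('fs')

const original = fs.readFileSync('/tmp/example-block.data') // raw protobuf bytes (hypothetical path)
const asString = original.toString('utf8')                  // roughly what raw-loader hands back
const roundTripped = new Buffer(asString)                   // Buffer rebuilt from that string

console.log(original.length)                // e.g. 309
console.log(roundTripped.length)            // larger than 309 for binary data
console.log(original.equals(roundTripped))  // false

One possible workaround, not what this PR does, would be to store the fixture as base64 text and decode it with new Buffer(str, 'base64'), so the bytes survive the string round-trip.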

console.log('=>', fileA)

describe('block', () => {
  var ipfs

  it('get', done => {
    ipfs = new IPFS()
    const b58mh = 'QmVtU7ths96fMgZ8YSZAbKghyieq7AjxNdcqyVzxTt3qVe'
    const mh = new Buffer(base58.decode(b58mh))
    ipfs.block.get(mh, (err, block) => {
      expect(err).to.not.exist
      const eq = fileA.equals(block.data)
      expect(eq).to.equal(true)
      done()
    })
  })
  it('put', done => {
    var b = new Block('random data')
    ipfs.block.put(b, function (err) {
      expect(err).to.not.exist
      ipfs.block.get(b.key, function (err, block) {
        expect(err).to.not.exist
        expect(b.data.equals(block.data)).to.equal(true)
        expect(b.key.equals(block.key)).to.equal(true)
        done()
      })
    })
  })

  it('rm', done => {
    var b = new Block('I will not last long enough')
    ipfs.block.put(b, function (err) {
      expect(err).to.not.exist
      ipfs.block.get(b.key, function (err, block) {
        expect(err).to.not.exist
        ipfs.block.del(b.key, function (err) {
          expect(err).to.not.exist
          ipfs.block.get(b.key, function (err, block) {
            expect(err).to.exist
            done()
          })
        })
      })
    })
  })

  it('stat', done => {
    const mh = new Buffer(base58
      .decode('QmVtU7ths96fMgZ8YSZAbKghyieq7AjxNdcqyVzxTt3qVe'))
    ipfs.block.stat(mh, (err, stats) => {
      expect(err).to.not.exist
      expect(stats.Key.equals(mh)).to.equal(true)
      expect(stats.Size).to.equal(309)
      done()
    })
  })
})