From a86753a9a49f1beed3e50596d8e36c38fa721826 Mon Sep 17 00:00:00 2001
From: Stephen Whitmore
Date: Tue, 7 Jun 2016 09:05:36 -0700
Subject: [PATCH 1/4] Adds API for ipfs.files.get.

---
 README.md | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 721378ae..2f9ee61c 100644
--- a/README.md
+++ b/README.md
@@ -20,6 +20,7 @@
 - [Files](/API/files)
   - [`add`](/API/files#add)
   - [`createAddStream`](/files#createaddstream)
+  - [`get`](/API/files#get)
   - [`cat`](/API/files#cat)
 - [Object](/API/object)
   - [`object.new`](/API/object#objectnew)
@@ -99,7 +100,7 @@ test.all(common)
 
 ## API
 
-A valid (read: that follows this interface) IPFS core implementation, must expose the API described in [/API](/API)
+A valid (read: that follows this interface) IPFS core implementation must expose the API described in [/API](/API).
 
 ## Contribute
 
@@ -114,3 +115,5 @@ This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/c
 ## License
 
 MIT
+
+[UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs

From 2e94ac94b54951195a932901f6ad56ea08bed1c8 Mon Sep 17 00:00:00 2001
From: Stephen Whitmore
Date: Tue, 7 Jun 2016 17:09:21 -0700
Subject: [PATCH 2/4] Add ipfs.files.get tests.

---
 package.json |   1 +
 src/files.js | 198 ++++++++++++++++++++++++++++++++++++++++++++-------
 2 files changed, 174 insertions(+), 25 deletions(-)

diff --git a/package.json b/package.json
index d8a6dbc4..5435d84e 100644
--- a/package.json
+++ b/package.json
@@ -31,6 +31,7 @@
     "bl": "^1.1.2",
     "bs58": "^3.0.0",
     "chai": "^3.5.0",
+    "concat-stream": "^1.5.1",
     "detect-node": "^2.0.3",
     "ipfs-merkle-dag": "^0.6.0",
     "readable-stream": "1.1.13"
diff --git a/src/files.js b/src/files.js
index 766ce4df..83ea4c52 100644
--- a/src/files.js
+++ b/src/files.js
@@ -6,20 +6,28 @@ const bs58 = require('bs58')
 const Readable = require('readable-stream')
 const path = require('path')
 const fs = require('fs')
-const isNode = require('detect-node')
 const bl = require('bl')
+const concat = require('concat-stream')
+const through = require('through2')
 
 module.exports = (common) => {
-  describe('.files', () => {
+  describe.only('.files', () => {
     let smallFile
     let bigFile
+    let directoryContent
     let ipfs
 
     before((done) => {
-      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt')
-)
-      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random')
-)
+      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
+      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))
+      directoryContent = {
+        'pp.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/pp.txt')),
+        'holmes.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/holmes.txt')),
+        'jungle.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/jungle.txt')),
+        'alice.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/alice.txt')),
+        'files/hello.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/hello.txt')),
+        'files/ipfs.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/ipfs.txt'))
+      }
 
       common.setup((err, _ipfs) => {
         expect(err).to.not.exist
@@ -100,15 +108,9 @@ module.exports = (common) => {
       })
 
       it('add a nested dir as array', (done) => {
-        if (!isNode) {
-          return done()
-          // can't run this test cause browserify
-          // can't shim readFileSync in runtime
-        }
-        const base = path.join(__dirname, 'data/test-folder')
         const content = (name) => ({
           path: `test-folder/${name}`,
-          content: fs.readFileSync(path.join(base, name))
+          content: directoryContent[name]
        })
        const emptyDir = (name) => ({
          path: `test-folder/${name}`
        })
@@ -138,21 +140,13 @@
 
     describe('.createAddStream', () => {
       it('stream of valid files and dirs', (done) => {
-        if (!isNode) {
-          return done()
-          // can't run this test cause browserify
-          // can't shim readFileSync in runtime
-        }
-
-        const base = path.join(__dirname, 'data/test-folder')
         const content = (name) => ({
           path: `test-folder/${name}`,
-          content: fs.readFileSync(path.join(base, name))
+          content: directoryContent[name]
         })
         const emptyDir = (name) => ({
           path: `test-folder/${name}`
         })
-
         const files = [
           content('pp.txt'),
           content('holmes.txt'),
@@ -241,7 +235,7 @@
     })
 
     describe('.cat', () => {
-      it('with a bas58 multihash encoded string', () => {
+      it('with a base58 multihash encoded string', () => {
        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
 
        return ipfs.cat(hash)
@@ -273,11 +267,165 @@
         const hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
         return ipfs.cat(hash)
           .then((stream) => {
-            stream.pipe(bl((err, bldata) => {
+            stream.pipe(bl((err, data) => {
               expect(err).to.not.exist
-              expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
+              expect(data.toString()).to.contain('Check out some of the other files in this directory:')
+            }))
+          })
+        })
+      })
+    })
+
+    describe('.get', () => {
+      it('with a base58 encoded multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(err).to.not.exist
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
+      it('with a multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        const mhBuf = new Buffer(bs58.decode(hash))
+        ipfs.files.get(mhBuf, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.deep.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
+      it('large file', (done) => {
+        const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+
+          // accumulate the files and their content
+          var files = []
+          stream.pipe(through.obj((file, enc, next) => {
+            file.content.pipe(concat((content) => {
+              files.push({
+                path: file.path,
+                content: content
+              })
+              next()
+            }))
+          }, () => {
+            expect(files.length).to.equal(1)
+            expect(files[0].path).to.equal(hash)
+            expect(files[0].content).to.deep.equal(bigFile)
+            done()
+          }))
+        })
+      })
+
+      it('directory', (done) => {
+        const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+
+          // accumulate the files and their content
+          var files = []
+          stream.pipe(through.obj((file, enc, next) => {
+            if (file.content) {
+              file.content.pipe(concat((content) => {
+                files.push({
+                  path: file.path,
+                  content: content
+                })
+                next()
+              }))
+            } else {
+              files.push(file)
+              next()
+            }
+          }, () => {
+            // Check paths
+            var paths = files.map((file) => {
+              return file.path
+            })
+            expect(paths).to.deep.equal([
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt'
+            ])
+
+            // Check contents
+            var contents = files.map((file) => {
+              return file.content ? file.content : null
+            })
+            expect(contents).to.deep.equal([
+              null,
+              directoryContent['alice.txt'],
+              null,
+              null,
+              null,
+              directoryContent['files/hello.txt'],
+              directoryContent['files/ipfs.txt'],
+              directoryContent['holmes.txt'],
+              directoryContent['jungle.txt'],
+              directoryContent['pp.txt']
+            ])
+            done()
+          }))
+        })
+      })
+
+      describe('promise', () => {
+        it('with a base58 encoded string', (done) => {
+          const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+          ipfs.files.get(hash)
+            .then((stream) => {
+              stream.pipe(concat((files) => {
+                expect(files).to.be.length(1)
+                expect(files[0].path).to.equal(hash)
+                files[0].content.pipe(concat((content) => {
+                  expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+                  done()
+                }))
+              }))
+            })
+            .catch((err) => {
+              expect(err).to.not.exist
+            })
+        })
+
+        it('errors on invalid key', (done) => {
+          const hash = 'somethingNotMultihash'
+          ipfs.files.get(hash)
+            .then((stream) => {})
+            .catch((err) => {
+              expect(err).to.exist
+              const errString = err.toString()
+              if (errString === 'Error: invalid ipfs ref path') {
+                expect(err.toString()).to.contain('Error: invalid ipfs ref path')
+              }
+              if (errString === 'Error: Invalid Key') {
+                expect(err.toString()).to.contain('Error: Invalid Key')
+              }
+              done()
+            })
+        })
+      })
+    })
   })
 })

From 08a5aa0872e51c9cf924a71feaa456ea52d69e76 Mon Sep 17 00:00:00 2001
From: Stephen Whitmore
Date: Tue, 9 Aug 2016 09:35:40 -0700
Subject: [PATCH 3/4] Add API reference for files.get.

---
 API/files/README.md | 44 ++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 42 insertions(+), 2 deletions(-)

diff --git a/API/files/README.md b/API/files/README.md
index 3b47e3fd..1db9f929 100644
--- a/API/files/README.md
+++ b/API/files/README.md
@@ -93,9 +93,9 @@ ipfs.files.createAddStream(function (err, stream) {
 
 
 
-#### `cat` 
+#### `cat`
 
-> Streams the file at the given IPFS multihash..
+> Streams the file at the given IPFS multihash.
 
 ##### `Go` **WIP**
@@ -116,3 +116,43 @@ ipfs.files.cat(multihash, function (err, file) {
 })
 ```
 
+
+#### `get`
+> Get [UnixFS][] files from IPFS.
+
+##### `Go` **WIP**
+
+##### `JavaScript` - ipfs.files.get(hash, [callback])
+
+Where `hash` is an IPFS multiaddress or multihash.
+
+`callback` must follow `function (err, stream) {}` signature, where `err` is an
+error if the operation was not successful. `stream` will be a Readable stream in
+[*object mode*](https://nodejs.org/api/stream.html#stream_object_mode),
+outputting objects of the form
+
+```js
+{
+  path: '/tmp/myfile.txt',
+  content: <stream>
+}
+```
+
+Here, each `path` corresponds to the name of a file, and `content` is a regular
+Readable stream with the raw contents of that file.
+
+If no `callback` is passed, a promise is returned with the Readable stream.
+
+Example:
+
+```js
+var multiaddr = '/ipfs/QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
+ipfs.files.get(multiaddr, function (err, stream) {
+  stream.on('data', (file) => {
+    // write the file's path and contents to standard out
+    console.log(file.path)
+    file.content.pipe(process.stdout)
+  })
+})
+```
+

From aea75854e60a62ac309feb0926273d33da07dd26 Mon Sep 17 00:00:00 2001
From: David Dias
Date: Tue, 9 Aug 2016 13:36:58 +0100
Subject: [PATCH 4/4] fix(files): remove .only used for testing

---
 src/files.js | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/files.js b/src/files.js
index f8d1c863..c91726b0 100644
--- a/src/files.js
+++ b/src/files.js
@@ -13,7 +13,7 @@ const concat = require('concat-stream')
 const through = require('through2')
 
 module.exports = (common) => {
-  describe.only('.files', () => {
+  describe('.files', () => {
     let smallFile
     let bigFile
     let directoryContent
@@ -22,6 +22,7 @@ module.exports = (common) => {
     before((done) => {
       smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))
+
       directoryContent = {
         'pp.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/pp.txt')),
         'holmes.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/holmes.txt')),
@@ -133,9 +134,10 @@
         const added = res[res.length - 1]
         const mh = bs58.encode(added.node.multihash()).toString()
 
-        expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
-        expect(added.path).to.equal('test-folder')
         expect(added.node.links).to.have.length(6)
+        expect(added.path).to.equal('test-folder')
+        expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
+
        done()
       })
     })
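The API reference added in PATCH 3/4 only shows the callback form, while its last sentence says a promise is returned when no callback is passed. The following is a minimal sketch of that promise form, written against the same calls the tests in PATCH 2/4 exercise; the `ipfs` argument and the hash are assumptions (any implementation that follows this interface, such as the instance `common.setup()` hands to the tests, and any valid file hash would do).

```js
'use strict'

const concat = require('concat-stream')

// `ipfs` is assumed to be an implementation of this interface; the hash is
// the single-file README object used throughout the tests above.
function printFiles (ipfs) {
  const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'

  return ipfs.files.get(hash)
    .then((stream) => {
      // `stream` is a Readable in object mode emitting { path, content } objects
      stream.pipe(concat((files) => {
        files.forEach((file) => {
          console.log(file.path)
          // `content` is a plain Readable stream of the file's raw bytes
          file.content.pipe(process.stdout)
        })
      }))
    })
}
```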
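For a directory hash, every entry arrives on the same object stream, and directory entries carry no `content` stream; the 'directory' test in PATCH 2/4 relies on exactly that. Below is a hedged sketch of walking a directory with `through2`, using the test-folder hash from those tests; the `ipfs` argument and the logging are illustrative assumptions, not part of the specified API.

```js
'use strict'

const through = require('through2')
const concat = require('concat-stream')

// `ipfs` is again an assumed implementation of this interface; the hash is
// the 'test-folder' directory added by the tests above.
function walkTestFolder (ipfs) {
  const dirHash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'

  ipfs.files.get(dirHash, (err, stream) => {
    if (err) throw err

    stream.pipe(through.obj((file, enc, next) => {
      if (!file.content) {
        // directories and empty folders come through without a content stream
        console.log('dir  %s', file.path)
        return next()
      }
      // buffer each file's bytes just to report its size
      file.content.pipe(concat((contents) => {
        console.log('file %s (%d bytes)', file.path, contents.length)
        next()
      }))
    }, () => {
      console.log('done walking', dirHash)
    }))
  })
}
```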