Merge pull request #54 from ipfs/feat/ipfs.files.get
feat/ipfs.files.get
daviddias authored Aug 10, 2016
2 parents 7cc174e + aea7585 commit 600e6c4
Showing 4 changed files with 223 additions and 29 deletions.
44 changes: 42 additions & 2 deletions API/files/README.md
@@ -93,9 +93,9 @@ ipfs.files.createAddStream(function (err, stream) {



#### `cat`

> Streams the file at the given IPFS multihash.
##### `Go` **WIP**

@@ -116,3 +116,43 @@ ipfs.files.cat(multihash, function (err, file) {
})
```
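
The hunk above truncates the body of the `cat` example. A minimal sketch of the documented call shape, reusing a multihash from the test suite below (the error handling and piping are illustrative assumptions, not part of the original example):

```js
var multihash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'

ipfs.files.cat(multihash, function (err, file) {
  if (err) {
    throw err
  }
  // `file` is a Readable stream of the file's bytes
  file.pipe(process.stdout)
})
```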


#### `get`
> Get [UnixFS][] files from IPFS.
##### `Go` **WIP**

##### `JavaScript` - ipfs.files.get(hash, [callback])

Where `hash` is an IPFS multiaddress or multihash.

`callback` must follow the `function (err, stream) {}` signature, where `err` is an
error if the operation was not successful. `stream` will be a Readable stream in
[*object mode*](https://nodejs.org/api/stream.html#stream_object_mode),
outputting objects of the form

```js
{
path: '/tmp/myfile.txt',
content: <Readable stream>
}
```

Here, each `path` corresponds to the name of a file, and `content` is a regular
Readable stream with the raw contents of that file.
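
Because `content` is a plain Readable stream, it can be buffered in memory with a module such as [`concat-stream`](https://www.npmjs.com/package/concat-stream) (the same module this commit adds to `package.json` for the tests). A minimal sketch, assuming a `hash` variable holding a valid multihash:

```js
var concat = require('concat-stream')

ipfs.files.get(hash, function (err, stream) {
  if (err) {
    throw err
  }
  stream.on('data', function (file) {
    // directory entries come through without a content stream
    if (!file.content) {
      return
    }
    file.content.pipe(concat(function (contents) {
      // `contents` is a single Buffer holding the whole file
      console.log(file.path, '-', contents.length, 'bytes')
    }))
  })
})
```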

If no `callback` is passed, a promise is returned that resolves to the Readable stream; a sketch of the promise form follows the example below.

Example:

```js
var multiaddr = '/ipfs/QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
ipfs.files.get(multiaddr, function (err, stream) {
stream.on('data', (file) => {
// write the file's path and contents to standard out
console.log(file.path)
file.content.pipe(process.stdout)
})
})
```
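
And a sketch of the promise form, using the same hypothetical multiaddr:

```js
var multiaddr = '/ipfs/QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'

ipfs.files.get(multiaddr)
  .then(function (stream) {
    stream.on('data', function (file) {
      console.log(file.path)
      file.content.pipe(process.stdout)
    })
  })
  .catch(function (err) {
    console.error(err)
  })
```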

5 changes: 4 additions & 1 deletion README.md
@@ -20,6 +20,7 @@
- [Files](/API/files)
- [`add`](/API/files#add)
- [`createAddStream`](/API/files#createaddstream)
- [`get`](/API/files#get)
- [`cat`](/API/files#cat)
- [Object](/API/object)
- [`object.new`](/API/object#objectnew)
@@ -99,7 +100,7 @@ test.all(common)
## API

A valid (read: that follows this interface) IPFS core implementation must expose the API described in [/API](/API).

## Contribute

@@ -114,3 +115,5 @@ This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/c
## License

MIT

[UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs
1 change: 1 addition & 0 deletions package.json
@@ -31,6 +31,7 @@
"bl": "^1.1.2",
"bs58": "^3.0.0",
"chai": "^3.5.0",
"concat-stream": "^1.5.1",
"detect-node": "^2.0.3",
"ipfs-merkle-dag": "^0.6.2",
"readable-stream": "1.1.13"
202 changes: 176 additions & 26 deletions src/files.js
@@ -8,20 +8,29 @@ const bs58 = require('bs58')
const Readable = require('readable-stream')
const path = require('path')
const fs = require('fs')
const isNode = require('detect-node')
const bl = require('bl')
const concat = require('concat-stream')
const through = require('through2')

module.exports = (common) => {
describe('.files', () => {
let smallFile
let bigFile
let directoryContent
let ipfs

before((done) => {
smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))

directoryContent = {
'pp.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/pp.txt')),
'holmes.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/holmes.txt')),
'jungle.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/jungle.txt')),
'alice.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/alice.txt')),
'files/hello.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/hello.txt')),
'files/ipfs.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/ipfs.txt'))
}

common.setup((err, _ipfs) => {
expect(err).to.not.exist
@@ -102,15 +111,9 @@ module.exports = (common) => {
})

it('add a nested dir as array', (done) => {
const base = path.join(__dirname, 'data/test-folder')
const content = (name) => ({
path: `test-folder/${name}`,
content: directoryContent[name]
})
const emptyDir = (name) => ({
path: `test-folder/${name}`
@@ -131,30 +134,23 @@

const added = res[res.length - 1]
const mh = bs58.encode(added.node.multihash()).toString()
expect(added.node.links).to.have.length(6)
expect(added.path).to.equal('test-folder')
expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')

done()
})
})

describe('.createAddStream', () => {
it('stream of valid files and dirs', (done) => {
const base = path.join(__dirname, 'data/test-folder')
const content = (name) => ({
path: `test-folder/${name}`,
content: directoryContent[name]
})
const emptyDir = (name) => ({
path: `test-folder/${name}`
})

const files = [
content('pp.txt'),
content('holmes.txt'),
@@ -243,7 +239,7 @@ module.exports = (common) => {
})

describe('.cat', () => {
it('with a base58 multihash encoded string', () => {
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'

return ipfs.cat(hash)
@@ -275,13 +271,167 @@
const hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
return ipfs.cat(hash)
.then((stream) => {
stream.pipe(bl((err, data) => {
expect(err).to.not.exist
expect(data.toString()).to.contain('Check out some of the other files in this directory:')
}))
})
})
})
})

describe('.get', () => {
it('with a base58 encoded multihash', (done) => {
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
ipfs.files.get(hash, (err, stream) => {
expect(err).to.not.exist
stream.pipe(concat((files) => {
expect(files).to.be.length(1)
expect(files[0].path).to.equal(hash)
files[0].content.pipe(concat((content) => {
expect(content.toString()).to.contain('Check out some of the other files in this directory:')
done()
}))
}))
})
})

it('with a multihash', (done) => {
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
const mhBuf = new Buffer(bs58.decode(hash))
ipfs.files.get(mhBuf, (err, stream) => {
expect(err).to.not.exist
stream.pipe(concat((files) => {
expect(files).to.be.length(1)
expect(files[0].path).to.deep.equal(hash)
files[0].content.pipe(concat((content) => {
expect(content.toString()).to.contain('Check out some of the other files in this directory:')
done()
}))
}))
})
})

it('large file', (done) => {
const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
ipfs.files.get(hash, (err, stream) => {
expect(err).to.not.exist

// accumulate the files and their content
var files = []
stream.pipe(through.obj((file, enc, next) => {
file.content.pipe(concat((content) => {
files.push({
path: file.path,
content: content
})
next()
}))
}, () => {
expect(files.length).to.equal(1)
expect(files[0].path).to.equal(hash)
expect(files[0].content).to.deep.equal(bigFile)
done()
}))
})
})

it('directory', (done) => {
const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
ipfs.files.get(hash, (err, stream) => {
expect(err).to.not.exist

// accumulate the files and their content
var files = []
stream.pipe(through.obj((file, enc, next) => {
if (file.content) {
file.content.pipe(concat((content) => {
files.push({
path: file.path,
content: content
})
next()
}))
} else {
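// directory entries come through without a content stream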
files.push(file)
next()
}
}, () => {
// Check paths
var paths = files.map((file) => {
return file.path
})
expect(paths).to.deep.equal([
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt'
])

// Check contents
var contents = files.map((file) => {
return file.content ? file.content : null
})
expect(contents).to.deep.equal([
null,
directoryContent['alice.txt'],
null,
null,
null,
directoryContent['files/hello.txt'],
directoryContent['files/ipfs.txt'],
directoryContent['holmes.txt'],
directoryContent['jungle.txt'],
directoryContent['pp.txt']
])
done()
}))
})
})

describe('promise', () => {
it('with a base58 encoded string', (done) => {
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
ipfs.files.get(hash)
.then((stream) => {
stream.pipe(concat((files) => {
expect(files).to.be.length(1)
expect(files[0].path).to.equal(hash)
files[0].content.pipe(concat((content) => {
expect(content.toString()).to.contain('Check out some of the other files in this directory:')
done()
}))
}))
})
.catch((err) => {
expect(err).to.not.exist
})
})

it('errors on invalid key', (done) => {
const hash = 'somethingNotMultihash'
ipfs.files.get(hash)
.then((stream) => {})
.catch((err) => {
expect(err).to.exist
// the error text differs between implementations, so accept either form
const errString = err.toString()
expect(
errString === 'Error: invalid ipfs ref path' ||
errString === 'Error: Invalid Key'
).to.equal(true)
done()
})
})
})
})
})
}
