From 7e1dde8a7fdc2529eba939dc9c9d547553ef6771 Mon Sep 17 00:00:00 2001 From: Stephen Sawchuk Date: Thu, 20 Nov 2014 12:12:10 -0500 Subject: [PATCH] support resumableThreshold & using simple vs resumable --- lib/storage/bucket.js | 114 +++++++---- lib/storage/file.js | 417 +++++++++++++++++++++++------------------ lib/storage/index.js | 13 ++ regression/storage.js | 24 ++- test/storage/bucket.js | 196 +++++++++++++------ test/storage/file.js | 357 ++++++++++++++++++++++------------- test/storage/index.js | 15 +- 7 files changed, 712 insertions(+), 424 deletions(-) diff --git a/lib/storage/bucket.js b/lib/storage/bucket.js index 16b61daf126..f9332dd0bd1 100644 --- a/lib/storage/bucket.js +++ b/lib/storage/bucket.js @@ -228,13 +228,16 @@ Bucket.prototype.setMetadata = function(metadata, callback) { * * @param {string} localPath - The fully qualified path to the file you wish to * upload to your bucket. - * @param {string=|module:storage/file=} destination - The place to save your - * file. If given a string, the file will be uploaded to the bucket using - * the string as a filename. When given a File object, your local file will - * be uploaded to the File object's bucket and under the File object's name. - * Lastly, when this argument is omitted, the file is uploaded to your + * @param {object=} options - Configuration options. + * @param {string=|module:storage/file=} options.destination - The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local file + * will be uploaded to the File object's bucket and under the File object's + * name. Lastly, when this argument is omitted, the file is uploaded to your * bucket using the name of the local file. - * @param {object=} metadata - Metadata to set for your file. + * @param {object=} options.metadata - Metadata to set for your file. + * @param {boolean=} options.resumable - Whether to use a resumable upload or + * simple upload. * @param {function} callback - The callback function. * * @example @@ -251,8 +254,19 @@ Bucket.prototype.setMetadata = function(metadata, callback) { * //- * // It's not always that easy. You will likely want to specify the filename * // used when your new file lands in your bucket. + * // + * // You may also need to set metadata, or customize other options, like if you + * // want the security of a resumable upload. * //- - * bucket.upload('/local/path/image.png', 'new-image.png', function(err, file) { + * var options = { + * destination: 'new-image.png', + * resumable: true, + * metadata: { + * event: 'Fall trip to the zoo' + * } + * }; + * + * bucket.upload('/local/path/image.png', options, function(err, file) { * // Your bucket now contains: * // - "new-image.png" (with the contents of `/local/path/image.png') * @@ -263,8 +277,12 @@ Bucket.prototype.setMetadata = function(metadata, callback) { * // You may also re-use a File object, {module:storage/file}, that references * // the file you wish to create or overwrite. 
* //- - * var file = bucket.file('existing-file.png'); - * bucket.upload('/local/path/image.png', file, function(err, newFile) { + * var options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('/local/path/image.png', options, function(err, newFile) { * // Your bucket now contains: * // - "existing-file.png" (with the contents of `/local/path/image.png') * @@ -272,36 +290,24 @@ Bucket.prototype.setMetadata = function(metadata, callback) { * // The `newFile` parameter is equal to `file`. * }); */ -Bucket.prototype.upload = function(localPath, destination, metadata, callback) { - var name; - var newFile; - switch (arguments.length) { - case 4: - break; - case 3: - callback = metadata; - if (util.is(destination, 'object')) { - metadata = destination; - } else { - metadata = {}; - } - /* falls through */ - default: - callback = callback || destination; - name = path.basename(localPath); - break; - } - metadata = metadata || {}; - callback = callback || util.noop; - if (util.is(destination, 'string')) { - name = destination; +Bucket.prototype.upload = function(localPath, options, callback) { + if (util.is(options, 'function')) { + callback = options; + options = {}; } - if (destination instanceof File) { - name = destination.name; - newFile = destination; + + var newFile; + if (options.destination instanceof File) { + newFile = options.destination; + } else if (util.is(options.destination, 'string')) { + // Use the string as the name of the file. + newFile = this.file(options.destination); + } else { + // Resort to using the name of the incoming file. + newFile = this.file(path.basename(localPath)); } - newFile = newFile || this.file(name); + var metadata = options.metadata || {}; var contentType = mime.lookup(localPath); if (contentType && !metadata.contentType) { metadata.contentType = contentType; @@ -312,12 +318,38 @@ Bucket.prototype.upload = function(localPath, destination, metadata, callback) { metadata.contentType += '; charset=' + charset; } - fs.createReadStream(localPath) - .pipe(newFile.createWriteStream(metadata)) - .on('error', callback) - .on('complete', function() { - callback(null, newFile); + var resumable; + if (util.is(options.resumable, 'boolean')) { + resumable = options.resumable; + upload(); + } else { + // Determine if the upload should be resumable based on if it meets the + // resumableThreshold. + var resumableThreshold = this.storage.resumableThreshold; + + fs.stat(localPath, function(err, fd) { + if (err) { + callback(err); + return; + } + + resumable = fd.size > resumableThreshold; + + upload(); }); + } + + function upload() { + fs.createReadStream(localPath) + .pipe(newFile.createWriteStream({ + resumable: resumable, + metadata: metadata + })) + .on('error', callback) + .on('complete', function() { + callback(null, newFile); + }); + } }; /** diff --git a/lib/storage/file.js b/lib/storage/file.js index da33eb564e0..cb18cdbcb7f 100644 --- a/lib/storage/file.js +++ b/lib/storage/file.js @@ -175,7 +175,6 @@ File.prototype.copy = function(destination, callback) { }); }; - /** * Create a readable stream to read the contents of the remote file. It can be * piped to a writable stream or listened to for 'data' events to read a file's @@ -227,43 +226,16 @@ File.prototype.createReadStream = function() { return dup; }; -/*! Developer Documentation - * - * `createWriteStream` uses the Resumable Upload API: http://goo.gl/jb0e9D. - * - * The process involves these steps: - * - * 1. POST the file's metadata. 
We get a resumable upload URI back, then cache - * it with ConfigStore. - * 2. PUT data to that URI with a Content-Range header noting what position - * the data is beginning from. We also cache, at most, the first 16 bytes - * of the data being uploaded. - * 3. Delete the ConfigStore cache after the upload completes. - * - * If the initial upload operation is interrupted, the next time the user - * uploads the file, these steps occur: - * - * 1. Detect the presence of a cached URI in ConfigStore. - * 2. Make an empty PUT request to that URI to get the last byte written to - * the remote file. - * 3. PUT data to the URI starting from the first byte after the last byte - * returned from the call above. - * - * If the user tries to upload entirely different data to the remote file: - * - * 1. -- same as above -- - * 2. -- same as above -- - * 3. -- same as above -- - * 4. Compare the first chunk of the new data with the chunk in cache. If it's - * different, start a new resumable upload (Step 1 of the first example). - */ /** * Create a writable stream to overwrite the contents of the file in your * bucket. * * A File object can also be used to create files for the first time. * - * @param {object=} metadata - Set the metadata for this file. + * @param {object=} options - Configuration object. + * @param {object=} options.metadata - Set the metadata for this file. + * @param {boolean=} options.resumable - Whether to use a resumable upload or + * simple upload. * * @example * //- @@ -292,14 +264,208 @@ File.prototype.createReadStream = function() { * var image = myBucket.file('image.png'); * * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') - * .pipe(image.createWriteStream({ contentType: 'image/jpeg' })) + * .pipe(image.createWriteStream({ + * metadata: contentType: 'image/jpeg' + * })) * .on('error', function(err) {}); */ -File.prototype.createWriteStream = function(metadata) { +File.prototype.createWriteStream = function(options) { + options = options || {}; + + var that = this; + var metadata = options.metadata || {}; + var dup = streamEvents(duplexify()); + + // Wait until we've received data to determine what upload technique to + // pursue. + dup.once('writing', function() { + if (util.is(options.resumable, 'boolean') && !options.resumable) { + that.startSimpleUpload_(dup, metadata); + } else { + that.startResumableUpload_(dup, metadata); + } + }); + + return dup; +}; + +/** + * Delete the file. + * + * @param {function=} callback - The callback function. + * + * @example + * file.delete(function(err) {}); + */ +File.prototype.delete = function(callback) { + callback = callback || util.noop; + var path = '/o/' + encodeURIComponent(this.name); + this.makeReq_('DELETE', path, null, true, function(err) { + if (err) { + callback(err); + return; + } + callback(); + }.bind(this)); +}; + +/** + * Get the file's metadata. + * + * @param {function=} callback - The callback function. + * + * @example + * file.getMetadata(function(err, metadata) {}); + */ +File.prototype.getMetadata = function(callback) { + callback = callback || util.noop; + var path = '/o/' + encodeURIComponent(this.name); + this.makeReq_('GET', path, null, true, function(err, resp) { + if (err) { + callback(err); + return; + } + this.metadata = resp; + callback(null, this.metadata); + }.bind(this)); +}; + +/** + * Get a signed URL to allow limited time access to the file. + * + * *[Reference](http://goo.gl/LcqhjU).* + * + * @throws {Error} if an expiration timestamp from the past is given. 
+ * + * @param {object} options - Configuration object. + * @param {string} options.action - "read", "write", or "delete" + * @param {string=} options.contentMd5 - The MD5 digest value in base64. If you + * provide this, the client must provide this HTTP header with this same + * value in its request. + * @param {string=} options.contentType - If you provide this value, the client + * must provide this HTTP header set to the same value. + * @param {number} options.expires - Timestamp (seconds since epoch) when this + * link will expire. + * @param {string=} options.extensionHeaders - If these headers are used, the + * server will check to make sure that the client provides matching values. + * + * @example + * file.getSignedUrl({ + * action: 'read', + * expires: Math.round(Date.now() / 1000) + (60 * 60 * 24 * 14) // 2 weeks. + * }, function(err, url) {}); + */ +File.prototype.getSignedUrl = function(options, callback) { + if (options.expires < Math.floor(Date.now() / 1000)) { + throw new Error('An expiration date cannot be in the past.'); + } + + options.action = { + read: 'GET', + write: 'PUT', + delete: 'DELETE' + }[options.action]; + + var name = encodeURIComponent(this.name); + + options.resource = '/' + this.bucket.name + '/' + name; + + var makeAuthorizedRequest_ = this.bucket.storage.makeAuthorizedRequest_; + + makeAuthorizedRequest_.getCredentials(function(err, credentials) { + if (err) { + callback(err); + return; + } + + var sign = crypto.createSign('RSA-SHA256'); + sign.update([ + options.action, + (options.contentMd5 || ''), + (options.contentType || ''), + options.expires, + (options.extensionHeaders || '') + options.resource + ].join('\n')); + var signature = sign.sign(credentials.private_key, 'base64'); + + callback(null, [ + 'http://storage.googleapis.com' + options.resource, + '?GoogleAccessId=' + credentials.client_email, + '&Expires=' + options.expires, + '&Signature=' + encodeURIComponent(signature) + ].join('')); + }); +}; + +/** + * Set the file's metadata. + * + * @param {object} metadata - The metadata you wish to set. + * @param {function=} callback - The callback function. + * + * @example + * file.setMetadata({ + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }, function(err, metadata) {}); + */ +File.prototype.setMetadata = function(metadata, callback) { + callback = callback || util.noop; + var path = '/o/' + encodeURIComponent(this.name); + this.makeReq_('PATCH', path, null, metadata, function(err, resp) { + if (err) { + callback(err); + return; + } + this.metadata = resp; + callback(null, this.metadata); + }.bind(this)); +}; + +/** + * `startResumableUpload_` uses the Resumable Upload API: http://goo.gl/jb0e9D. + * + * The process involves these steps: + * + * 1. POST the file's metadata. We get a resumable upload URI back, then cache + * it with ConfigStore. + * 2. PUT data to that URI with a Content-Range header noting what position + * the data is beginning from. We also cache, at most, the first 16 bytes + * of the data being uploaded. + * 3. Delete the ConfigStore cache after the upload completes. + * + * If the initial upload operation is interrupted, the next time the user + * uploads the file, these steps occur: + * + * 1. Detect the presence of a cached URI in ConfigStore. + * 2. Make an empty PUT request to that URI to get the last byte written to + * the remote file. + * 3. PUT data to the URI starting from the first byte after the last byte + * returned from the call above. 
+ * + * If the user tries to upload entirely different data to the remote file: + * + * 1. -- same as above -- + * 2. -- same as above -- + * 3. -- same as above -- + * 4. Compare the first chunk of the new data with the chunk in cache. If it's + * different, start a new resumable upload (Step 1 of the first example). + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} metadata - Optional metadata to set on the file. + * + * @private + */ +File.prototype.startResumableUpload_ = function(stream, metadata) { + metadata = metadata || {}; + var that = this; var configStore = new ConfigStore('gcloud-node'); + var config = configStore.get(that.name); var makeAuthorizedRequest = that.bucket.storage.makeAuthorizedRequest_; - metadata = metadata || {}; var numBytesWritten; var resumableUri; @@ -311,23 +477,12 @@ File.prototype.createWriteStream = function(metadata) { // data until we're ready again. var bufferStream = through(); - // This is the stream returned to the user. - var dup = streamEvents(duplexify()); - - // Wait until we've received data to determine if we're resuming an upload or - // creating a new one. - dup.once('writing', function() { - var config = configStore.get(that.name); - - if (config && config.uri) { - resumableUri = config.uri; - resumeUpload(); - } else { - startUpload(); - } - }); - - return dup; + if (config && config.uri) { + resumableUri = config.uri; + resumeUpload(); + } else { + startUpload(); + } // Begin a new resumable upload. Send the metadata and cache the URI returned. function startUpload() { @@ -466,14 +621,15 @@ File.prototype.createWriteStream = function(metadata) { } that.metadata = data; - dup.emit('complete', that.metadata); + + stream.emit('complete', that.metadata); configStore.del(that.name); }); }); bufferStream.pipe(offsetStream).pipe(writeStream); - dup.setWritable(bufferStream); + stream.setWritable(bufferStream); } // If an upload to this file has previously started, this will return the last @@ -524,145 +680,42 @@ File.prototype.createWriteStream = function(metadata) { return; } - dup.emit('error', err); - dup.end(); + stream.emit('error', err); + stream.end(); } }; /** - * Delete the file. - * - * @param {function=} callback - The callback function. + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). * - * @example - * file.delete(function(err) {}); - */ -File.prototype.delete = function(callback) { - callback = callback || util.noop; - var path = '/o/' + encodeURIComponent(this.name); - this.makeReq_('DELETE', path, null, true, function(err) { - if (err) { - callback(err); - return; - } - callback(); - }.bind(this)); -}; - -/** - * Get the file's metadata. - * - * @param {function=} callback - The callback function. + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} metadata - Optional metadata to set on the file. * - * @example - * file.getMetadata(function(err, metadata) {}); - */ -File.prototype.getMetadata = function(callback) { - callback = callback || util.noop; - var path = '/o/' + encodeURIComponent(this.name); - this.makeReq_('GET', path, null, true, function(err, resp) { - if (err) { - callback(err); - return; - } - this.metadata = resp; - callback(null, this.metadata); - }.bind(this)); -}; - -/** - * Get a signed URL to allow limited time access to the file. 
- * - * *[Reference](http://goo.gl/LcqhjU).* - * - * @throws {Error} if an expiration timestamp from the past is given. - * - * @param {object} options - Configuration object. - * @param {string} options.action - "read", "write", or "delete" - * @param {string=} options.contentMd5 - The MD5 digest value in base64. If you - * provide this, the client must provide this HTTP header with this same - * value in its request. - * @param {string=} options.contentType - If you provide this value, the client - * must provide this HTTP header set to the same value. - * @param {number} options.expires - Timestamp (seconds since epoch) when this - * link will expire. - * @param {string=} options.extensionHeaders - If these headers are used, the - * server will check to make sure that the client provides matching values. - * - * @example - * file.getSignedUrl({ - * action: 'read', - * expires: Math.round(Date.now() / 1000) + (60 * 60 * 24 * 14) // 2 weeks. - * }, function(err, url) {}); + * @private */ -File.prototype.getSignedUrl = function(options, callback) { - if (options.expires < Math.floor(Date.now() / 1000)) { - throw new Error('An expiration date cannot be in the past.'); - } - - options.action = { - read: 'GET', - write: 'PUT', - delete: 'DELETE' - }[options.action]; - - var name = encodeURIComponent(this.name); - - options.resource = '/' + this.bucket.name + '/' + name; - - var makeAuthorizedRequest_ = this.bucket.storage.makeAuthorizedRequest_; +File.prototype.startSimpleUpload_ = function(stream, metadata) { + var that = this; - makeAuthorizedRequest_.getCredentials(function(err, credentials) { - if (err) { - callback(err); - return; + util.makeWritableStream(stream, { + makeAuthorizedRequest: that.bucket.storage.makeAuthorizedRequest_, + metadata: metadata, + request: { + qs: { + name: that.name + }, + uri: util.format('{base}/{bucket}/o', { + base: STORAGE_UPLOAD_BASE_URL, + bucket: that.bucket.name + }) } + }, function(data) { + that.metadata = data; - var sign = crypto.createSign('RSA-SHA256'); - sign.update([ - options.action, - (options.contentMd5 || ''), - (options.contentType || ''), - options.expires, - (options.extensionHeaders || '') + options.resource - ].join('\n')); - var signature = sign.sign(credentials.private_key, 'base64'); - - callback(null, [ - 'http://storage.googleapis.com' + options.resource, - '?GoogleAccessId=' + credentials.client_email, - '&Expires=' + options.expires, - '&Signature=' + encodeURIComponent(signature) - ].join('')); + stream.emit('complete', data); + stream.end(); }); }; -/** - * Set the file's metadata. - * - * @param {object} metadata - The metadata you wish to set. - * @param {function=} callback - The callback function. 
- * - * @example - * file.setMetadata({ - * contentType: 'application/x-font-ttf', - * metadata: { - * my: 'custom', - * properties: 'go here' - * } - * }, function(err, metadata) {}); - */ -File.prototype.setMetadata = function(metadata, callback) { - callback = callback || util.noop; - var path = '/o/' + encodeURIComponent(this.name); - this.makeReq_('PATCH', path, null, metadata, function(err, resp) { - if (err) { - callback(err); - return; - } - this.metadata = resp; - callback(null, this.metadata); - }.bind(this)); -}; - module.exports = File; diff --git a/lib/storage/index.js b/lib/storage/index.js index 81ed2dfd3b1..6a954c2b1e4 100644 --- a/lib/storage/index.js +++ b/lib/storage/index.js @@ -63,6 +63,14 @@ var STORAGE_BASE_URL = 'https://www.googleapis.com/storage/v1/b'; * The example below will demonstrate the different usage patterns your app may * need to connect to `gcloud` and access your bucket. * + * @param {object} options - Configuration object. + * @param {number} options.resumableThreshold - In bytes, the maximum file size + * to allow before switching uploads to the resumable upload technique. + * Files uploaded below this size will be sent with the simple upload + * technique. Learn more about these options + * [here](https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload). + * (default: 5000000 (5mb)) + * * @alias module:storage * @constructor * @@ -108,6 +116,11 @@ function Storage(config) { }); this.projectId = config.projectId; + this.resumableThreshold = 5000000; // 5 MB + + if (util.is(config.resumableThreshold, 'number')) { + this.resumableThreshold = config.resumableThreshold; + } } /** diff --git a/regression/storage.js b/regression/storage.js index e595f30b71c..636c13abec4 100644 --- a/regression/storage.js +++ b/regression/storage.js @@ -147,7 +147,7 @@ describe('storage', function() { var file = bucket.file('directory/file'); var contents = 'test'; - var writeStream = file.createWriteStream(); + var writeStream = file.createWriteStream({ resumable: false }); writeStream.write(contents); writeStream.end(); @@ -163,10 +163,10 @@ describe('storage', function() { }); describe('stream write', function() { - it('should stream write, then remove large file (3mb)', function(done) { + it('should stream write, then remove file (3mb)', function(done) { var file = bucket.file('LargeFile'); fs.createReadStream(files.big.path) - .pipe(file.createWriteStream()) + .pipe(file.createWriteStream({ resumable: false })) .on('error', done) .on('complete', function(fileObject) { assert.equal(fileObject.md5Hash, files.big.hash); @@ -175,12 +175,17 @@ describe('storage', function() { }); it('should write metadata', function(done) { - var myMetadata = { contentType: 'image/png' }; - bucket.upload(files.logo.path, myMetadata, function(err, file) { + var options = { + metadata: { contentType: 'image/png' }, + resumable: false + }; + + bucket.upload(files.logo.path, options, function(err, file) { assert.ifError(err); + file.getMetadata(function(err, metadata) { assert.ifError(err); - assert.equal(metadata.contentType, myMetadata.contentType); + assert.equal(metadata.contentType, options.metadata.contentType); file.delete(done); }); }); @@ -240,12 +245,16 @@ describe('storage', function() { it('should write/read/remove from a buffer', function(done) { tmp.setGracefulCleanup(); tmp.file(function _tempFileCreated(err, tmpFilePath) { + assert.ifError(err); + var file = bucket.file('MyBuffer'); var fileContent = 'Hello World'; - assert.ifError(err); + var writable = 
file.createWriteStream(); + writable.write(fileContent); writable.end(); + writable.on('complete', function() { file.createReadStream() .pipe(fs.createWriteStream(tmpFilePath)) @@ -253,6 +262,7 @@ describe('storage', function() { .on('finish', function() { file.delete(function(err) { assert.ifError(err); + fs.readFile(tmpFilePath, function(err, data) { assert.equal(data, fileContent); done(); diff --git a/test/storage/bucket.js b/test/storage/bucket.js index 0adfa9edf1e..9f20b794319 100644 --- a/test/storage/bucket.js +++ b/test/storage/bucket.js @@ -27,8 +27,8 @@ function FakeFile(bucket, name, metadata) { this.bucket = bucket; this.name = name; this.metadata = metadata; - this.createWriteStream = function(metadata) { - this.metadata = metadata; + this.createWriteStream = function(options) { + this.metadata = options.metadata; var dup = duplexify(); dup._write = function() { dup.emit('complete'); @@ -278,112 +278,184 @@ describe('Bucket', function() { }; }); - describe('variable arity', function() { - it('should accept a path & cb', function(done) { - bucket.upload(filepath, function(err, file) { - assert.ifError(err); - assert.equal(file.bucket.name, bucket.name); - assert.equal(file.name, basename); - done(); - }); + it('should accept a path & cb', function(done) { + bucket.upload(filepath, function(err, file) { + assert.ifError(err); + assert.equal(file.bucket.name, bucket.name); + assert.equal(file.name, basename); + done(); }); + }); - it('should accept a path, metadata, & cb', function(done) { - bucket.upload(filepath, metadata, function(err, file) { - assert.ifError(err); - assert.equal(file.bucket.name, bucket.name); - assert.deepEqual(file.metadata, metadata); - done(); - }); + it('should accept a path, metadata, & cb', function(done) { + var options = { metadata: metadata }; + bucket.upload(filepath, options, function(err, file) { + assert.ifError(err); + assert.equal(file.bucket.name, bucket.name); + assert.deepEqual(file.metadata, metadata); + done(); }); + }); - it('should accept a path, a string dest, & cb', function(done) { - var newFileName = 'new-file-name.png'; - bucket.upload(filepath, newFileName, function(err, file) { - assert.ifError(err); - assert.equal(file.bucket.name, bucket.name); - assert.equal(file.name, newFileName); - done(); - }); + it('should accept a path, a string dest, & cb', function(done) { + var newFileName = 'new-file-name.png'; + var options = { destination: newFileName }; + bucket.upload(filepath, options, function(err, file) { + assert.ifError(err); + assert.equal(file.bucket.name, bucket.name); + assert.equal(file.name, newFileName); + done(); }); + }); - it('should accept a path, a string dest, metadata, & cb', function(done) { - var newFileName = 'new-file-name.png'; - bucket.upload(filepath, newFileName, metadata, function(err, file) { - assert.ifError(err); - assert.equal(file.bucket.name, bucket.name); - assert.equal(file.name, newFileName); - assert.deepEqual(file.metadata, metadata); - done(); - }); + it('should accept a path, a string dest, metadata, & cb', function(done) { + var newFileName = 'new-file-name.png'; + var options = { destination: newFileName, metadata: metadata }; + bucket.upload(filepath, options, function(err, file) { + assert.ifError(err); + assert.equal(file.bucket.name, bucket.name); + assert.equal(file.name, newFileName); + assert.deepEqual(file.metadata, metadata); + done(); }); + }); - it('should accept a path, a File dest, & cb', function(done) { - var fakeFile = new FakeFile(bucket, 'file-name'); - 
fakeFile.isSameFile = function() { - done(); - }; - bucket.upload(filepath, fakeFile, function(err, file) { - assert.ifError(err); - file.isSameFile(); - }); + it('should accept a path, a File dest, & cb', function(done) { + var fakeFile = new FakeFile(bucket, 'file-name'); + fakeFile.isSameFile = function() { + return true; + }; + var options = { destination: fakeFile }; + bucket.upload(filepath, options, function(err, file) { + assert.ifError(err); + assert(file.isSameFile()); + done(); }); + }); - it('should accept a path, a File dest, metadata, & cb', function(done) { - var fakeFile = new FakeFile(bucket, 'file-name'); - fakeFile.isSameFile = function() { - done(); - }; - bucket.upload(filepath, fakeFile, metadata, function(err, file) { - assert.ifError(err); - file.isSameFile(); - }); + it('should accept a path, a File dest, metadata, & cb', function(done) { + var fakeFile = new FakeFile(bucket, 'file-name'); + fakeFile.isSameFile = function() { + return true; + }; + var options = { destination: fakeFile, metadata: metadata }; + bucket.upload(filepath, options, function(err, file) { + assert.ifError(err); + assert(file.isSameFile()); + assert.deepEqual(file.metadata, metadata); + done(); }); }); it('should guess at the content type', function(done) { var fakeFile = new FakeFile(bucket, 'file-name'); - fakeFile.createWriteStream = function(metadata) { + var options = { destination: fakeFile }; + fakeFile.createWriteStream = function(options) { var dup = duplexify(); setImmediate(function() { - assert.equal(metadata.contentType, 'application/json'); + assert.equal(options.metadata.contentType, 'application/json'); done(); }); return dup; }; - bucket.upload(filepath, fakeFile, assert.ifError); + bucket.upload(filepath, options, assert.ifError); }); it('should guess at the charset', function(done) { var fakeFile = new FakeFile(bucket, 'file-name'); - fakeFile.createWriteStream = function(metadata) { + var options = { destination: fakeFile }; + fakeFile.createWriteStream = function(options) { var dup = duplexify(); setImmediate(function() { - assert.equal(metadata.contentType, 'text/plain; charset=UTF-8'); + assert.equal( + options.metadata.contentType, 'text/plain; charset=UTF-8'); done(); }); return dup; }; - bucket.upload(textFilepath, fakeFile, assert.ifError); + bucket.upload(textFilepath, options, assert.ifError); }); it('should allow overriding content type', function(done) { var fakeFile = new FakeFile(bucket, 'file-name'); var metadata = { contentType: 'made-up-content-type' }; - fakeFile.createWriteStream = function(meta) { + var options = { destination: fakeFile, metadata: metadata }; + fakeFile.createWriteStream = function(options) { + var dup = duplexify(); + setImmediate(function() { + assert.equal(options.metadata.contentType, metadata.contentType); + done(); + }); + return dup; + }; + bucket.upload(filepath, options, assert.ifError); + }); + + it('should allow specifying options.resumable', function(done) { + var fakeFile = new FakeFile(bucket, 'file-name'); + var options = { destination: fakeFile, resumable: false }; + fakeFile.createWriteStream = function(options) { var dup = duplexify(); setImmediate(function() { - assert.equal(meta.contentType, metadata.contentType); + assert.strictEqual(options.resumable, false); done(); }); return dup; }; - bucket.upload(filepath, fakeFile, metadata, assert.ifError); + bucket.upload(filepath, options, assert.ifError); + }); + + it('should use the global resumableThreshold', function(done) { + function getFileForLowThreshold() { + var 
fakeFile = new FakeFile(bucket, 'file-name'); + + fakeFile.createWriteStream = function(options) { + var dup = duplexify(); + setImmediate(function() { + // First upload will be resumable. + assert.strictEqual(options.resumable, true); + dup.emit('complete'); + }); + return dup; + }; + + return fakeFile; + } + + function getFileForHighThreshold() { + var fakeFile = new FakeFile(bucket, 'file-name'); + + fakeFile.createWriteStream = function(options) { + var dup = duplexify(); + setImmediate(function() { + // Second upload will be simple. + assert.strictEqual(options.resumable, false); + dup.emit('complete'); + }); + return dup; + }; + + return fakeFile; + } + + // Force all uploads to be resumable. + bucket.storage.resumableThreshold = 0; + var options = { destination: getFileForLowThreshold() }; + bucket.upload(filepath, options, function(err) { + assert.ifError(err); + + // Put the threshold over the file size of the fake file, forcing the + // upload to be simple. + bucket.storage.resumableThreshold = 9e9; + var options = { destination: getFileForHighThreshold() }; + bucket.upload(filepath, options, done); + }); }); it('should execute callback on error', function(done) { var error = new Error('Error.'); var fakeFile = new FakeFile(bucket, 'file-name'); + var options = { destination: fakeFile }; fakeFile.createWriteStream = function() { var dup = duplexify(); setImmediate(function() { @@ -391,7 +463,7 @@ describe('Bucket', function() { }); return dup; }; - bucket.upload(filepath, fakeFile, function(err) { + bucket.upload(filepath, options, function(err) { assert.equal(err, error); done(); }); diff --git a/test/storage/file.js b/test/storage/file.js index 0a55f3f787c..69b693facbf 100644 --- a/test/storage/file.js +++ b/test/storage/file.js @@ -321,11 +321,7 @@ describe('File', function() { }); describe('createWriteStream', function() { - var RESUMABLE_URI = 'http://resume'; - - beforeEach(function() { - configStoreData = {}; - }); + var METADATA = { a: 'b', c: 'd' }; it('should return a stream', function() { assert(file.createWriteStream() instanceof stream); @@ -346,140 +342,48 @@ describe('File', function() { .emit('writing'); }); - describe('starting', function() { - it('should start a new upload when written to', function(done) { - file.bucket.storage.makeAuthorizedRequest_ = function(reqOpts) { - var uri = 'https://www.googleapis.com/upload/storage/v1/b/' + - file.bucket.name + '/o'; - - assert.equal(reqOpts.method, 'POST'); - assert.equal(reqOpts.uri, uri); - assert.equal(reqOpts.qs.name, file.name); - assert.equal(reqOpts.qs.uploadType, 'resumable'); - - assert.deepEqual(reqOpts.headers, { - 'X-Upload-Content-Type': 'custom' - }); - assert.deepEqual(reqOpts.json, { contentType: 'custom' }); - - done(); - }; - - file.createWriteStream({ contentType: 'custom' }).emit('writing'); + it('should start a simple upload if specified', function(done) { + var writable = file.createWriteStream({ + metadata: METADATA, + resumable: false }); - it('should upload file', function(done) { - var requestCount = 0; - file.bucket.storage.makeAuthorizedRequest_ = function(reqOpts, cb) { - requestCount++; - - // respond to creation POST. - if (requestCount === 1) { - cb(null, null, { headers: { location: RESUMABLE_URI }}); - assert.deepEqual(configStoreData[file.name].uri, RESUMABLE_URI); - return; - } - - // create an authorized request for the first PUT. 
- if (requestCount === 2) { - assert.equal(reqOpts.method, 'PUT'); - assert.equal(reqOpts.uri, RESUMABLE_URI); - - cb.onAuthorized(null, { headers: {} }); - } - }; - - // respond to first upload PUT request. - var metadata = { a: 'b', c: 'd' }; - request_Override = function(reqOpts) { - assert.equal(reqOpts.headers['Content-Range'], 'bytes 0-*/*'); - - var stream = through(); - setImmediate(function() { - stream.emit('complete', { body: metadata }); - }); - return stream; - }; - - file.createWriteStream() - .on('error', done) - .on('complete', function(data) { - assert.deepEqual(data, metadata); + file.startSimpleUpload_ = function(stream, metadata) { + assert.deepEqual(stream, writable); + assert.deepEqual(metadata, METADATA); + done(); + }; - setImmediate(function() { - // cache deleted. - assert(!configStoreData[file.name]); - done(); - }); - }) - .emit('writing'); - }); + writable.emit('writing'); }); - describe('resuming', function() { - beforeEach(function() { - configStoreData[file.name] = { - uri: RESUMABLE_URI - }; + it('should start a resumable upload if specified', function(done) { + var writable = file.createWriteStream({ + metadata: METADATA, + resumable: true }); - it('should resume uploading from last sent byte', function(done) { - var lastByte = 135; - - var requestCount = 0; - file.bucket.storage.makeAuthorizedRequest_ = function(reqOpts, cb) { - requestCount++; - - if (requestCount === 1) { - assert.equal(reqOpts.method, 'PUT'); - assert.equal(reqOpts.uri, RESUMABLE_URI); - assert.deepEqual(reqOpts.headers, { - 'Content-Length': 0, - 'Content-Range': 'bytes */*' - }); - - cb({ - code: 308, // resumable upload status code - response: { headers: { range: '0-' + lastByte } } - }); - - return; - } - - if (requestCount === 2) { - assert.equal(reqOpts.method, 'PUT'); - assert.equal(reqOpts.uri, RESUMABLE_URI); - - cb.onAuthorized(null, { headers: {} }); - } - }; + file.startResumableUpload_ = function(stream, metadata) { + assert.deepEqual(stream, writable); + assert.deepEqual(metadata, METADATA); + done(); + }; - var metadata = { a: 'b', c: 'd' }; - request_Override = function(reqOpts) { - var startByte = lastByte + 1; - assert.equal( - reqOpts.headers['Content-Range'], 'bytes ' + startByte + '-*/*'); + writable.emit('writing'); + }); - var stream = through(); - setImmediate(function() { - stream.emit('complete', { body: metadata }); - }); - return stream; - }; + it('should default to a resumable upload', function(done) { + var writable = file.createWriteStream({ + metadata: METADATA + }); - file.createWriteStream() - .on('error', done) - .on('complete', function(data) { - assert.deepEqual(data, metadata); + file.startResumableUpload_ = function(stream, metadata) { + assert.deepEqual(stream, writable); + assert.deepEqual(metadata, METADATA); + done(); + }; - setImmediate(function() { - // cache deleted. 
- assert(!configStoreData[file.name]); - done(); - }); - }) - .emit('writing'); - }); + writable.emit('writing'); }); }); @@ -661,4 +565,201 @@ describe('File', function() { }); }); }); + + describe('startResumableUpload_', function() { + var RESUMABLE_URI = 'http://resume'; + + beforeEach(function() { + configStoreData = {}; + }); + + describe('starting', function() { + it('should start a resumable upload', function(done) { + file.bucket.storage.makeAuthorizedRequest_ = function(reqOpts) { + var uri = 'https://www.googleapis.com/upload/storage/v1/b/' + + file.bucket.name + '/o'; + + assert.equal(reqOpts.method, 'POST'); + assert.equal(reqOpts.uri, uri); + assert.equal(reqOpts.qs.name, file.name); + assert.equal(reqOpts.qs.uploadType, 'resumable'); + + assert.deepEqual(reqOpts.headers, { + 'X-Upload-Content-Type': 'custom' + }); + assert.deepEqual(reqOpts.json, { contentType: 'custom' }); + + done(); + }; + + file.startResumableUpload_(duplexify(), { contentType: 'custom' }); + }); + + it('should upload file', function(done) { + var requestCount = 0; + file.bucket.storage.makeAuthorizedRequest_ = function(reqOpts, cb) { + requestCount++; + + // respond to creation POST. + if (requestCount === 1) { + cb(null, null, { headers: { location: RESUMABLE_URI }}); + assert.deepEqual(configStoreData[file.name].uri, RESUMABLE_URI); + return; + } + + // create an authorized request for the first PUT. + if (requestCount === 2) { + assert.equal(reqOpts.method, 'PUT'); + assert.equal(reqOpts.uri, RESUMABLE_URI); + cb.onAuthorized(null, { headers: {} }); + } + }; + + // respond to first upload PUT request. + var metadata = { a: 'b', c: 'd' }; + request_Override = function(reqOpts) { + assert.equal(reqOpts.headers['Content-Range'], 'bytes 0-*/*'); + + var stream = through(); + setImmediate(function() { + stream.emit('complete', { body: metadata }); + }); + return stream; + }; + + var stream = duplexify(); + + stream + .on('error', done) + .on('complete', function(data) { + assert.deepEqual(data, metadata); + + setImmediate(function() { + // cache deleted. + assert(!configStoreData[file.name]); + done(); + }); + }); + + file.startResumableUpload_(stream); + }); + }); + + describe('resuming', function() { + beforeEach(function() { + configStoreData[file.name] = { + uri: RESUMABLE_URI + }; + }); + + it('should resume uploading from last sent byte', function(done) { + var lastByte = 135; + + var requestCount = 0; + file.bucket.storage.makeAuthorizedRequest_ = function(reqOpts, cb) { + requestCount++; + + if (requestCount === 1) { + assert.equal(reqOpts.method, 'PUT'); + assert.equal(reqOpts.uri, RESUMABLE_URI); + assert.deepEqual(reqOpts.headers, { + 'Content-Length': 0, + 'Content-Range': 'bytes */*' + }); + + cb({ + code: 308, // resumable upload status code + response: { headers: { range: '0-' + lastByte } } + }); + + return; + } + + if (requestCount === 2) { + assert.equal(reqOpts.method, 'PUT'); + assert.equal(reqOpts.uri, RESUMABLE_URI); + + cb.onAuthorized(null, { headers: {} }); + } + }; + + var metadata = { a: 'b', c: 'd' }; + request_Override = function(reqOpts) { + var startByte = lastByte + 1; + assert.equal( + reqOpts.headers['Content-Range'], 'bytes ' + startByte + '-*/*'); + + var stream = through(); + setImmediate(function() { + stream.emit('complete', { body: metadata }); + }); + return stream; + }; + + var stream = duplexify(); + + stream + .on('error', done) + .on('complete', function(data) { + assert.deepEqual(data, metadata); + + setImmediate(function() { + // cache deleted. 
+ assert(!configStoreData[file.name]); + done(); + }); + }); + + file.startResumableUpload_(stream); + }); + }); + }); + + describe('startSimpleUpload_', function() { + it('should get a writable stream', function(done) { + makeWritableStream_Override = function() { + done(); + }; + + file.startSimpleUpload_(duplexify()); + }); + + it('should pass the required arguments', function(done) { + var metadata = { a: 'b', c: 'd' }; + + makeWritableStream_Override = function(stream, options) { + assert.deepEqual(options.metadata, metadata); + assert.deepEqual(options.request, { + qs: { + name: file.name + }, + uri: 'https://www.googleapis.com/upload/storage/v1/b/' + + file.bucket.name + '/o' + }); + done(); + }; + + file.startSimpleUpload_(duplexify(), metadata); + }); + + it('should finish stream and set metadata', function(done) { + var metadata = { a: 'b', c: 'd' }; + + makeWritableStream_Override = function(stream, options, callback) { + callback(metadata); + }; + + var stream = duplexify(); + + stream + .on('error', done) + .on('complete', function(meta) { + assert.deepEqual(meta, metadata); + assert.deepEqual(file.metadata, metadata); + done(); + }); + + file.startSimpleUpload_(stream, metadata); + }); + }); }); diff --git a/test/storage/index.js b/test/storage/index.js index 405886e6133..42c47d18ef2 100644 --- a/test/storage/index.js +++ b/test/storage/index.js @@ -32,10 +32,17 @@ describe('Storage', function() { }); describe('initialization', function() { - it('should throw if a bucket name is not passed', function() { - assert.throws(function() { - storage.bucket(); - }, Error); + it('should set the project id', function() { + assert.equal(storage.projectId, 'project-id'); + }); + + it('should default the resumableThreshold to 5mb', function() { + assert.strictEqual(storage.resumableThreshold, 5000000); + }); + + it('should allow overriding the resumableThreshold', function() { + var storage = new Storage({ resumableThreshold: 1 }); + assert.strictEqual(storage.resumableThreshold, 1); }); });
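
For quick reference, a minimal usage sketch of the surface this patch adds. It assumes the module's usual `gcloud.storage(config)` entry point; the project ID, bucket and file names, and the 10 MB threshold below are illustrative placeholders, not values taken from the change.

var fs = require('fs');
var gcloud = require('gcloud'); // assumed entry point, not part of this diff

// Files larger than `resumableThreshold` (default: 5000000 bytes) are sent
// with the resumable upload technique; smaller files use a simple upload.
var storage = gcloud.storage({
  projectId: 'my-project',      // placeholder
  resumableThreshold: 10000000  // placeholder: ~10 MB
});

var bucket = storage.bucket('my-bucket'); // placeholder

// Opt out of resumable uploads for this file, regardless of its size.
bucket.upload('/local/path/image.png', {
  destination: 'new-image.png',
  metadata: { event: 'Fall trip to the zoo' },
  resumable: false
}, function(err, file) {
  if (err) { console.error(err); return; }
  // `file` references "new-image.png" in the bucket.
});

// Streaming form: choose the technique per write stream.
fs.createReadStream('/local/path/video.mov')
  .pipe(bucket.file('video.mov').createWriteStream({ resumable: true }))
  .on('error', function(err) { console.error(err); })
  .on('complete', function(metadata) {
    // `metadata` describes the uploaded file.
  });

In both forms an explicit `resumable` option takes precedence; the threshold-based default only applies when the option is omitted.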