diff --git a/.eslintignore b/.eslintignore
new file mode 100644
index 0000000000..e0b31fc01e
--- /dev/null
+++ b/.eslintignore
@@ -0,0 +1,5 @@
+**/node_modules/*
+appengine/loopback/*
+functions/**
+iot/*
+appengine/*
diff --git a/.eslintrc.yml b/.eslintrc.yml
new file mode 100644
index 0000000000..599d506aba
--- /dev/null
+++ b/.eslintrc.yml
@@ -0,0 +1,19 @@
+---
+extends:
+ - 'eslint:recommended'
+ - 'plugin:node/recommended'
+ - prettier
+env:
+ mocha: true
+plugins:
+ - node
+ - prettier
+rules:
+ prettier/prettier: error
+ block-scoped-var: error
+ eqeqeq: error
+ no-warning-comments: warn
+ no-console: off
+ node/no-missing-require: off
+ node/no-unpublished-require: off
+
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 0000000000..f6fac98b0a
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,3 @@
+node_modules/*
+samples/node_modules/*
+src/**/doc/*
diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 0000000000..df6eac0744
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1,8 @@
+---
+bracketSpacing: false
+printWidth: 80
+semi: true
+singleQuote: true
+tabWidth: 2
+trailingComma: es5
+useTabs: false
diff --git a/appengine/analytics/app.js b/appengine/analytics/app.js
index 691b3ed1cb..b1563495aa 100644
--- a/appengine/analytics/app.js
+++ b/appengine/analytics/app.js
@@ -26,7 +26,7 @@ app.enable('trust proxy');
// Engine, but will need to be set manually when running locally. See README.md.
const GA_TRACKING_ID = process.env.GA_TRACKING_ID;
-function trackEvent (category, action, label, value) {
+function trackEvent(category, action, label, value) {
const data = {
// API Version.
v: '1',
@@ -44,7 +44,7 @@ function trackEvent (category, action, label, value) {
// Event label.
el: label,
// Event value.
- ev: value
+ ev: value,
};
return got.post('http://www.google-analytics.com/collect', data);
@@ -54,7 +54,10 @@ app.get('/', (req, res, next) => {
// Event value must be numeric.
trackEvent('Example category', 'Example action', 'Example label', '100')
.then(() => {
- res.status(200).send('Event tracked.').end();
+ res
+ .status(200)
+ .send('Event tracked.')
+ .end();
})
// This sample treats an event tracking error as a fatal error. Depending
// on your application's needs, failing to track an event may not be
diff --git a/appengine/building-an-app/update/server.js b/appengine/building-an-app/update/server.js
index de0afd5a33..88bb8d933c 100755
--- a/appengine/building-an-app/update/server.js
+++ b/appengine/building-an-app/update/server.js
@@ -23,7 +23,7 @@ const path = require(`path`);
const app = express();
// [START enable_parser]
-app.use(bodyParser.urlencoded({ extended: true }));
+app.use(bodyParser.urlencoded({extended: true}));
// [END enable_parser]
app.get('/', (req, res) => {
@@ -40,7 +40,7 @@ app.get('/submit', (req, res) => {
app.post('/submit', (req, res) => {
console.log({
name: req.body.name,
- message: req.body.message
+ message: req.body.message,
});
res.send('Thanks for your message!');
});
diff --git a/appengine/building-an-app/update/test/server.test.js b/appengine/building-an-app/update/test/server.test.js
index c624b4c4b6..775636283b 100755
--- a/appengine/building-an-app/update/test/server.test.js
+++ b/appengine/building-an-app/update/test/server.test.js
@@ -20,39 +20,42 @@ const utils = require(`@google-cloud/nodejs-repo-tools`);
const cwd = path.join(__dirname, `../`);
const requestObj = utils.getRequest({
cwd: cwd,
- cmd: `server` });
+ cmd: `server`,
+});
test.beforeEach(utils.stubConsole);
test.afterEach.always(utils.restoreConsole);
-test.cb.serial(`should send greetings`, (t) => {
+test.cb.serial(`should send greetings`, t => {
requestObj
.get(`/`)
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.is(response.text, `Hello from App Engine!`);
})
.end(t.end);
});
-test.cb.serial(`should display form`, (t) => {
+test.cb.serial(`should display form`, t => {
requestObj
.get(`/submit`)
.expect(200)
- .expect((response) => {
- t.true(response.text.includes('textarea name="message" placeholder="Message"'));
+ .expect(response => {
+ t.true(
+ response.text.includes('textarea name="message" placeholder="Message"')
+ );
})
.end(t.end);
});
-test.cb.serial(`should record message`, (t) => {
+test.cb.serial(`should record message`, t => {
requestObj
.post(`/submit`, {
name: `sample-user`,
- message: `sample-message`
+ message: `sample-message`,
})
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.is(response.text, `Thanks for your message!`);
})
.end(t.end);
diff --git a/appengine/cloudsql/createTables.js b/appengine/cloudsql/createTables.js
index 73a4c79ff1..dc7a09ace2 100644
--- a/appengine/cloudsql/createTables.js
+++ b/appengine/cloudsql/createTables.js
@@ -31,11 +31,11 @@ prompt.get(FIELDS, (err, config) => {
}
// Connect to the database
- const knex = Knex({ client: 'mysql', connection: config });
+ const knex = Knex({client: 'mysql', connection: config});
// Create the "visits" table
- knex.schema.createTable('visits',
- (table) => {
+ knex.schema
+ .createTable('visits', table => {
table.increments();
table.timestamp('timestamp');
table.string('userIp');
@@ -44,7 +44,7 @@ prompt.get(FIELDS, (err, config) => {
console.log(`Successfully created 'visits' table.`);
return knex.destroy();
})
- .catch((err) => {
+ .catch(err => {
console.error(`Failed to create 'visits' table:`, err);
if (knex) {
knex.destroy();
diff --git a/appengine/cloudsql/server.js b/appengine/cloudsql/server.js
index ed47a941c0..12e32f7d7f 100644
--- a/appengine/cloudsql/server.js
+++ b/appengine/cloudsql/server.js
@@ -28,22 +28,25 @@ app.enable('trust proxy');
const knex = connect();
-function connect () {
+function connect() {
// [START gae_flex_mysql_connect]
const config = {
user: process.env.SQL_USER,
password: process.env.SQL_PASSWORD,
- database: process.env.SQL_DATABASE
+ database: process.env.SQL_DATABASE,
};
- if (process.env.INSTANCE_CONNECTION_NAME && process.env.NODE_ENV === 'production') {
+ if (
+ process.env.INSTANCE_CONNECTION_NAME &&
+ process.env.NODE_ENV === 'production'
+ ) {
config.socketPath = `/cloudsql/${process.env.INSTANCE_CONNECTION_NAME}`;
}
// Connect to the database
const knex = Knex({
client: 'mysql',
- connection: config
+ connection: config,
});
// [END gae_flex_mysql_connect]
@@ -57,7 +60,7 @@ function connect () {
* @param {object} visit The visit record to insert.
* @returns {Promise}
*/
-function insertVisit (knex, visit) {
+function insertVisit(knex, visit) {
return knex('visits').insert(visit);
}
@@ -67,13 +70,16 @@ function insertVisit (knex, visit) {
* @param {object} knex The Knex connection object.
* @returns {Promise}
*/
-function getVisits (knex) {
- return knex.select('timestamp', 'userIp')
+function getVisits(knex) {
+ return knex
+ .select('timestamp', 'userIp')
.from('visits')
.orderBy('timestamp', 'desc')
.limit(10)
- .then((results) => {
- return results.map((visit) => `Time: ${visit.timestamp}, AddrHash: ${visit.userIp}`);
+ .then(results => {
+ return results.map(
+ visit => `Time: ${visit.timestamp}, AddrHash: ${visit.userIp}`
+ );
});
}
@@ -82,20 +88,24 @@ app.get('/', (req, res, next) => {
const visit = {
timestamp: new Date(),
// Store a hash of the visitor's ip address
- userIp: crypto.createHash('sha256').update(req.ip).digest('hex').substr(0, 7)
+ userIp: crypto
+ .createHash('sha256')
+ .update(req.ip)
+ .digest('hex')
+ .substr(0, 7),
};
insertVisit(knex, visit)
// Query the last 10 visits from the database.
.then(() => getVisits(knex))
- .then((visits) => {
+ .then(visits => {
res
.status(200)
.set('Content-Type', 'text/plain')
.send(`Last 10 visits:\n${visits.join('\n')}`)
.end();
})
- .catch((err) => {
+ .catch(err => {
next(err);
});
});
diff --git a/appengine/cloudsql/test/createTables.test.js b/appengine/cloudsql/test/createTables.test.js
index 1ec9bdfbf6..437f38028a 100644
--- a/appengine/cloudsql/test/createTables.test.js
+++ b/appengine/cloudsql/test/createTables.test.js
@@ -23,31 +23,29 @@ const tools = require(`@google-cloud/nodejs-repo-tools`);
const SAMPLE_PATH = path.join(__dirname, `../createTables.js`);
-const exampleConfig = [
- `user`,
- `password`,
- `database`
-];
+const exampleConfig = [`user`, `password`, `database`];
-function getSample () {
+function getSample() {
const configMock = exampleConfig;
const promptMock = {
start: sinon.stub(),
- get: sinon.stub().yields(null, configMock)
+ get: sinon.stub().yields(null, configMock),
};
const tableMock = {
increments: sinon.stub(),
timestamp: sinon.stub(),
- string: sinon.stub()
+ string: sinon.stub(),
};
const knexMock = {
schema: {
- createTable: sinon.stub()
+ createTable: sinon.stub(),
},
- destroy: sinon.stub().returns(Promise.resolve())
+ destroy: sinon.stub().returns(Promise.resolve()),
};
- knexMock.schema.createTable.returns(Promise.resolve(knexMock)).yields(tableMock);
+ knexMock.schema.createTable
+ .returns(Promise.resolve(knexMock))
+ .yields(tableMock);
const KnexMock = sinon.stub().returns(knexMock);
return {
@@ -55,21 +53,21 @@ function getSample () {
Knex: KnexMock,
knex: knexMock,
config: configMock,
- prompt: promptMock
- }
+ prompt: promptMock,
+ },
};
}
test.beforeEach(tools.stubConsole);
test.afterEach.always(tools.restoreConsole);
-test.cb.serial(`should create a table`, (t) => {
+test.cb.serial(`should create a table`, t => {
const sample = getSample();
const expectedResult = `Successfully created 'visits' table.`;
proxyquire(SAMPLE_PATH, {
knex: sample.mocks.Knex,
- prompt: sample.mocks.prompt
+ prompt: sample.mocks.prompt,
});
t.true(sample.mocks.prompt.start.calledOnce);
@@ -78,10 +76,12 @@ test.cb.serial(`should create a table`, (t) => {
setTimeout(() => {
t.true(sample.mocks.Knex.calledOnce);
- t.deepEqual(sample.mocks.Knex.firstCall.args, [{
- client: 'mysql',
- connection: exampleConfig
- }]);
+ t.deepEqual(sample.mocks.Knex.firstCall.args, [
+ {
+ client: 'mysql',
+ connection: exampleConfig,
+ },
+ ]);
t.true(sample.mocks.knex.schema.createTable.calledOnce);
t.is(sample.mocks.knex.schema.createTable.firstCall.args[0], 'visits');
@@ -92,14 +92,14 @@ test.cb.serial(`should create a table`, (t) => {
}, 10);
});
-test.cb.serial(`should handle prompt error`, (t) => {
+test.cb.serial(`should handle prompt error`, t => {
const error = new Error(`error`);
const sample = getSample();
sample.mocks.prompt.get = sinon.stub().yields(error);
proxyquire(SAMPLE_PATH, {
knex: sample.mocks.Knex,
- prompt: sample.mocks.prompt
+ prompt: sample.mocks.prompt,
});
setTimeout(() => {
@@ -110,14 +110,16 @@ test.cb.serial(`should handle prompt error`, (t) => {
}, 10);
});
-test.cb.serial(`should handle knex creation error`, (t) => {
+test.cb.serial(`should handle knex creation error`, t => {
const error = new Error(`error`);
const sample = getSample();
- sample.mocks.knex.schema.createTable = sinon.stub().returns(Promise.reject(error));
+ sample.mocks.knex.schema.createTable = sinon
+ .stub()
+ .returns(Promise.reject(error));
proxyquire(SAMPLE_PATH, {
knex: sample.mocks.Knex,
- prompt: sample.mocks.prompt
+ prompt: sample.mocks.prompt,
});
setTimeout(() => {
diff --git a/appengine/cloudsql/test/server.test.js b/appengine/cloudsql/test/server.test.js
index a800c34713..66bf1a4f91 100644
--- a/appengine/cloudsql/test/server.test.js
+++ b/appengine/cloudsql/test/server.test.js
@@ -25,25 +25,25 @@ const tools = require(`@google-cloud/nodejs-repo-tools`);
const SAMPLE_PATH = path.join(__dirname, `../server.js`);
-function getSample () {
+function getSample() {
const testApp = express();
sinon.stub(testApp, `listen`).yields();
const expressMock = sinon.stub().returns(testApp);
const resultsMock = [
{
timestamp: `1234`,
- userIp: `abcd`
- }
+ userIp: `abcd`,
+ },
];
const knexMock = sinon.stub().returns({
- insert: sinon.stub().returns(Promise.resolve())
+ insert: sinon.stub().returns(Promise.resolve()),
});
Object.assign(knexMock, {
select: sinon.stub().returnsThis(),
from: sinon.stub().returnsThis(),
orderBy: sinon.stub().returnsThis(),
- limit: sinon.stub().returns(Promise.resolve(resultsMock))
+ limit: sinon.stub().returns(Promise.resolve(resultsMock)),
});
const KnexMock = sinon.stub().returns(knexMock);
@@ -52,14 +52,14 @@ function getSample () {
env: {
SQL_USER: 'user',
SQL_PASSWORD: 'password',
- SQL_DATABASE: 'database'
- }
+ SQL_DATABASE: 'database',
+ },
};
const app = proxyquire(SAMPLE_PATH, {
knex: KnexMock,
express: expressMock,
- process: processMock
+ process: processMock,
});
return {
@@ -69,43 +69,45 @@ function getSample () {
results: resultsMock,
knex: knexMock,
Knex: KnexMock,
- process: processMock
- }
+ process: processMock,
+ },
};
}
test.beforeEach(tools.stubConsole);
test.afterEach.always(tools.restoreConsole);
-test(`should set up sample in Postgres`, (t) => {
+test(`should set up sample in Postgres`, t => {
const sample = getSample();
t.true(sample.mocks.express.calledOnce);
t.true(sample.mocks.Knex.calledOnce);
- t.deepEqual(sample.mocks.Knex.firstCall.args, [{
- client: 'mysql',
- connection: {
- user: sample.mocks.process.env.SQL_USER,
- password: sample.mocks.process.env.SQL_PASSWORD,
- database: sample.mocks.process.env.SQL_DATABASE
- }
- }]);
+ t.deepEqual(sample.mocks.Knex.firstCall.args, [
+ {
+ client: 'mysql',
+ connection: {
+ user: sample.mocks.process.env.SQL_USER,
+ password: sample.mocks.process.env.SQL_PASSWORD,
+ database: sample.mocks.process.env.SQL_DATABASE,
+ },
+ },
+ ]);
});
-test.cb(`should record a visit`, (t) => {
+test.cb(`should record a visit`, t => {
const sample = getSample();
const expectedResult = `Last 10 visits:\nTime: 1234, AddrHash: abcd`;
request(sample.app)
.get(`/`)
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.is(response.text, expectedResult);
})
.end(t.end);
});
-test.cb(`should handle insert error`, (t) => {
+test.cb(`should handle insert error`, t => {
const sample = getSample();
const expectedResult = `insert_error`;
@@ -114,13 +116,13 @@ test.cb(`should handle insert error`, (t) => {
request(sample.app)
.get(`/`)
.expect(500)
- .expect((response) => {
+ .expect(response => {
t.is(response.text.includes(expectedResult), true);
})
.end(t.end);
});
-test.cb(`should handle read error`, (t) => {
+test.cb(`should handle read error`, t => {
const sample = getSample();
const expectedResult = `read_error`;
@@ -129,7 +131,7 @@ test.cb(`should handle read error`, (t) => {
request(sample.app)
.get(`/`)
.expect(500)
- .expect((response) => {
+ .expect(response => {
t.is(response.text.includes(expectedResult), true);
})
.end(t.end);
diff --git a/appengine/cloudsql_postgresql/createTables.js b/appengine/cloudsql_postgresql/createTables.js
index 91dcc51dc7..18f408e096 100644
--- a/appengine/cloudsql_postgresql/createTables.js
+++ b/appengine/cloudsql_postgresql/createTables.js
@@ -31,19 +31,20 @@ prompt.get(FIELDS, (err, config) => {
}
// Connect to the database
- const knex = Knex({ client: 'pg', connection: config });
+ const knex = Knex({client: 'pg', connection: config});
// Create the "visits" table
- knex.schema.createTable('visits', (table) => {
- table.increments();
- table.timestamp('timestamp');
- table.string('userIp');
- })
+ knex.schema
+ .createTable('visits', table => {
+ table.increments();
+ table.timestamp('timestamp');
+ table.string('userIp');
+ })
.then(() => {
console.log(`Successfully created 'visits' table.`);
return knex.destroy();
})
- .catch((err) => {
+ .catch(err => {
console.error(`Failed to create 'visits' table:`, err);
if (knex) {
knex.destroy();
diff --git a/appengine/cloudsql_postgresql/server.js b/appengine/cloudsql_postgresql/server.js
index 5eb8bde275..8b7643d92c 100644
--- a/appengine/cloudsql_postgresql/server.js
+++ b/appengine/cloudsql_postgresql/server.js
@@ -28,22 +28,25 @@ app.enable('trust proxy');
const knex = connect();
-function connect () {
+function connect() {
// [START gae_flex_postgres_connect]
const config = {
user: process.env.SQL_USER,
password: process.env.SQL_PASSWORD,
- database: process.env.SQL_DATABASE
+ database: process.env.SQL_DATABASE,
};
- if (process.env.INSTANCE_CONNECTION_NAME && process.env.NODE_ENV === 'production') {
+ if (
+ process.env.INSTANCE_CONNECTION_NAME &&
+ process.env.NODE_ENV === 'production'
+ ) {
config.host = `/cloudsql/${process.env.INSTANCE_CONNECTION_NAME}`;
}
// Connect to the database
const knex = Knex({
client: 'pg',
- connection: config
+ connection: config,
});
// [END gae_flex_postgres_connect]
@@ -57,7 +60,7 @@ function connect () {
* @param {object} visit The visit record to insert.
* @returns {Promise}
*/
-function insertVisit (knex, visit) {
+function insertVisit(knex, visit) {
return knex('visits').insert(visit);
}
@@ -67,13 +70,16 @@ function insertVisit (knex, visit) {
* @param {object} knex The Knex connection object.
* @returns {Promise}
*/
-function getVisits (knex) {
- return knex.select('timestamp', 'userIp')
+function getVisits(knex) {
+ return knex
+ .select('timestamp', 'userIp')
.from('visits')
.orderBy('timestamp', 'desc')
.limit(10)
- .then((results) => {
- return results.map((visit) => `Time: ${visit.timestamp}, AddrHash: ${visit.userIp}`);
+ .then(results => {
+ return results.map(
+ visit => `Time: ${visit.timestamp}, AddrHash: ${visit.userIp}`
+ );
});
}
@@ -82,20 +88,24 @@ app.get('/', (req, res, next) => {
const visit = {
timestamp: new Date(),
// Store a hash of the visitor's ip address
- userIp: crypto.createHash('sha256').update(req.ip).digest('hex').substr(0, 7)
+ userIp: crypto
+ .createHash('sha256')
+ .update(req.ip)
+ .digest('hex')
+ .substr(0, 7),
};
insertVisit(knex, visit)
// Query the last 10 visits from the database.
.then(() => getVisits(knex))
- .then((visits) => {
+ .then(visits => {
res
.status(200)
.set('Content-Type', 'text/plain')
.send(`Last 10 visits:\n${visits.join('\n')}`)
.end();
})
- .catch((err) => {
+ .catch(err => {
next(err);
});
});
diff --git a/appengine/cloudsql_postgresql/test/createTables.test.js b/appengine/cloudsql_postgresql/test/createTables.test.js
index 0d43f8add7..c1aba8b65b 100644
--- a/appengine/cloudsql_postgresql/test/createTables.test.js
+++ b/appengine/cloudsql_postgresql/test/createTables.test.js
@@ -23,31 +23,29 @@ const tools = require(`@google-cloud/nodejs-repo-tools`);
const SAMPLE_PATH = path.join(__dirname, `../createTables.js`);
-const exampleConfig = [
- `user`,
- `password`,
- `database`
-];
+const exampleConfig = [`user`, `password`, `database`];
-function getSample () {
+function getSample() {
const configMock = exampleConfig;
const promptMock = {
start: sinon.stub(),
- get: sinon.stub().yields(null, configMock)
+ get: sinon.stub().yields(null, configMock),
};
const tableMock = {
increments: sinon.stub(),
timestamp: sinon.stub(),
- string: sinon.stub()
+ string: sinon.stub(),
};
const knexMock = {
schema: {
- createTable: sinon.stub()
+ createTable: sinon.stub(),
},
- destroy: sinon.stub().returns(Promise.resolve())
+ destroy: sinon.stub().returns(Promise.resolve()),
};
- knexMock.schema.createTable.returns(Promise.resolve(knexMock)).yields(tableMock);
+ knexMock.schema.createTable
+ .returns(Promise.resolve(knexMock))
+ .yields(tableMock);
const KnexMock = sinon.stub().returns(knexMock);
return {
@@ -55,21 +53,21 @@ function getSample () {
Knex: KnexMock,
knex: knexMock,
config: configMock,
- prompt: promptMock
- }
+ prompt: promptMock,
+ },
};
}
test.beforeEach(tools.stubConsole);
test.afterEach.always(tools.restoreConsole);
-test.cb.serial(`should create a table`, (t) => {
+test.cb.serial(`should create a table`, t => {
const sample = getSample();
const expectedResult = `Successfully created 'visits' table.`;
proxyquire(SAMPLE_PATH, {
knex: sample.mocks.Knex,
- prompt: sample.mocks.prompt
+ prompt: sample.mocks.prompt,
});
t.true(sample.mocks.prompt.start.calledOnce);
@@ -78,10 +76,12 @@ test.cb.serial(`should create a table`, (t) => {
setTimeout(() => {
t.true(sample.mocks.Knex.calledOnce);
- t.deepEqual(sample.mocks.Knex.firstCall.args, [{
- client: 'pg',
- connection: exampleConfig
- }]);
+ t.deepEqual(sample.mocks.Knex.firstCall.args, [
+ {
+ client: 'pg',
+ connection: exampleConfig,
+ },
+ ]);
t.true(sample.mocks.knex.schema.createTable.calledOnce);
t.is(sample.mocks.knex.schema.createTable.firstCall.args[0], 'visits');
@@ -92,14 +92,14 @@ test.cb.serial(`should create a table`, (t) => {
}, 10);
});
-test.cb.serial(`should handle prompt error`, (t) => {
+test.cb.serial(`should handle prompt error`, t => {
const error = new Error(`error`);
const sample = getSample();
sample.mocks.prompt.get = sinon.stub().yields(error);
proxyquire(SAMPLE_PATH, {
knex: sample.mocks.Knex,
- prompt: sample.mocks.prompt
+ prompt: sample.mocks.prompt,
});
setTimeout(() => {
@@ -110,14 +110,16 @@ test.cb.serial(`should handle prompt error`, (t) => {
}, 10);
});
-test.cb.serial(`should handle knex creation error`, (t) => {
+test.cb.serial(`should handle knex creation error`, t => {
const error = new Error(`error`);
const sample = getSample();
- sample.mocks.knex.schema.createTable = sinon.stub().returns(Promise.reject(error));
+ sample.mocks.knex.schema.createTable = sinon
+ .stub()
+ .returns(Promise.reject(error));
proxyquire(SAMPLE_PATH, {
knex: sample.mocks.Knex,
- prompt: sample.mocks.prompt
+ prompt: sample.mocks.prompt,
});
setTimeout(() => {
diff --git a/appengine/cloudsql_postgresql/test/server.test.js b/appengine/cloudsql_postgresql/test/server.test.js
index 94b49af475..955d5cf38e 100644
--- a/appengine/cloudsql_postgresql/test/server.test.js
+++ b/appengine/cloudsql_postgresql/test/server.test.js
@@ -25,25 +25,25 @@ const tools = require(`@google-cloud/nodejs-repo-tools`);
const SAMPLE_PATH = path.join(__dirname, `../server.js`);
-function getSample () {
+function getSample() {
const testApp = express();
sinon.stub(testApp, `listen`).yields();
const expressMock = sinon.stub().returns(testApp);
const resultsMock = [
{
timestamp: `1234`,
- userIp: `abcd`
- }
+ userIp: `abcd`,
+ },
];
const knexMock = sinon.stub().returns({
- insert: sinon.stub().returns(Promise.resolve())
+ insert: sinon.stub().returns(Promise.resolve()),
});
Object.assign(knexMock, {
select: sinon.stub().returnsThis(),
from: sinon.stub().returnsThis(),
orderBy: sinon.stub().returnsThis(),
- limit: sinon.stub().returns(Promise.resolve(resultsMock))
+ limit: sinon.stub().returns(Promise.resolve(resultsMock)),
});
const KnexMock = sinon.stub().returns(knexMock);
@@ -52,14 +52,14 @@ function getSample () {
env: {
SQL_USER: 'user',
SQL_PASSWORD: 'password',
- SQL_DATABASE: 'database'
- }
+ SQL_DATABASE: 'database',
+ },
};
const app = proxyquire(SAMPLE_PATH, {
knex: KnexMock,
express: expressMock,
- process: processMock
+ process: processMock,
});
return {
@@ -69,43 +69,45 @@ function getSample () {
results: resultsMock,
knex: knexMock,
Knex: KnexMock,
- process: processMock
- }
+ process: processMock,
+ },
};
}
test.beforeEach(tools.stubConsole);
test.afterEach.always(tools.restoreConsole);
-test(`should set up sample in Postgres`, (t) => {
+test(`should set up sample in Postgres`, t => {
const sample = getSample();
t.true(sample.mocks.express.calledOnce);
t.true(sample.mocks.Knex.calledOnce);
- t.deepEqual(sample.mocks.Knex.firstCall.args, [{
- client: 'pg',
- connection: {
- user: sample.mocks.process.env.SQL_USER,
- password: sample.mocks.process.env.SQL_PASSWORD,
- database: sample.mocks.process.env.SQL_DATABASE
- }
- }]);
+ t.deepEqual(sample.mocks.Knex.firstCall.args, [
+ {
+ client: 'pg',
+ connection: {
+ user: sample.mocks.process.env.SQL_USER,
+ password: sample.mocks.process.env.SQL_PASSWORD,
+ database: sample.mocks.process.env.SQL_DATABASE,
+ },
+ },
+ ]);
});
-test.cb(`should record a visit`, (t) => {
+test.cb(`should record a visit`, t => {
const sample = getSample();
const expectedResult = `Last 10 visits:\nTime: 1234, AddrHash: abcd`;
request(sample.app)
.get(`/`)
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.is(response.text, expectedResult);
})
.end(t.end);
});
-test.cb(`should handle insert error`, (t) => {
+test.cb(`should handle insert error`, t => {
const sample = getSample();
const expectedResult = `insert_error`;
@@ -114,13 +116,13 @@ test.cb(`should handle insert error`, (t) => {
request(sample.app)
.get(`/`)
.expect(500)
- .expect((response) => {
+ .expect(response => {
t.is(response.text.includes(expectedResult), true);
})
.end(t.end);
});
-test.cb(`should handle read error`, (t) => {
+test.cb(`should handle read error`, t => {
const sample = getSample();
const expectedResult = `read_error`;
@@ -129,7 +131,7 @@ test.cb(`should handle read error`, (t) => {
request(sample.app)
.get(`/`)
.expect(500)
- .expect((response) => {
+ .expect(response => {
t.is(response.text.includes(expectedResult), true);
})
.end(t.end);
diff --git a/appengine/datastore/app.js b/appengine/datastore/app.js
index 7ebc61cf29..e4578d62ae 100644
--- a/appengine/datastore/app.js
+++ b/appengine/datastore/app.js
@@ -37,26 +37,28 @@ const datastore = Datastore();
*
* @param {object} visit The visit record to insert.
*/
-function insertVisit (visit) {
+function insertVisit(visit) {
return datastore.save({
key: datastore.key('visit'),
- data: visit
+ data: visit,
});
}
/**
* Retrieve the latest 10 visit records from the database.
*/
-function getVisits () {
- const query = datastore.createQuery('visit')
- .order('timestamp', { descending: true })
+function getVisits() {
+ const query = datastore
+ .createQuery('visit')
+ .order('timestamp', {descending: true})
.limit(10);
- return datastore.runQuery(query)
- .then((results) => {
- const entities = results[0];
- return entities.map((entity) => `Time: ${entity.timestamp}, AddrHash: ${entity.userIp}`);
- });
+ return datastore.runQuery(query).then(results => {
+ const entities = results[0];
+ return entities.map(
+ entity => `Time: ${entity.timestamp}, AddrHash: ${entity.userIp}`
+ );
+ });
}
app.get('/', (req, res, next) => {
@@ -64,13 +66,17 @@ app.get('/', (req, res, next) => {
const visit = {
timestamp: new Date(),
// Store a hash of the visitor's ip address
- userIp: crypto.createHash('sha256').update(req.ip).digest('hex').substr(0, 7)
+ userIp: crypto
+ .createHash('sha256')
+ .update(req.ip)
+ .digest('hex')
+ .substr(0, 7),
};
insertVisit(visit)
// Query the last 10 visits from Datastore.
.then(() => getVisits())
- .then((visits) => {
+ .then(visits => {
res
.status(200)
.set('Content-Type', 'text/plain')
diff --git a/appengine/endpoints/app.js b/appengine/endpoints/app.js
index 7df2277df1..e4c1e2d129 100644
--- a/appengine/endpoints/app.js
+++ b/appengine/endpoints/app.js
@@ -23,11 +23,11 @@ const app = express();
app.use(bodyParser.json());
app.post('/echo', (req, res) => {
- res.status(200).json({ message: req.body.message });
+ res.status(200).json({message: req.body.message});
});
-function authInfoHandler (req, res) {
- let authUser = { id: 'anonymous' };
+function authInfoHandler(req, res) {
+ let authUser = {id: 'anonymous'};
const encodedInfo = req.get('X-Endpoint-API-UserInfo');
if (encodedInfo) {
authUser = JSON.parse(Buffer.from(encodedInfo, 'base64'));
diff --git a/appengine/endpoints/test/app.test.js b/appengine/endpoints/test/app.test.js
index e43b480db2..714ac92f78 100644
--- a/appengine/endpoints/test/app.test.js
+++ b/appengine/endpoints/test/app.test.js
@@ -26,61 +26,64 @@ const tools = require('@google-cloud/nodejs-repo-tools');
const SAMPLE_PATH = path.join(__dirname, '../app.js');
-function getSample () {
+function getSample() {
const testApp = express();
const expressMock = sinon.stub().returns(testApp);
const app = proxyquire(SAMPLE_PATH, {
- express: expressMock
+ express: expressMock,
});
return {
app: app,
mocks: {
- express: expressMock
- }
+ express: expressMock,
+ },
};
}
test.beforeEach(tools.stubConsole);
test.afterEach.always(tools.restoreConsole);
-test(`sets up the sample`, (t) => {
+test(`sets up the sample`, t => {
const sample = getSample();
t.true(sample.mocks.express.calledOnce);
});
-test.cb(`should echo a message`, (t) => {
+test.cb(`should echo a message`, t => {
const sample = getSample();
request(sample.app)
.post('/echo')
- .send({ message: 'foo' })
+ .send({message: 'foo'})
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.is(response.body.message, 'foo');
})
.end(t.end);
});
-test.cb(`should try to parse encoded info`, (t) => {
+test.cb(`should try to parse encoded info`, t => {
const sample = getSample();
request(sample.app)
.get('/auth/info/googlejwt')
.expect(200)
- .expect((response) => {
- t.deepEqual(response.body, { id: 'anonymous' });
+ .expect(response => {
+ t.deepEqual(response.body, {id: 'anonymous'});
})
.end(t.end);
});
-test.cb(`should successfully parse encoded info`, (t) => {
+test.cb(`should successfully parse encoded info`, t => {
const sample = getSample();
request(sample.app)
.get('/auth/info/googlejwt')
- .set('X-Endpoint-API-UserInfo', Buffer.from(JSON.stringify({ id: 'foo' })).toString('base64'))
+ .set(
+ 'X-Endpoint-API-UserInfo',
+ Buffer.from(JSON.stringify({id: 'foo'})).toString('base64')
+ )
.expect(200)
- .expect((response) => {
- t.deepEqual(response.body, { id: 'foo' });
+ .expect(response => {
+ t.deepEqual(response.body, {id: 'foo'});
})
.end(t.end);
});
diff --git a/appengine/errorreporting/test/app.test.js b/appengine/errorreporting/test/app.test.js
index 91a067b2fb..d2a0a43b02 100644
--- a/appengine/errorreporting/test/app.test.js
+++ b/appengine/errorreporting/test/app.test.js
@@ -25,24 +25,25 @@ const tools = require('@google-cloud/nodejs-repo-tools');
const SAMPLE_PATH = path.join(__dirname, `../app.js`);
-function getSample () {
+function getSample() {
const testApp = express();
sinon.stub(testApp, `listen`).callsArg(1);
const expressMock = sinon.stub().returns(testApp);
- const resultsMock = JSON.stringify({
- timestamp: `1234`,
- userIp: `abcd`
- }) + `\n`;
+ const resultsMock =
+ JSON.stringify({
+ timestamp: `1234`,
+ userIp: `abcd`,
+ }) + `\n`;
const reportMock = sinon.stub();
- const errorsMock = sinon.stub().callsFake(function ErrorReporting () {
+ const errorsMock = sinon.stub().callsFake(function ErrorReporting() {
return {
- report: reportMock
+ report: reportMock,
};
});
const app = proxyquire(SAMPLE_PATH, {
express: expressMock,
- '@google-cloud/error-reporting': errorsMock
+ '@google-cloud/error-reporting': errorsMock,
});
return {
app: app,
@@ -50,15 +51,15 @@ function getSample () {
errors: errorsMock,
express: expressMock,
report: reportMock,
- results: resultsMock
- }
+ results: resultsMock,
+ },
};
}
test.beforeEach(tools.stubConsole);
test.afterEach.always(tools.restoreConsole);
-test(`sets up the sample`, (t) => {
+test(`sets up the sample`, t => {
const sample = getSample();
t.true(sample.mocks.express.calledOnce);
@@ -68,14 +69,14 @@ test(`sets up the sample`, (t) => {
t.is(sample.app.listen.firstCall.args[0], process.env.PORT || 8080);
});
-test.cb(`should throw an error`, (t) => {
+test.cb(`should throw an error`, t => {
const sample = getSample();
const expectedResult = `something is wrong!`;
request(sample.app)
.get(`/`)
.expect(500)
- .expect((response) => {
+ .expect(response => {
t.true(sample.mocks.report.calledOnce);
t.is(response.text, expectedResult);
})
diff --git a/appengine/headless-chrome/app.js b/appengine/headless-chrome/app.js
index cb7cc29614..6d8a147cf6 100644
--- a/appengine/headless-chrome/app.js
+++ b/appengine/headless-chrome/app.js
@@ -26,13 +26,15 @@ app.use(async (req, res) => {
const url = req.query.url;
if (!url) {
- return res.send('Please provide URL as GET parameter, for example: ?url=https://example.com');
+ return res.send(
+ 'Please provide URL as GET parameter, for example: ?url=https://example.com'
+ );
}
if (!browser) {
// [START browser]
browser = await puppeteer.launch({
- args: ['--no-sandbox']
+ args: ['--no-sandbox'],
});
// [END browser]
}
diff --git a/appengine/headless-chrome/test/app.test.js b/appengine/headless-chrome/test/app.test.js
index 40fed3da60..00e0a1daaa 100644
--- a/appengine/headless-chrome/test/app.test.js
+++ b/appengine/headless-chrome/test/app.test.js
@@ -18,7 +18,7 @@ const path = require(`path`);
const utils = require(`@google-cloud/nodejs-repo-tools`);
const cwd = path.join(__dirname, `../`);
-const requestObj = utils.getRequest({ cwd: cwd });
+const requestObj = utils.getRequest({cwd: cwd});
test.serial.cb(`should return a screenshot`, t => {
requestObj
diff --git a/appengine/hello-world/flexible/app.js b/appengine/hello-world/flexible/app.js
index d6c60726c8..ad6fb61bba 100644
--- a/appengine/hello-world/flexible/app.js
+++ b/appengine/hello-world/flexible/app.js
@@ -21,7 +21,10 @@ const express = require('express');
const app = express();
app.get('/', (req, res) => {
- res.status(200).send('Hello, world!').end();
+ res
+ .status(200)
+ .send('Hello, world!')
+ .end();
});
// Start the server
diff --git a/appengine/hello-world/standard/app.js b/appengine/hello-world/standard/app.js
index 7a09308358..181b3e3b87 100644
--- a/appengine/hello-world/standard/app.js
+++ b/appengine/hello-world/standard/app.js
@@ -21,7 +21,10 @@ const express = require('express');
const app = express();
app.get('/', (req, res) => {
- res.status(200).send('Hello, world!').end();
+ res
+ .status(200)
+ .send('Hello, world!')
+ .end();
});
// Start the server
diff --git a/appengine/mailjet/app.js b/appengine/mailjet/app.js
index 76da21f9da..b40aac12c5 100644
--- a/appengine/mailjet/app.js
+++ b/appengine/mailjet/app.js
@@ -34,50 +34,50 @@ app.set('view engine', 'jade');
// Parse form data
app.use(bodyParser.json());
-app.use(bodyParser.urlencoded({ extended: false }));
+app.use(bodyParser.urlencoded({extended: false}));
-app.get('/', function (req, res) {
+app.get('/', function(req, res) {
res.render('index');
});
// [START gae_flex_mailjet_send_message]
-app.post('/hello', function (req, res, next) {
+app.post('/hello', function(req, res, next) {
var options = {
- 'Messages': [
+ Messages: [
{
- 'From': {
- 'Email': 'no-reply@appengine-mailjet-demo.com',
- 'Name': 'Mailjet Pilot'
+ From: {
+ Email: 'no-reply@appengine-mailjet-demo.com',
+ Name: 'Mailjet Pilot',
},
- 'To': [
+ To: [
{
- 'Email': req.body.email
- }
+ Email: req.body.email,
+ },
],
- 'Subject': 'Your email flight plan!',
- 'TextPart': 'Mailjet on Google App Engine with Node.js',
- 'HTMLPart': '<h3>Mailjet on Google App Engine with Node.js</h3>'
- }
- ]
+ Subject: 'Your email flight plan!',
+ TextPart: 'Mailjet on Google App Engine with Node.js',
+ HTMLPart: '<h3>Mailjet on Google App Engine with Node.js</h3>',
+ },
+ ],
};
- var request = Mailjet.post('send', {'version': 'v3.1'}).request(options);
+ var request = Mailjet.post('send', {version: 'v3.1'}).request(options);
request
- .then(function (response, body) {
+ .then(function(response, body) {
console.log(response.statusCode, body);
// Render the index route on success
return res.render('index', {
- sent: true
+ sent: true,
});
})
- .catch(function (err) {
+ .catch(function(err) {
return next(err);
});
});
// [END gae_flex_mailjet_send_message]
-var server = app.listen(process.env.PORT || 8080, function () {
+var server = app.listen(process.env.PORT || 8080, function() {
console.log('App listening on port %s', server.address().port);
console.log('Press Ctrl+C to quit.');
});
diff --git a/appengine/memcached/app.js b/appengine/memcached/app.js
index 05b4db55d9..e7dcb33d26 100644
--- a/appengine/memcached/app.js
+++ b/appengine/memcached/app.js
@@ -38,13 +38,18 @@ app.get('/', (req, res, next) => {
return;
}
- mc.set('foo', `${Math.random()}`, (err) => {
- if (err) {
- next(err);
- return;
- }
- res.redirect('/');
- }, 60);
+ mc.set(
+ 'foo',
+ `${Math.random()}`,
+ err => {
+ if (err) {
+ next(err);
+ return;
+ }
+ res.redirect('/');
+ },
+ 60
+ );
});
});
diff --git a/appengine/metadata/flexible/server.js b/appengine/metadata/flexible/server.js
index 414c486b47..640e967ea5 100644
--- a/appengine/metadata/flexible/server.js
+++ b/appengine/metadata/flexible/server.js
@@ -22,22 +22,25 @@ const request = require('got');
const app = express();
app.enable('trust proxy');
-const METADATA_NETWORK_INTERFACE_URL = 'http://metadata/computeMetadata/v1/' +
-'/instance/network-interfaces/0/access-configs/0/external-ip';
+const METADATA_NETWORK_INTERFACE_URL =
+ 'http://metadata/computeMetadata/v1/' +
+ '/instance/network-interfaces/0/access-configs/0/external-ip';
-function getExternalIp () {
+function getExternalIp() {
const options = {
headers: {
- 'Metadata-Flavor': 'Google'
+ 'Metadata-Flavor': 'Google',
},
- json: true
+ json: true,
};
return request(METADATA_NETWORK_INTERFACE_URL, options)
- .then((response) => response.body)
- .catch((err) => {
+ .then(response => response.body)
+ .catch(err => {
if (err || err.statusCode !== 200) {
- console.log('Error while talking to metadata server, assuming localhost');
+ console.log(
+ 'Error while talking to metadata server, assuming localhost'
+ );
return 'localhost';
}
return Promise.reject(err);
@@ -46,8 +49,11 @@ function getExternalIp () {
app.get('/', (req, res, next) => {
getExternalIp()
- .then((externalIp) => {
- res.status(200).send(`External IP: ${externalIp}`).end();
+ .then(externalIp => {
+ res
+ .status(200)
+ .send(`External IP: ${externalIp}`)
+ .end();
})
.catch(next);
});
diff --git a/appengine/metadata/standard/server.js b/appengine/metadata/standard/server.js
index 13774a3750..74d7cd7efc 100644
--- a/appengine/metadata/standard/server.js
+++ b/appengine/metadata/standard/server.js
@@ -21,18 +21,19 @@ const request = require('got');
const app = express();
app.enable('trust proxy');
-const METADATA_PROJECT_ID_URL = 'http://metadata.google.internal/computeMetadata/v1/project/project-id';
+const METADATA_PROJECT_ID_URL =
+ 'http://metadata.google.internal/computeMetadata/v1/project/project-id';
-function getProjectId () {
+function getProjectId() {
const options = {
headers: {
- 'Metadata-Flavor': 'Google'
- }
+ 'Metadata-Flavor': 'Google',
+ },
};
return request(METADATA_PROJECT_ID_URL, options)
- .then((response) => response.body)
- .catch((err) => {
+ .then(response => response.body)
+ .catch(err => {
if (err && err.statusCode !== 200) {
console.log('Error while talking to metadata server.');
return 'Unknown_Project_ID';
@@ -43,8 +44,11 @@ function getProjectId () {
app.get('/', (req, res, next) => {
getProjectId()
- .then((projectId) => {
- res.status(200).send(`Project ID: ${projectId}`).end();
+ .then(projectId => {
+ res
+ .status(200)
+ .send(`Project ID: ${projectId}`)
+ .end();
})
.catch(next);
});
diff --git a/appengine/mongodb/server.js b/appengine/mongodb/server.js
index 1bf6668bf4..b081724f98 100644
--- a/appengine/mongodb/server.js
+++ b/appengine/mongodb/server.js
@@ -21,7 +21,10 @@ const nconf = require('nconf');
// Read in keys and secrets. You can store these in
// a keys.json file, or in environment variables
-nconf.argv().env().file('keys.json');
+nconf
+ .argv()
+ .env()
+ .file('keys.json');
// Connect to a MongoDB server provisioned over at
// MongoLab. See the README for more info.
@@ -36,52 +39,57 @@ if (nconf.get('mongoDatabase')) {
uri = `${uri}/${nconf.get('mongoDatabase')}`;
}
-mongodb.MongoClient.connect(uri, (err, client) => {
- if (err) {
- throw err;
- }
-
- // Create a simple little server.
- http.createServer((req, res) => {
- if (req.url === '/_ah/health') {
- res.writeHead(200, {
- 'Content-Type': 'text/plain'
- });
- res.write('OK');
- res.end();
- return;
+mongodb.MongoClient.connect(
+ uri,
+ (err, client) => {
+ if (err) {
+ throw err;
}
- // Track every IP that has visited this site
- const db = client.db(nconf.get('mongoDatabase'));
- const collection = db.collection('IPs');
- const ip = {
- address: req.connection.remoteAddress
- };
+ // Create a simple little server.
+ http
+ .createServer((req, res) => {
+ if (req.url === '/_ah/health') {
+ res.writeHead(200, {
+ 'Content-Type': 'text/plain',
+ });
+ res.write('OK');
+ res.end();
+ return;
+ }
+ // Track every IP that has visited this site
+ const db = client.db(nconf.get('mongoDatabase'));
+ const collection = db.collection('IPs');
+
+ const ip = {
+ address: req.connection.remoteAddress,
+ };
- collection.insert(ip, (err) => {
- if (err) {
- throw err;
- }
+ collection.insert(ip, err => {
+ if (err) {
+ throw err;
+ }
- // push out a range
- let iplist = '';
- collection.find().toArray((err, data) => {
- if (err) {
- throw err;
- }
- data.forEach((ip) => {
- iplist += `${ip.address}; `;
- });
+ // push out a range
+ let iplist = '';
+ collection.find().toArray((err, data) => {
+ if (err) {
+ throw err;
+ }
+ data.forEach(ip => {
+ iplist += `${ip.address}; `;
+ });
- res.writeHead(200, {
- 'Content-Type': 'text/plain'
+ res.writeHead(200, {
+ 'Content-Type': 'text/plain',
+ });
+ res.write('IPs:\n');
+ res.end(iplist);
+ });
});
- res.write('IPs:\n');
- res.end(iplist);
+ })
+ .listen(process.env.PORT || 8080, () => {
+ console.log('started web process');
});
- });
- }).listen(process.env.PORT || 8080, () => {
- console.log('started web process');
- });
-});
+ }
+);
diff --git a/appengine/parse-server/server.js b/appengine/parse-server/server.js
index eaff9854d8..44d8d6b502 100644
--- a/appengine/parse-server/server.js
+++ b/appengine/parse-server/server.js
@@ -20,7 +20,10 @@ const nconf = require('nconf');
const ParseServer = require('parse-server').ParseServer;
const path = require('path');
-nconf.argv().env().file({ file: 'config.json' });
+nconf
+ .argv()
+ .env()
+ .file({file: 'config.json'});
const app = express();
@@ -30,7 +33,7 @@ const parseServer = new ParseServer({
appId: nconf.get('APP_ID'),
masterKey: nconf.get('MASTER_KEY'),
fileKey: nconf.get('FILE_KEY'),
- serverURL: nconf.get('SERVER_URL')
+ serverURL: nconf.get('SERVER_URL'),
});
// Mount the Parse API server middleware to /parse
diff --git a/appengine/pubsub/app.js b/appengine/pubsub/app.js
index 7ae0880807..0722c20753 100644
--- a/appengine/pubsub/app.js
+++ b/appengine/pubsub/app.js
@@ -35,7 +35,7 @@ const app = express();
app.set('view engine', 'pug');
app.set('views', path.join(__dirname, 'views'));
-const formBodyParser = bodyParser.urlencoded({ extended: false });
+const formBodyParser = bodyParser.urlencoded({extended: false});
const jsonBodyParser = bodyParser.json();
// List of all messages received by this instance
@@ -50,7 +50,7 @@ const publisher = topic.publisher();
// [START gae_flex_pubsub_index]
app.get('/', (req, res) => {
- res.render('index', { messages: messages });
+ res.render('index', {messages: messages});
});
app.post('/', formBodyParser, (req, res, next) => {
@@ -59,7 +59,7 @@ app.post('/', formBodyParser, (req, res, next) => {
return;
}
- publisher.publish(Buffer.from(req.body.payload), (err) => {
+ publisher.publish(Buffer.from(req.body.payload), err => {
if (err) {
next(err);
return;
@@ -77,7 +77,9 @@ app.post('/pubsub/push', jsonBodyParser, (req, res) => {
}
// The message is a unicode string encoded in base64.
- const message = Buffer.from(req.body.message.data, 'base64').toString('utf-8');
+ const message = Buffer.from(req.body.message.data, 'base64').toString(
+ 'utf-8'
+ );
messages.push(message);
diff --git a/appengine/pubsub/test/app.test.js b/appengine/pubsub/test/app.test.js
index a3f7adbc92..e5bd9fff9d 100644
--- a/appengine/pubsub/test/app.test.js
+++ b/appengine/pubsub/test/app.test.js
@@ -26,46 +26,49 @@ const message = `This is a test message sent at: `;
const payload = message + Date.now();
const cwd = path.join(__dirname, `../`);
-const requestObj = utils.getRequest({ cwd: cwd });
+const requestObj = utils.getRequest({cwd: cwd});
-test.serial.cb(`should send a message to Pub/Sub`, (t) => {
+test.serial.cb(`should send a message to Pub/Sub`, t => {
requestObj
.post(`/`)
.type('form')
- .send({ payload: payload })
+ .send({payload: payload})
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.is(response.text, `Message sent`);
})
.end(t.end);
});
-test.serial.cb(`should receive incoming Pub/Sub messages`, (t) => {
+test.serial.cb(`should receive incoming Pub/Sub messages`, t => {
requestObj
.post(`/pubsub/push`)
- .query({ token: process.env.PUBSUB_VERIFICATION_TOKEN })
+ .query({token: process.env.PUBSUB_VERIFICATION_TOKEN})
.send({
message: {
- data: payload
- }
+ data: payload,
+ },
})
.expect(200)
.end(t.end);
});
-test.serial.cb(`should check for verification token on incoming Pub/Sub messages`, (t) => {
- requestObj
- .post(`/pubsub/push`)
- .field(`payload`, payload)
- .expect(400)
- .end(t.end);
-});
+test.serial.cb(
+ `should check for verification token on incoming Pub/Sub messages`,
+ t => {
+ requestObj
+ .post(`/pubsub/push`)
+ .field(`payload`, payload)
+ .expect(400)
+ .end(t.end);
+ }
+);
-test.serial.cb(`should list sent Pub/Sub messages`, (t) => {
+test.serial.cb(`should list sent Pub/Sub messages`, t => {
requestObj
.get(`/`)
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.regex(response.text, /Messages received by this instance/);
})
.end(t.end);
diff --git a/appengine/redis/server.js b/appengine/redis/server.js
index 29d1412880..d13e325b9a 100644
--- a/appengine/redis/server.js
+++ b/appengine/redis/server.js
@@ -22,44 +22,51 @@ const nconf = require('nconf');
// read in keys and secrets. You can store these in a variety of ways.
// I like to use a keys.json file that is in the .gitignore file,
// but you can also store them in environment variables
-nconf.argv().env().file('keys.json');
+nconf
+ .argv()
+ .env()
+ .file('keys.json');
// [START gae_flex_node_redis]
// Connect to a redis server provisioned over at
// Redis Labs. See the README for more info.
-const client = redis.createClient(
- nconf.get('redisPort') || '6379',
- nconf.get('redisHost') || '127.0.0.1',
- {
- 'auth_pass': nconf.get('redisKey'),
- 'return_buffers': true
- }
-).on('error', (err) => console.error('ERR:REDIS:', err));
+const client = redis
+ .createClient(
+ nconf.get('redisPort') || '6379',
+ nconf.get('redisHost') || '127.0.0.1',
+ {
+ auth_pass: nconf.get('redisKey'),
+ return_buffers: true,
+ }
+ )
+ .on('error', err => console.error('ERR:REDIS:', err));
// [END gae_flex_node_redis]]
// Create a simple little server.
-http.createServer((req, res) => {
- // Track every IP that has visited this site
- const listName = 'IPs';
- client.lpush(listName, req.connection.remoteAddress);
- client.ltrim(listName, 0, 25);
+http
+ .createServer((req, res) => {
+ // Track every IP that has visited this site
+ const listName = 'IPs';
+ client.lpush(listName, req.connection.remoteAddress);
+ client.ltrim(listName, 0, 25);
- // push out a range
- let iplist = '';
- client.lrange(listName, 0, -1, (err, data) => {
- if (err) {
- console.log(err);
- res.status(500).send(err.message);
- return;
- }
+ // push out a range
+ let iplist = '';
+ client.lrange(listName, 0, -1, (err, data) => {
+ if (err) {
+ console.log(err);
+ res.status(500).send(err.message);
+ return;
+ }
- data.forEach((ip) => {
- iplist += `${ip}; `;
- });
+ data.forEach(ip => {
+ iplist += `${ip}; `;
+ });
- res.writeHead(200, { 'Content-Type': 'text/plain' });
- res.end(iplist);
- });
-}).listen(process.env.PORT || 8080);
+ res.writeHead(200, {'Content-Type': 'text/plain'});
+ res.end(iplist);
+ });
+ })
+ .listen(process.env.PORT || 8080);
console.log('started web process');
diff --git a/appengine/sendgrid/app.js b/appengine/sendgrid/app.js
index a8f19c6c2c..d09540f84a 100644
--- a/appengine/sendgrid/app.js
+++ b/appengine/sendgrid/app.js
@@ -34,7 +34,7 @@ app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'pug');
// Parse form data
-app.use(bodyParser.urlencoded({ extended: false }));
+app.use(bodyParser.urlencoded({extended: false}));
app.get('/', (req, res) => {
res.render('index');
@@ -45,26 +45,30 @@ app.post('/hello', (req, res, next) => {
method: 'POST',
path: '/v3/mail/send',
body: {
- personalizations: [{
- to: [{ email: req.body.email }],
- subject: 'Hello World!'
- }],
- from: { email: SENDGRID_SENDER },
- content: [{
- type: 'text/plain',
- value: 'Sendgrid on Google App Engine with Node.js.'
- }]
- }
+ personalizations: [
+ {
+ to: [{email: req.body.email}],
+ subject: 'Hello World!',
+ },
+ ],
+ from: {email: SENDGRID_SENDER},
+ content: [
+ {
+ type: 'text/plain',
+ value: 'Sendgrid on Google App Engine with Node.js.',
+ },
+ ],
+ },
});
- Sendgrid.API(sgReq, (err) => {
+ Sendgrid.API(sgReq, err => {
if (err) {
next(err);
return;
}
// Render the index route on success
res.render('index', {
- sent: true
+ sent: true,
});
});
});
diff --git a/appengine/storage/flexible/app.js b/appengine/storage/flexible/app.js
index cedb34f061..035d6bb0a8 100644
--- a/appengine/storage/flexible/app.js
+++ b/appengine/storage/flexible/app.js
@@ -42,8 +42,8 @@ app.use(bodyParser.json());
const multer = Multer({
storage: Multer.memoryStorage(),
limits: {
- fileSize: 5 * 1024 * 1024 // no larger than 5mb, you can change as needed.
- }
+ fileSize: 5 * 1024 * 1024, // no larger than 5mb, you can change as needed.
+ },
});
// A bucket is a container for objects (files).
@@ -65,13 +65,15 @@ app.post('/upload', multer.single('file'), (req, res, next) => {
const blob = bucket.file(req.file.originalname);
const blobStream = blob.createWriteStream();
- blobStream.on('error', (err) => {
+ blobStream.on('error', err => {
next(err);
});
blobStream.on('finish', () => {
// The public URL can be used to directly access the file via HTTP.
- const publicUrl = format(`https://storage.googleapis.com/${bucket.name}/${blob.name}`);
+ const publicUrl = format(
+ `https://storage.googleapis.com/${bucket.name}/${blob.name}`
+ );
res.status(200).send(publicUrl);
});
diff --git a/appengine/storage/flexible/system-test/app.test.js b/appengine/storage/flexible/system-test/app.test.js
index 98f4cd9d18..02236ac991 100644
--- a/appengine/storage/flexible/system-test/app.test.js
+++ b/appengine/storage/flexible/system-test/app.test.js
@@ -23,14 +23,14 @@ const bucketName = process.env.GCLOUD_STORAGE_BUCKET;
const bucket = storage.bucket(bucketName);
const cwd = path.join(__dirname, `../`);
-const requestObj = utils.getRequest({ cwd: cwd });
+const requestObj = utils.getRequest({cwd: cwd});
test.before(async () => {
utils.checkCredentials();
- await bucket.create(bucket).then((data) => {
+ await bucket.create(bucket).then(data => {
return bucket.acl.add({
entity: 'allUsers',
- role: Storage.acl.READER_ROLE
+ role: Storage.acl.READER_ROLE,
});
});
});
@@ -41,23 +41,26 @@ test.after.always(async () => {
} catch (err) {} // ignore error
});
-test.cb.serial(`should load`, (t) => {
+test.cb.serial(`should load`, t => {
requestObj
.get(`/`)
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.regex(response.text, /<title>Static Files<\/title>/);
})
.end(t.end);
});
-test.cb.serial(`should upload a file`, (t) => {
+test.cb.serial(`should upload a file`, t => {
requestObj
.post(`/upload`)
.attach(`file`, path.join(__dirname, `resources/test.txt`))
.expect(200)
- .expect((response) => {
- t.is(response.text, `https://storage.googleapis.com/${bucketName}/test.txt`);
+ .expect(response => {
+ t.is(
+ response.text,
+ `https://storage.googleapis.com/${bucketName}/test.txt`
+ );
})
.end(t.end);
});
diff --git a/appengine/storage/standard/app.js b/appengine/storage/standard/app.js
index bc0eb31d4a..45485f4d57 100644
--- a/appengine/storage/standard/app.js
+++ b/appengine/storage/standard/app.js
@@ -42,8 +42,8 @@ app.use(bodyParser.json());
const multer = Multer({
storage: Multer.memoryStorage(),
limits: {
- fileSize: 5 * 1024 * 1024 // no larger than 5mb, you can change as needed.
- }
+ fileSize: 5 * 1024 * 1024, // no larger than 5mb, you can change as needed.
+ },
});
// A bucket is a container for objects (files).
@@ -64,16 +64,18 @@ app.post('/upload', multer.single('file'), (req, res, next) => {
// Create a new blob in the bucket and upload the file data.
const blob = bucket.file(req.file.originalname);
const blobStream = blob.createWriteStream({
- resumable: false
+ resumable: false,
});
- blobStream.on('error', (err) => {
+ blobStream.on('error', err => {
next(err);
});
blobStream.on('finish', () => {
// The public URL can be used to directly access the file via HTTP.
- const publicUrl = format(`https://storage.googleapis.com/${bucket.name}/${blob.name}`);
+ const publicUrl = format(
+ `https://storage.googleapis.com/${bucket.name}/${blob.name}`
+ );
res.status(200).send(publicUrl);
});
diff --git a/appengine/storage/standard/system-test/app.test.js b/appengine/storage/standard/system-test/app.test.js
index 98f4cd9d18..02236ac991 100644
--- a/appengine/storage/standard/system-test/app.test.js
+++ b/appengine/storage/standard/system-test/app.test.js
@@ -23,14 +23,14 @@ const bucketName = process.env.GCLOUD_STORAGE_BUCKET;
const bucket = storage.bucket(bucketName);
const cwd = path.join(__dirname, `../`);
-const requestObj = utils.getRequest({ cwd: cwd });
+const requestObj = utils.getRequest({cwd: cwd});
test.before(async () => {
utils.checkCredentials();
- await bucket.create(bucket).then((data) => {
+ await bucket.create(bucket).then(data => {
return bucket.acl.add({
entity: 'allUsers',
- role: Storage.acl.READER_ROLE
+ role: Storage.acl.READER_ROLE,
});
});
});
@@ -41,23 +41,26 @@ test.after.always(async () => {
} catch (err) {} // ignore error
});
-test.cb.serial(`should load`, (t) => {
+test.cb.serial(`should load`, t => {
requestObj
.get(`/`)
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.regex(response.text, /<title>Static Files<\/title>/);
})
.end(t.end);
});
-test.cb.serial(`should upload a file`, (t) => {
+test.cb.serial(`should upload a file`, t => {
requestObj
.post(`/upload`)
.attach(`file`, path.join(__dirname, `resources/test.txt`))
.expect(200)
- .expect((response) => {
- t.is(response.text, `https://storage.googleapis.com/${bucketName}/test.txt`);
+ .expect(response => {
+ t.is(
+ response.text,
+ `https://storage.googleapis.com/${bucketName}/test.txt`
+ );
})
.end(t.end);
});
diff --git a/appengine/system-test/all.test.js b/appengine/system-test/all.test.js
index 1c20b36257..8d5531cfdc 100644
--- a/appengine/system-test/all.test.js
+++ b/appengine/system-test/all.test.js
@@ -24,7 +24,7 @@ var sampleTests = [
cmd: 'node',
args: ['./src/bin/www'],
msg: 'Hello World! Express.js + Grunt.js on Google App Engine.',
- TRAVIS_NODE_VERSION: '0.12'
+ TRAVIS_NODE_VERSION: '0.12',
},
// TODO: Investigate flaky test
// {
@@ -39,14 +39,14 @@ var sampleTests = [
cmd: 'node',
args: ['app.js'],
msg: 'Value:',
- test: /Value: \d\.\d+/
+ test: /Value: \d\.\d+/,
},
{
dir: 'appengine/mongodb',
cmd: 'node',
args: ['server.js'],
msg: 'IPs:',
- TRAVIS: true
+ TRAVIS: true,
},
{
dir: 'appengine/pubsub',
@@ -55,26 +55,26 @@ var sampleTests = [
msg: 'Messages received by this instance:',
env: {
PUBSUB_TOPIC: 'test',
- PUBSUB_VERIFICATION_TOKEN: 'foo'
- }
+ PUBSUB_VERIFICATION_TOKEN: 'foo',
+ },
},
{
dir: 'appengine/redis',
cmd: 'node',
args: ['server.js'],
- msg: '127.0.0.1'
+ msg: '127.0.0.1',
},
{
dir: 'appengine/sendgrid',
cmd: 'node',
args: ['app.js'],
- msg: 'Express.js + Sendgrid on Google App Engine.'
+ msg: 'Express.js + Sendgrid on Google App Engine.',
},
{
dir: 'appengine/static-files',
cmd: 'node',
args: ['app.js'],
- msg: 'This is a static file serving example.'
+ msg: 'This is a static file serving example.',
},
{
dir: 'appengine/storage/flexible',
@@ -82,8 +82,8 @@ var sampleTests = [
args: ['app.js'],
msg: 'Static Files',
env: {
- GCLOUD_STORAGE_BUCKET: 'nodejs-docs-samples'
- }
+ GCLOUD_STORAGE_BUCKET: 'nodejs-docs-samples',
+ },
},
{
dir: 'appengine/storage/standard',
@@ -91,8 +91,8 @@ var sampleTests = [
args: ['app.js'],
msg: 'Static Files',
env: {
- GCLOUD_STORAGE_BUCKET: 'nodejs-docs-samples'
- }
+ GCLOUD_STORAGE_BUCKET: 'nodejs-docs-samples',
+ },
},
{
dir: 'appengine/parse-server',
@@ -103,11 +103,11 @@ var sampleTests = [
env: {
APP_ID: 'foo',
MASTER_KEY: 'bar',
- SERVER_URL: 'http://localhost:'
- }
- }
+ SERVER_URL: 'http://localhost:',
+ },
+ },
];
-test((t) => {
+test(t => {
t.truthy(sampleTests);
});
diff --git a/appengine/twilio/app.js b/appengine/twilio/app.js
index 3d57103d6c..a30f085ddb 100644
--- a/appengine/twilio/app.js
+++ b/appengine/twilio/app.js
@@ -18,14 +18,16 @@
const format = require('util').format;
const express = require('express');
const bodyParser = require('body-parser').urlencoded({
- extended: false
+ extended: false,
});
const app = express();
const TWILIO_NUMBER = process.env.TWILIO_NUMBER;
if (!TWILIO_NUMBER) {
- console.log('Please configure environment variables as described in README.md');
+ console.log(
+ 'Please configure environment variables as described in README.md'
+ );
process.exit(1);
}
@@ -41,7 +43,8 @@ app.post('/call/receive', (req, res) => {
const resp = new TwimlResponse();
resp.say('Hello from Google App Engine.');
- res.status(200)
+ res
+ .status(200)
.contentType('text/xml')
.send(resp.toString());
});
@@ -51,21 +54,26 @@ app.post('/call/receive', (req, res) => {
app.get('/sms/send', (req, res, next) => {
const to = req.query.to;
if (!to) {
- res.status(400).send('Please provide an number in the "to" query string parameter.');
+ res
+ .status(400)
+ .send('Please provide an number in the "to" query string parameter.');
return;
}
- twilio.sendMessage({
- to: to,
- from: TWILIO_NUMBER,
- body: 'Hello from Google App Engine'
- }, (err) => {
- if (err) {
- next(err);
- return;
+ twilio.sendMessage(
+ {
+ to: to,
+ from: TWILIO_NUMBER,
+ body: 'Hello from Google App Engine',
+ },
+ err => {
+ if (err) {
+ next(err);
+ return;
+ }
+ res.status(200).send('Message sent.');
}
- res.status(200).send('Message sent.');
- });
+ );
});
// [END gae_flex_twilio_send_sms]
@@ -77,7 +85,8 @@ app.post('/sms/receive', bodyParser, (req, res) => {
const resp = new TwimlResponse();
resp.message(format('Hello, %s, you said: %s', sender, body));
- res.status(200)
+ res
+ .status(200)
.contentType('text/xml')
.send(resp.toString());
});
diff --git a/appengine/uglifyjs/static/message.js b/appengine/uglifyjs/static/message.js
index cd0b47b640..f4264036e2 100644
--- a/appengine/uglifyjs/static/message.js
+++ b/appengine/uglifyjs/static/message.js
@@ -12,12 +12,12 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-window.onload = function () {
+window.onload = function() {
window.messageDiv = document.getElementById('message');
window.buttonClicks = 0;
};
-window.buttonClick = function () {
+window.buttonClick = function() {
window.buttonClicks++;
var buttonClickTimes;
diff --git a/auth/auth.js b/auth/auth.js
index c048cd2797..49e308f93c 100644
--- a/auth/auth.js
+++ b/auth/auth.js
@@ -20,7 +20,7 @@
'use strict';
-function authCloudImplicit () {
+function authCloudImplicit() {
// [START auth_cloud_implicit]
// Imports the Google Cloud client library.
const {Storage} = require('@google-cloud/storage');
@@ -33,21 +33,21 @@ function authCloudImplicit () {
// Makes an authenticated API request.
storage
.getBuckets()
- .then((results) => {
+ .then(results => {
const buckets = results[0];
console.log('Buckets:');
- buckets.forEach((bucket) => {
+ buckets.forEach(bucket => {
console.log(bucket.name);
});
})
- .catch((err) => {
+ .catch(err => {
console.error('ERROR:', err);
});
// [END auth_cloud_implicit]
}
-function authCloudExplicit () {
+function authCloudExplicit() {
// [START auth_cloud_explicit]
// Imports the Google Cloud client library.
const {Storage} = require('@google-cloud/storage');
@@ -57,21 +57,21 @@ function authCloudExplicit () {
// helper, see https://github.com/GoogleCloudPlatform/google-cloud-node/blob/master/docs/authentication.md
const storage = new Storage({
projectId: 'project-id',
- keyFilename: '/path/to/keyfile.json'
+ keyFilename: '/path/to/keyfile.json',
});
// Makes an authenticated API request.
storage
.getBuckets()
- .then((results) => {
+ .then(results => {
const buckets = results[0];
console.log('Buckets:');
- buckets.forEach((bucket) => {
+ buckets.forEach(bucket => {
console.log(bucket.name);
});
})
- .catch((err) => {
+ .catch(err => {
console.error('ERROR:', err);
});
// [END auth_cloud_explicit]
@@ -95,7 +95,9 @@ const cli = require(`yargs`)
.example(`node $0 explicit`, `Loads credentials explicitly.`)
.wrap(120)
.recommendCommands()
- .epilogue(`For more information, see https://cloud.google.com/docs/authentication`)
+ .epilogue(
+ `For more information, see https://cloud.google.com/docs/authentication`
+ )
.help()
.strict();
diff --git a/auth/package.json b/auth/package.json
index 170bb46d85..5757c578fe 100644
--- a/auth/package.json
+++ b/auth/package.json
@@ -9,11 +9,9 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=6"
+ "node": ">=8"
},
"scripts": {
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"test": "repo-tools test run --cmd ava -- -T 20s --verbose system-test/*.test.js"
},
"dependencies": {
@@ -22,8 +20,7 @@
},
"devDependencies": {
"@google-cloud/nodejs-repo-tools": "^3.0.0",
- "ava": "0.25.0",
- "semistandard": "^12.0.1"
+ "ava": "0.25.0"
},
"cloud-repo-tools": {
"requiresKeyFile": true,
diff --git a/auth/system-test/auth.test.js b/auth/system-test/auth.test.js
index ec733e43ba..9199e89a0c 100644
--- a/auth/system-test/auth.test.js
+++ b/auth/system-test/auth.test.js
@@ -24,12 +24,12 @@ const cmd = `node auth.js`;
test.before(tools.checkCredentials);
-test.serial(`should load credentials implicitly`, async (t) => {
+test.serial(`should load credentials implicitly`, async t => {
const output = await tools.runAsync(`${cmd} auth-cloud-implicit`, cwd);
t.is(output.includes(`Buckets:`), true);
});
-test.serial(`should load credentials explicitly`, async (t) => {
+test.serial(`should load credentials explicitly`, async t => {
const output = await tools.runAsync(`${cmd} auth-cloud-explicit`, cwd);
t.is(output.includes(`Buckets:`), true);
});
diff --git a/containerengine/hello-world/package.json b/containerengine/hello-world/package.json
index e41a5d6903..9e054a5b02 100644
--- a/containerengine/hello-world/package.json
+++ b/containerengine/hello-world/package.json
@@ -10,19 +10,16 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=4.3.2"
+ "node": ">=8.0.0"
},
"scripts": {
"start": "node server.js",
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"system-test": "repo-tools test app -- server.js",
"test": "npm run system-test"
},
"dependencies": {},
"devDependencies": {
- "@google-cloud/nodejs-repo-tools": "^2.3.0",
- "semistandard": "^12.0.1"
+ "@google-cloud/nodejs-repo-tools": "^2.3.0"
},
"cloud-repo-tools": {
"test": {
diff --git a/containerengine/hello-world/server.js b/containerengine/hello-world/server.js
index 7cf9003a8c..5b033fc304 100644
--- a/containerengine/hello-world/server.js
+++ b/containerengine/hello-world/server.js
@@ -17,7 +17,7 @@
// [START all]
var http = require('http');
-var handleRequest = function (req, res) {
+var handleRequest = function(req, res) {
res.writeHead(200);
res.end('Hello Kubernetes!');
};
diff --git a/debugger/package.json b/debugger/package.json
index 9b20e41eb1..39cc39f57f 100644
--- a/debugger/package.json
+++ b/debugger/package.json
@@ -10,13 +10,11 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=6"
+ "node": ">=8"
},
"scripts": {
"deploy": "gcloud app deploy",
"start": "node app.js",
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"system-test": "repo-tools test app",
"test": "npm run system-test",
"e2e-test": "repo-tools test deploy"
@@ -26,8 +24,7 @@
"express": "4.16.4"
},
"devDependencies": {
- "@google-cloud/nodejs-repo-tools": "^3.0.0",
- "semistandard": "^12.0.1"
+ "@google-cloud/nodejs-repo-tools": "^3.0.0"
},
"cloud-repo-tools": {
"test": {
diff --git a/debugger/snippets.js b/debugger/snippets.js
index 4abda897c6..4b6d9a2b5d 100644
--- a/debugger/snippets.js
+++ b/debugger/snippets.js
@@ -18,6 +18,6 @@
// [START debugger_setup_explicit]
require('@google-cloud/debug-agent').start({
projectId: 'your-project-id',
- keyFilename: '/path/to/key.json'
+ keyFilename: '/path/to/key.json',
});
// [END debugger_setup_explicity]
diff --git a/endpoints/getting-started-grpc/client.js b/endpoints/getting-started-grpc/client.js
index da490842d8..2acfc722ae 100644
--- a/endpoints/getting-started-grpc/client.js
+++ b/endpoints/getting-started-grpc/client.js
@@ -15,7 +15,7 @@
'use strict';
-function makeGrpcRequest (JWT_AUTH_TOKEN, API_KEY, HOST, GREETEE) {
+function makeGrpcRequest(JWT_AUTH_TOKEN, API_KEY, HOST, GREETEE) {
// Uncomment these lines to set their values
// const JWT_AUTH_TOKEN = 'YOUR_JWT_AUTH_TOKEN';
// const API_KEY = 'YOUR_API_KEY';
@@ -42,7 +42,7 @@ function makeGrpcRequest (JWT_AUTH_TOKEN, API_KEY, HOST, GREETEE) {
}
// Execute gRPC request
- client.sayHello({ name: GREETEE }, metadata, (err, response) => {
+ client.sayHello({name: GREETEE}, metadata, (err, response) => {
if (err) {
console.error(err);
}
@@ -55,32 +55,34 @@ function makeGrpcRequest (JWT_AUTH_TOKEN, API_KEY, HOST, GREETEE) {
// The command-line program
const argv = require('yargs')
- .usage('Usage: node $0 {-k YOUR_API_KEY>, <-j YOUR_JWT_AUTH_TOKEN} [-h YOUR_ENDPOINTS_HOST] [-g GREETEE_NAME]')
+ .usage(
+ 'Usage: node $0 {-k YOUR_API_KEY>, <-j YOUR_JWT_AUTH_TOKEN} [-h YOUR_ENDPOINTS_HOST] [-g GREETEE_NAME]'
+ )
.option('jwtAuthToken', {
alias: 'j',
type: 'string',
global: true,
- default: ''
+ default: '',
})
.option('apiKey', {
alias: 'k',
type: 'string',
global: true,
- default: ''
+ default: '',
})
.option('host', {
alias: 'h',
type: 'string',
default: 'localhost:50051',
- global: true
+ global: true,
})
.option('greetee', {
alias: 'g',
type: 'string',
default: 'world',
- global: true
+ global: true,
})
- .check((argv) => {
+ .check(argv => {
const valid = !!(argv.jwtAuthToken || argv.apiKey);
if (!valid) {
console.error('One of API_KEY or JWT_AUTH_TOKEN must be set.');
diff --git a/endpoints/getting-started-grpc/package.json b/endpoints/getting-started-grpc/package.json
index bf66b5fed5..a6bc1df370 100644
--- a/endpoints/getting-started-grpc/package.json
+++ b/endpoints/getting-started-grpc/package.json
@@ -10,12 +10,10 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=4.3.2"
+ "node": ">=8"
},
"scripts": {
"start": "node server.js",
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"test": "repo-tools test run --cmd ava -- -T 1m --verbose system-test/*.test.js"
},
"dependencies": {
diff --git a/endpoints/getting-started-grpc/server.js b/endpoints/getting-started-grpc/server.js
index 29b42fb4b2..5c8e20988e 100644
--- a/endpoints/getting-started-grpc/server.js
+++ b/endpoints/getting-started-grpc/server.js
@@ -22,14 +22,14 @@ const grpc = require('grpc');
const helloProto = grpc.load(PROTO_PATH).helloworld;
// Implement the SayHello RPC method.
-function sayHello (call, callback) {
- callback(null, { message: `Hello ${call.request.name}` });
+function sayHello(call, callback) {
+ callback(null, {message: `Hello ${call.request.name}`});
}
// Start an RPC server to handle Greeter service requests
-function startServer (PORT) {
+function startServer(PORT) {
const server = new grpc.Server();
- server.addProtoService(helloProto.Greeter.service, { sayHello: sayHello });
+ server.addProtoService(helloProto.Greeter.service, {sayHello: sayHello});
server.bind(`0.0.0.0:${PORT}`, grpc.ServerCredentials.createInsecure());
server.start();
}
@@ -41,7 +41,7 @@ const argv = require('yargs')
alias: 'p',
type: 'number',
default: 50051,
- global: true
+ global: true,
})
.wrap(120)
.epilogue(`For more information, see https://cloud.google.com/endpoints/docs`)
diff --git a/endpoints/getting-started-grpc/system-test/endpoints.test.js b/endpoints/getting-started-grpc/system-test/endpoints.test.js
index f9d98fdf18..26fe92fab2 100644
--- a/endpoints/getting-started-grpc/system-test/endpoints.test.js
+++ b/endpoints/getting-started-grpc/system-test/endpoints.test.js
@@ -28,79 +28,121 @@ const serverCmd = `node server.js`;
const cwd = path.join(__dirname, `..`);
const API_KEY = process.env.ENDPOINTS_API_KEY;
-const GOOGLE_KEYFILE = JSON.parse(fs.readFileSync(process.env.GOOGLE_APPLICATION_CREDENTIALS, 'utf8'));
+const GOOGLE_KEYFILE = JSON.parse(
+ fs.readFileSync(process.env.GOOGLE_APPLICATION_CREDENTIALS, 'utf8')
+);
const SERVICE_NAME = process.env.ENDPOINTS_SERVICE_NAME;
const GCE_HOST = process.env.ENDPOINTS_GCE_HOST;
const GKE_HOST = process.env.ENDPOINTS_GKE_HOST;
-test.before((t) => {
+test.before(t => {
t.truthy(API_KEY, 'Must set ENDPOINTS_API_KEY environment variable!');
t.truthy(GCE_HOST, 'Must set ENDPOINTS_GCE_HOST environment variable!');
t.truthy(GKE_HOST, 'Must set ENDPOINTS_GKE_HOST environment variable!');
- t.truthy(SERVICE_NAME, 'Must set ENDPOINTS_SERVICE_NAME environment variable!');
- t.truthy(GOOGLE_KEYFILE, 'GOOGLE_APPLICATION_CREDENTIALS environment variable must point to a service account keyfile!');
- t.truthy(GOOGLE_KEYFILE.client_email, 'Service account keyfile must contain a "client_email" field!');
- t.truthy(GOOGLE_KEYFILE.private_key, 'Service account keyfile must contain a "private_key" field!');
+ t.truthy(
+ SERVICE_NAME,
+ 'Must set ENDPOINTS_SERVICE_NAME environment variable!'
+ );
+ t.truthy(
+ GOOGLE_KEYFILE,
+ 'GOOGLE_APPLICATION_CREDENTIALS environment variable must point to a service account keyfile!'
+ );
+ t.truthy(
+ GOOGLE_KEYFILE.client_email,
+ 'Service account keyfile must contain a "client_email" field!'
+ );
+ t.truthy(
+ GOOGLE_KEYFILE.private_key,
+ 'Service account keyfile must contain a "private_key" field!'
+ );
});
// Generate JWT based on GOOGLE_APPLICATION_CREDENTIALS and ENDPOINTS_SERVICE_NAME
-const JWT_AUTH_TOKEN = jwt.sign({
- 'aud': SERVICE_NAME,
- 'iss': GOOGLE_KEYFILE.client_email,
- 'iat': parseInt(Date.now() / 1000),
- 'exp': parseInt(Date.now() / 1000) + (20 * 60), // 20 minutes
- 'email': GOOGLE_KEYFILE.client_email,
- 'sub': GOOGLE_KEYFILE.client_email
-}, GOOGLE_KEYFILE.private_key, { algorithm: 'RS256' });
-
-const delay = (mSec) => {
- return new Promise((resolve) => setTimeout(resolve, mSec));
+const JWT_AUTH_TOKEN = jwt.sign(
+ {
+ aud: SERVICE_NAME,
+ iss: GOOGLE_KEYFILE.client_email,
+ iat: parseInt(Date.now() / 1000),
+ exp: parseInt(Date.now() / 1000) + 20 * 60, // 20 minutes
+ email: GOOGLE_KEYFILE.client_email,
+ sub: GOOGLE_KEYFILE.client_email,
+ },
+ GOOGLE_KEYFILE.private_key,
+ {algorithm: 'RS256'}
+);
+
+const delay = mSec => {
+ return new Promise(resolve => setTimeout(resolve, mSec));
};
// API key
-test(`should request a greeting from a remote Compute Engine instance using an API key`, async (t) => {
- const output = await tools.runAsync(`${clientCmd} -h ${GCE_HOST} -k ${API_KEY}`, cwd);
+test(`should request a greeting from a remote Compute Engine instance using an API key`, async t => {
+ const output = await tools.runAsync(
+ `${clientCmd} -h ${GCE_HOST} -k ${API_KEY}`,
+ cwd
+ );
t.regex(output, /Hello world/);
});
-test(`should request a greeting from a remote Container Engine cluster using an API key`, async (t) => {
- const output = await tools.runAsync(`${clientCmd} -h ${GKE_HOST} -k ${API_KEY}`, cwd);
+test(`should request a greeting from a remote Container Engine cluster using an API key`, async t => {
+ const output = await tools.runAsync(
+ `${clientCmd} -h ${GKE_HOST} -k ${API_KEY}`,
+ cwd
+ );
t.regex(output, /Hello world/);
});
-test.serial(`should request and handle a greeting locally using an API key`, async (t) => {
- const PORT = 50051;
- const server = childProcess.exec(`${serverCmd} -p ${PORT}`, { cwd: cwd });
-
- await delay(1000);
- const clientOutput = await tools.runAsync(`${clientCmd} -h localhost:${PORT} -k ${API_KEY}`, cwd);
- t.regex(clientOutput, /Hello world/);
- server.kill();
-});
+test.serial(
+ `should request and handle a greeting locally using an API key`,
+ async t => {
+ const PORT = 50051;
+ const server = childProcess.exec(`${serverCmd} -p ${PORT}`, {cwd: cwd});
+
+ await delay(1000);
+ const clientOutput = await tools.runAsync(
+ `${clientCmd} -h localhost:${PORT} -k ${API_KEY}`,
+ cwd
+ );
+ t.regex(clientOutput, /Hello world/);
+ server.kill();
+ }
+);
// Authtoken
-test(`should request a greeting from a remote Compute Engine instance using a JWT Auth Token`, async (t) => {
- const output = await tools.runAsync(`${clientCmd} -h ${GCE_HOST} -j ${JWT_AUTH_TOKEN}`, cwd);
+test(`should request a greeting from a remote Compute Engine instance using a JWT Auth Token`, async t => {
+ const output = await tools.runAsync(
+ `${clientCmd} -h ${GCE_HOST} -j ${JWT_AUTH_TOKEN}`,
+ cwd
+ );
t.regex(output, /Hello world/);
});
-test(`should request a greeting from a remote Container Engine cluster using a JWT Auth Token`, async (t) => {
- const output = await tools.runAsync(`${clientCmd} -h ${GKE_HOST} -j ${JWT_AUTH_TOKEN}`, cwd);
+test(`should request a greeting from a remote Container Engine cluster using a JWT Auth Token`, async t => {
+ const output = await tools.runAsync(
+ `${clientCmd} -h ${GKE_HOST} -j ${JWT_AUTH_TOKEN}`,
+ cwd
+ );
t.regex(output, /Hello world/);
});
-test.serial(`should request and handle a greeting locally using a JWT Auth Token`, async (t) => {
- const PORT = 50051;
- const server = childProcess.exec(`${serverCmd} -p ${PORT}`, { cwd: cwd });
-
- await delay(1000);
- const clientOutput = await tools.runAsync(`${clientCmd} -h localhost:${PORT} -j ${JWT_AUTH_TOKEN}`, cwd);
- t.regex(clientOutput, /Hello world/);
- server.kill();
-});
+test.serial(
+ `should request and handle a greeting locally using a JWT Auth Token`,
+ async t => {
+ const PORT = 50051;
+ const server = childProcess.exec(`${serverCmd} -p ${PORT}`, {cwd: cwd});
+
+ await delay(1000);
+ const clientOutput = await tools.runAsync(
+ `${clientCmd} -h localhost:${PORT} -j ${JWT_AUTH_TOKEN}`,
+ cwd
+ );
+ t.regex(clientOutput, /Hello world/);
+ server.kill();
+ }
+);
// Misc
-test('should require either an API key or a JWT Auth Token', async (t) => {
+test('should require either an API key or a JWT Auth Token', async t => {
await t.throws(
tools.runAsync(`${clientCmd} -h ${GCE_HOST}`, cwd),
/One of API_KEY or JWT_AUTH_TOKEN must be set/
diff --git a/endpoints/getting-started/app.js b/endpoints/getting-started/app.js
index 4416ec8b32..396dbd1702 100644
--- a/endpoints/getting-started/app.js
+++ b/endpoints/getting-started/app.js
@@ -28,16 +28,22 @@ app.use(bodyParser.json());
// [END setup]
app.post('/echo', (req, res) => {
- res.status(200).json({ message: req.body.message }).end();
+ res
+ .status(200)
+ .json({message: req.body.message})
+ .end();
});
-function authInfoHandler (req, res) {
- let authUser = { id: 'anonymous' };
+function authInfoHandler(req, res) {
+ let authUser = {id: 'anonymous'};
const encodedInfo = req.get('X-Endpoint-API-UserInfo');
if (encodedInfo) {
authUser = JSON.parse(Buffer.from(encodedInfo, 'base64'));
}
- res.status(200).json(authUser).end();
+ res
+ .status(200)
+ .json(authUser)
+ .end();
}
app.get('/auth/info/googlejwt', authInfoHandler);
diff --git a/endpoints/getting-started/package.json b/endpoints/getting-started/package.json
index c6aac0d30a..3bcf208a8c 100644
--- a/endpoints/getting-started/package.json
+++ b/endpoints/getting-started/package.json
@@ -10,12 +10,10 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=4.3.2"
+ "node": ">=8.0.0"
},
"scripts": {
"start": "node app.js",
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"test": "repo-tools test run --cmd ava -- -T 20s --verbose test/*.test.js"
},
"dependencies": {
@@ -27,7 +25,6 @@
"@google-cloud/nodejs-repo-tools": "^2.3.0",
"ava": "0.25.0",
"proxyquire": "2.0.0",
- "semistandard": "^12.0.1",
"sinon": "4.4.2",
"supertest": "3.3.0"
}
diff --git a/endpoints/getting-started/test/app.test.js b/endpoints/getting-started/test/app.test.js
index b62c942718..a9c2590ab4 100644
--- a/endpoints/getting-started/test/app.test.js
+++ b/endpoints/getting-started/test/app.test.js
@@ -26,53 +26,56 @@ const tools = require('@google-cloud/nodejs-repo-tools');
const SAMPLE_PATH = path.join(__dirname, '../app.js');
-function getSample () {
+function getSample() {
const testApp = express();
sinon.stub(testApp, 'listen').callsArg(1);
const expressMock = sinon.stub().returns(testApp);
const app = proxyquire(SAMPLE_PATH, {
- express: expressMock
+ express: expressMock,
});
return {
app: app,
mocks: {
- express: expressMock
- }
+ express: expressMock,
+ },
};
}
test.beforeEach(tools.stubConsole);
test.afterEach.always(tools.restoreConsole);
-test.cb('should echo a message', (t) => {
+test.cb('should echo a message', t => {
request(getSample().app)
.post('/echo')
- .send({ message: 'foo' })
+ .send({message: 'foo'})
.expect(200)
- .expect((response) => {
+ .expect(response => {
t.is(response.body.message, 'foo');
})
.end(t.end);
});
-test.cb('should try to parse encoded info', (t) => {
+test.cb('should try to parse encoded info', t => {
request(getSample().app)
.get('/auth/info/googlejwt')
.expect(200)
- .expect((response) => {
- t.deepEqual(response.body, { id: 'anonymous' });
+ .expect(response => {
+ t.deepEqual(response.body, {id: 'anonymous'});
})
.end(t.end);
});
-test.cb('should successfully parse encoded info', (t) => {
+test.cb('should successfully parse encoded info', t => {
request(getSample().app)
.get('/auth/info/googlejwt')
- .set('X-Endpoint-API-UserInfo', Buffer.from(JSON.stringify({ id: 'foo' })).toString('base64'))
+ .set(
+ 'X-Endpoint-API-UserInfo',
+ Buffer.from(JSON.stringify({id: 'foo'})).toString('base64')
+ )
.expect(200)
- .expect((response) => {
- t.deepEqual(response.body, { id: 'foo' });
+ .expect(response => {
+ t.deepEqual(response.body, {id: 'foo'});
})
.end(t.end);
});
diff --git a/error-reporting/package.json b/error-reporting/package.json
index 596aa76211..bacd21adc6 100644
--- a/error-reporting/package.json
+++ b/error-reporting/package.json
@@ -9,11 +9,9 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=6"
+ "node": ">=8"
},
"scripts": {
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"error-test": "repo-tools test app --msg \"Something broke!\" --url \"http://localhost:33332/error\" --port 33332 -- snippets.js express",
"exception-test": "repo-tools test app --code 500 --msg SyntaxError --url \"http://localhost:33333/exception\" --port 33333 -- snippets.js express",
"system-test": "ava -T 1m --verbose system-test/*.test.js",
@@ -29,7 +27,6 @@
"@google-cloud/nodejs-repo-tools": "^3.0.0",
"ava": "0.25.0",
"proxyquire": "2.0.0",
- "semistandard": "^12.0.1",
"sinon": "4.4.2"
},
"cloud-repo-tools": {
diff --git a/error-reporting/snippets.js b/error-reporting/snippets.js
index 3ee41a85f9..9f8bc7b300 100644
--- a/error-reporting/snippets.js
+++ b/error-reporting/snippets.js
@@ -15,7 +15,7 @@
'use strict';
-function setupImplicit () {
+function setupImplicit() {
// [START error_reporting_setup_implicit]
// Imports the Google Cloud client library
const ErrorReporting = require('@google-cloud/error-reporting');
@@ -28,7 +28,7 @@ function setupImplicit () {
// [END error_reporting_setup_implicit]
}
-function setupExplicit () {
+function setupExplicit() {
// [START error_reporting_setup_explicit]
// Imports the Google Cloud client library
const ErrorReporting = require('@google-cloud/error-reporting');
@@ -36,7 +36,7 @@ function setupExplicit () {
// Instantiates a client
const errors = ErrorReporting({
projectId: 'your-project-id',
- keyFilename: '/path/to/key.json'
+ keyFilename: '/path/to/key.json',
});
// Reports a simple error
@@ -44,7 +44,7 @@ function setupExplicit () {
// [END error_reporting_setup_explicit]
}
-function manual () {
+function manual() {
// [START error_reporting_manual]
// Imports the Google Cloud client library
const ErrorReporting = require('@google-cloud/error-reporting');
@@ -76,7 +76,7 @@ function manual () {
// [END error_reporting_manual]
}
-function express () {
+function express() {
// [START error_reporting_express]
const express = require('express');
@@ -112,17 +112,43 @@ function express () {
// The command-line program
const cli = require(`yargs`)
.demand(1)
- .command('setup-implicit', 'Reports a simple error using implicit credentials.', {}, setupImplicit)
- .command('setup-explicit', 'Reports a simple error using explicit credentials.', {}, setupExplicit)
+ .command(
+ 'setup-implicit',
+ 'Reports a simple error using implicit credentials.',
+ {},
+ setupImplicit
+ )
+ .command(
+ 'setup-explicit',
+ 'Reports a simple error using explicit credentials.',
+ {},
+ setupExplicit
+ )
.command('manual', 'Manually reports errors.', {}, manual)
- .command('express', 'Starts and Express service with integrated error reporting.', {}, express)
- .example('node $0 setup-implicit', 'Reports a simple error using implicit credentials.')
- .example('node $0 setup-explicit', 'Reports a simple error using explicit credentials.')
+ .command(
+ 'express',
+ 'Starts and Express service with integrated error reporting.',
+ {},
+ express
+ )
+ .example(
+ 'node $0 setup-implicit',
+ 'Reports a simple error using implicit credentials.'
+ )
+ .example(
+ 'node $0 setup-explicit',
+ 'Reports a simple error using explicit credentials.'
+ )
.example('node $0 manual', 'Manually report some errors.')
- .example('node $0 express', 'Starts and Express service with integrated error reporting.')
+ .example(
+ 'node $0 express',
+ 'Starts and Express service with integrated error reporting.'
+ )
.wrap(120)
.recommendCommands()
- .epilogue(`For more information, see https://cloud.google.com/error-reporting/docs`)
+ .epilogue(
+ `For more information, see https://cloud.google.com/error-reporting/docs`
+ )
.help()
.strict();
diff --git a/error-reporting/system-test/snippets.test.js b/error-reporting/system-test/snippets.test.js
index c830419619..b5b4088c6b 100644
--- a/error-reporting/system-test/snippets.test.js
+++ b/error-reporting/system-test/snippets.test.js
@@ -24,13 +24,13 @@ const cmd = `node snippets.js`;
test.before(tools.checkCredentials);
-test.serial(`should setup using implicit credentials`, async (t) => {
+test.serial(`should setup using implicit credentials`, async t => {
t.plan(0);
// There's no output, the command should just succeed
await tools.runAsync(`${cmd} setup-implicit`, cwd);
});
-test.serial(`should report errors manually`, async (t) => {
+test.serial(`should report errors manually`, async t => {
const output = await tools.runAsync(`${cmd} manual`, cwd);
t.is(output.includes('Done reporting error event!'), true);
t.is(output.includes('Done reporting Error object!'), true);
diff --git a/healthcare/.eslintrc.yml b/healthcare/.eslintrc.yml
new file mode 100644
index 0000000000..8461e81644
--- /dev/null
+++ b/healthcare/.eslintrc.yml
@@ -0,0 +1,5 @@
+---
+rules:
+ node/no-unsupported-features/es-syntax: off
+ no-empty: off
+
diff --git a/healthcare/datasets/datasets.js b/healthcare/datasets/datasets.js
index 8993f8fa05..e80dee7473 100644
--- a/healthcare/datasets/datasets.js
+++ b/healthcare/datasets/datasets.js
@@ -18,7 +18,7 @@
const {google} = require('googleapis');
// [START healthcare_create_dataset]
-function createDataset (client, projectId, cloudRegion, datasetId) {
+function createDataset(client, projectId, cloudRegion, datasetId) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
// const cloudRegion = 'us-central1';
@@ -28,7 +28,8 @@ function createDataset (client, projectId, cloudRegion, datasetId) {
const request = {parent: parentName, datasetId: datasetId};
- client.projects.locations.datasets.create(request)
+ client.projects.locations.datasets
+ .create(request)
.then(() => {
console.log(`Created dataset: ${datasetId}`);
})
@@ -39,18 +40,18 @@ function createDataset (client, projectId, cloudRegion, datasetId) {
// [END healthcare_create_dataset]
// [START healthcare_delete_dataset]
-function deleteDataset (client, projectId, cloudRegion, datasetId, cb) {
+function deleteDataset(client, projectId, cloudRegion, datasetId) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
// const cloudRegion = 'us-central1';
// const projectId = 'adjective-noun-123';
// const datasetId = 'my-dataset';
- const datasetName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}`;
+ const datasetName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}`;
const request = {name: datasetName};
- client.projects.locations.datasets.delete(request)
+ client.projects.locations.datasets
+ .delete(request)
.then(() => {
console.log(`Deleted dataset: ${datasetId}`);
})
@@ -61,18 +62,18 @@ function deleteDataset (client, projectId, cloudRegion, datasetId, cb) {
// [END healthcare_delete_dataset]
// [START healthcare_get_dataset]
-function getDataset (client, projectId, cloudRegion, datasetId) {
+function getDataset(client, projectId, cloudRegion, datasetId) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
// const cloudRegion = 'us-central1';
// const projectId = 'adjective-noun-123';
// const datasetId = 'my-dataset';
- const datasetName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}`;
+ const datasetName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}`;
const request = {name: datasetName};
- client.projects.locations.datasets.get(request)
+ client.projects.locations.datasets
+ .get(request)
.then(results => {
console.log('Got dataset:\n', results.data);
})
@@ -83,7 +84,7 @@ function getDataset (client, projectId, cloudRegion, datasetId) {
// [END healthcare_get_dataset]
// [START healthcare_list_datasets]
-function listDatasets (client, projectId, cloudRegion) {
+function listDatasets(client, projectId, cloudRegion) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
// const cloudRegion = 'us-central1';
@@ -92,7 +93,8 @@ function listDatasets (client, projectId, cloudRegion) {
const request = {parent: parentName};
- client.projects.locations.datasets.list(request)
+ client.projects.locations.datasets
+ .list(request)
.then(results => {
console.log('Datasets:', results.data);
})
@@ -103,27 +105,27 @@ function listDatasets (client, projectId, cloudRegion) {
// [END healthcare_list_datasets]
// [START healthcare_patch_dataset]
-function patchDataset (client, projectId, cloudRegion, datasetId, timeZone) {
+function patchDataset(client, projectId, cloudRegion, datasetId, timeZone) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
// const cloudRegion = 'us-central1';
// const projectId = 'adjective-noun-123';
// const datasetId = 'my-dataset';
// const timeZone = 'GMT'
- const datasetName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}`;
+ const datasetName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}`;
const request = {
name: datasetName,
updateMask: 'timeZone',
- resource: {timeZone: timeZone}
+ resource: {timeZone: timeZone},
};
- client.projects.locations.datasets.patch(request)
+ client.projects.locations.datasets
+ .patch(request)
.then(results => {
console.log(
- `Dataset ${datasetId} patched with time zone ${
- results.data.timeZone}`);
+ `Dataset ${datasetId} patched with time zone ${results.data.timeZone}`
+ );
})
.catch(err => {
console.error(err);
@@ -132,9 +134,14 @@ function patchDataset (client, projectId, cloudRegion, datasetId, timeZone) {
// [END healthcare_patch_dataset]
// [START healthcare_deidentify_dataset]
-function deidentifyDataset (
- client, projectId, cloudRegion, sourceDatasetId, destinationDatasetId,
- whitelistTags) {
+function deidentifyDataset(
+ client,
+ projectId,
+ cloudRegion,
+ sourceDatasetId,
+ destinationDatasetId,
+ whitelistTags
+) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
// const cloudRegion = 'us-central1';
@@ -142,19 +149,18 @@ function deidentifyDataset (
// const sourceDatasetId = 'my-dataset';
// const destinationDatasetId = 'my-destination-dataset';
// const whitelistTags = 'PatientID';
- const sourceDatasetName = `projects/${projectId}/locations/${
- cloudRegion}/datasets/${sourceDatasetId}`;
- const destinationDatasetName = `projects/${projectId}/locations/${
- cloudRegion}/datasets/${destinationDatasetId}`;
+ const sourceDatasetName = `projects/${projectId}/locations/${cloudRegion}/datasets/${sourceDatasetId}`;
+ const destinationDatasetName = `projects/${projectId}/locations/${cloudRegion}/datasets/${destinationDatasetId}`;
const request = {
sourceDataset: sourceDatasetName,
destinationDataset: destinationDatasetName,
- resource: {config: {dicom: {whitelistTags: whitelistTags}}}
+ resource: {config: {dicom: {whitelistTags: whitelistTags}}},
};
- client.projects.locations.datasets.deidentify(request)
- .then(results => {
+ client.projects.locations.datasets
+ .deidentify(request)
+ .then(() => {
console.log(`De-identified data written from dataset
${sourceDatasetId} to dataset ${destinationDatasetId}`);
})
@@ -167,23 +173,23 @@ function deidentifyDataset (
// [START healthcare_get_client]
// Returns an authorized API client by discovering the Healthcare API with
// the provided API key.
-function getClient (apiKey, serviceAccountJson, cb) {
+function getClient(apiKey, serviceAccountJson, cb) {
const API_VERSION = 'v1alpha';
const DISCOVERY_API = 'https://healthcare.googleapis.com/$discovery/rest';
google.auth
.getClient({scopes: ['https://www.googleapis.com/auth/cloud-platform']})
.then(authClient => {
- const discoveryUrl = `${DISCOVERY_API}?labels=CHC_ALPHA&version=${
- API_VERSION}&key=${apiKey}`;
+ const discoveryUrl = `${DISCOVERY_API}?labels=CHC_ALPHA&version=${API_VERSION}&key=${apiKey}`;
google.options({auth: authClient});
- google.discoverAPI(discoveryUrl)
- .then((client) => {
+ google
+ .discoverAPI(discoveryUrl)
+ .then(client => {
cb(client);
})
- .catch((err) => {
+ .catch(err => {
console.log(`Error during API discovery: ${err}`);
});
});
@@ -196,101 +202,121 @@ require(`yargs`) // eslint-disable-line
apiKey: {
alias: 'a',
default: process.env.API_KEY,
- description: 'The API key used for discovering the API. ' +
- 'Defaults to the value of the API_KEY environment variable.',
+ description:
+ 'The API key used for discovering the API. ' +
+ 'Defaults to the value of the API_KEY environment variable.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
cloudRegion: {
alias: 'c',
default: 'us-central1',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
projectId: {
alias: 'p',
default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
description:
- 'The Project ID to use. Defaults to the value of the ' +
- 'GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
+ 'The Project ID to use. Defaults to the value of the ' +
+ 'GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
serviceAccount: {
alias: 's',
default: process.env.GOOGLE_APPLICATION_CREDENTIALS,
description: 'The path to your service credentials JSON.',
requiresArg: true,
- type: 'string'
- }
+ type: 'string',
+ },
})
.command(
- `createDataset `, `Creates a new health dataset.`, {},
- (opts) => {
- const cb = function (client) {
- createDataset(
- client, opts.projectId, opts.cloudRegion, opts.datasetId);
+ `createDataset `,
+ `Creates a new health dataset.`,
+ {},
+ opts => {
+ const cb = function(client) {
+ createDataset(client, opts.projectId, opts.cloudRegion, opts.datasetId);
};
getClient(opts.apiKey, opts.serviceAccount, cb);
- })
+ }
+ )
.command(
`deleteDataset `,
`Deletes the specified health dataset and all data contained
in the dataset.`,
{},
- (opts) => {
- const cb = function (client) {
- deleteDataset(
- client, opts.projectId, opts.cloudRegion, opts.datasetId);
+ opts => {
+ const cb = function(client) {
+ deleteDataset(client, opts.projectId, opts.cloudRegion, opts.datasetId);
};
getClient(opts.apiKey, opts.serviceAccount, cb);
- })
+ }
+ )
.command(
`getDataset `,
- `Gets any metadata associated with a dataset.`, {},
- (opts) => {
- const cb = function (client) {
- getDataset(
- client, opts.projectId, opts.cloudRegion, opts.datasetId);
+ `Gets any metadata associated with a dataset.`,
+ {},
+ opts => {
+ const cb = function(client) {
+ getDataset(client, opts.projectId, opts.cloudRegion, opts.datasetId);
};
getClient(opts.apiKey, opts.serviceAccount, cb);
- })
+ }
+ )
.command(
- `listDatasets`, `Lists the datasets in the given GCP project.`, {},
- (opts) => {
- const cb = function (client) {
+ `listDatasets`,
+ `Lists the datasets in the given GCP project.`,
+ {},
+ opts => {
+ const cb = function(client) {
listDatasets(client, opts.projectId, opts.cloudRegion);
};
getClient(opts.apiKey, opts.serviceAccount, cb);
- })
+ }
+ )
.command(
- `patchDataset `, `Updates dataset metadata.`, {},
- (opts) => {
- const cb = function (client) {
+ `patchDataset `,
+ `Updates dataset metadata.`,
+ {},
+ opts => {
+ const cb = function(client) {
patchDataset(
- client, opts.projectId, opts.cloudRegion, opts.datasetId,
- opts.timeZone);
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.timeZone
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
- })
+ }
+ )
.command(
`deidentifyDataset
`,
`Creates a new dataset containing de-identified data from the
source dataset.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
deidentifyDataset(
- client, opts.projectId, opts.cloudRegion, opts.sourceDatasetId,
- opts.destinationDatasetId, opts.whitelistTags);
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.sourceDatasetId,
+ opts.destinationDatasetId,
+ opts.whitelistTags
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
- })
+ }
+ )
.wrap(120)
.recommendCommands()
.epilogue(
- `For more information, see https://cloud.google.com/healthcare/docs`)
+ `For more information, see https://cloud.google.com/healthcare/docs`
+ )
.help()
- .strict()
- .argv;
+ .strict().argv;
diff --git a/healthcare/datasets/system-test/datasets.test.js b/healthcare/datasets/system-test/datasets.test.js
index 705e08feca..568d1421be 100644
--- a/healthcare/datasets/system-test/datasets.test.js
+++ b/healthcare/datasets/system-test/datasets.test.js
@@ -29,20 +29,17 @@ const whitelistTags = 'PatientID';
test.before(tools.checkCredentials);
test.after.always(async () => {
try {
- await tools.runAsync(
- `${cmd} deleteDataset ${destinationDatasetId}`, cwd);
- } catch (err) { } // Ignore error
+ await tools.runAsync(`${cmd} deleteDataset ${destinationDatasetId}`, cwd);
+ } catch (err) {} // Ignore error
});
test.serial(`should create a dataset`, async t => {
- const output = await tools.runAsync(
- `${cmd} createDataset ${datasetId}`, cwd);
+ const output = await tools.runAsync(`${cmd} createDataset ${datasetId}`, cwd);
t.is(output, `Created dataset: ${datasetId}`);
});
test.serial(`should get a dataset`, async t => {
- const output = await tools.runAsync(
- `${cmd} getDataset ${datasetId}`, cwd);
+ const output = await tools.runAsync(`${cmd} getDataset ${datasetId}`, cwd);
t.regex(output, /name/);
t.regex(output, /timeZone/);
});
@@ -50,25 +47,33 @@ test.serial(`should get a dataset`, async t => {
test.serial(`should patch a dataset`, async t => {
const patchTimeZone = 'GMT';
const output = await tools.runAsync(
- `${cmd} patchDataset ${datasetId} ${patchTimeZone}`, cwd);
+ `${cmd} patchDataset ${datasetId} ${patchTimeZone}`,
+ cwd
+ );
t.is(output, `Dataset ${datasetId} patched with time zone ${patchTimeZone}`);
});
test.serial(`should list datasets`, async t => {
- const output = await tools.runAsync(
- `${cmd} listDatasets`, cwd);
+ const output = await tools.runAsync(`${cmd} listDatasets`, cwd);
t.regex(output, /datasets/);
});
-test.serial(`should de-identify data in a dataset and write to a new dataset`, async t => {
- const output = await tools.runAsync(
- `${cmd} deidentifyDataset ${datasetId} ${destinationDatasetId} ${whitelistTags}`, cwd);
- t.is(output, `De-identified data written from dataset
- ${datasetId} to dataset ${destinationDatasetId}`);
-});
+test.serial(
+ `should de-identify data in a dataset and write to a new dataset`,
+ async t => {
+ const output = await tools.runAsync(
+ `${cmd} deidentifyDataset ${datasetId} ${destinationDatasetId} ${whitelistTags}`,
+ cwd
+ );
+ t.is(
+ output,
+ `De-identified data written from dataset
+ ${datasetId} to dataset ${destinationDatasetId}`
+ );
+ }
+);
test.serial(`should delete a dataset`, async t => {
- const output = await tools.runAsync(
- `${cmd} deleteDataset ${datasetId}`, cwd);
+ const output = await tools.runAsync(`${cmd} deleteDataset ${datasetId}`, cwd);
t.is(output, `Deleted dataset: ${datasetId}`);
});
diff --git a/healthcare/dicom/dicom_stores.js b/healthcare/dicom/dicom_stores.js
index de6daba881..9d9b036817 100644
--- a/healthcare/dicom/dicom_stores.js
+++ b/healthcare/dicom/dicom_stores.js
@@ -18,7 +18,13 @@
const {google} = require('googleapis');
// [START healthcare_create_dicom_store]
-function createDicomStore (client, projectId, cloudRegion, datasetId, dicomStoreId) {
+function createDicomStore(
+ client,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId
+) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -29,7 +35,8 @@ function createDicomStore (client, projectId, cloudRegion, datasetId, dicomStore
const request = {parent: parentName, dicomStoreId: dicomStoreId};
- client.projects.locations.datasets.dicomStores.create(request)
+ client.projects.locations.datasets.dicomStores
+ .create(request)
.then(() => {
console.log(`Created DICOM store: ${dicomStoreId}`);
})
@@ -40,19 +47,25 @@ function createDicomStore (client, projectId, cloudRegion, datasetId, dicomStore
// [END healthcare_create_dicom_store]
// [START healthcare_delete_dicom_store]
-function deleteDicomStore (client, projectId, cloudRegion, datasetId, dicomStoreId) {
+function deleteDicomStore(
+ client,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId
+) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
// const projectId = 'adjective-noun-123';
// const datasetId = 'my-dataset';
// const dicomStoreId = 'my-dicom-store';
- const dicomStoreName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
+ const dicomStoreName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
const request = {name: dicomStoreName};
- client.projects.locations.datasets.dicomStores.delete(request)
+ client.projects.locations.datasets.dicomStores
+ .delete(request)
.then(() => {
console.log(`Deleted DICOM store: ${dicomStoreId}`);
})
@@ -63,19 +76,25 @@ function deleteDicomStore (client, projectId, cloudRegion, datasetId, dicomStore
// [END healthcare_delete_dicom_store]
// [START healthcare_get_dicom_store]
-function getDicomStore (client, projectId, cloudRegion, datasetId, dicomStoreId) {
+function getDicomStore(
+ client,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId
+) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
// const projectId = 'adjective-noun-123';
// const datasetId = 'my-dataset';
// const dicomStoreId = 'my-dicom-store';
- const dicomStoreName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
+ const dicomStoreName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
const request = {name: dicomStoreName};
- client.projects.locations.datasets.dicomStores.get(request)
+ client.projects.locations.datasets.dicomStores
+ .get(request)
.then(results => {
console.log('Got DICOM store:\n', results['data']);
})
@@ -86,7 +105,7 @@ function getDicomStore (client, projectId, cloudRegion, datasetId, dicomStoreId)
// [END healthcare_get_dicom_store]
// [START healthcare_list_dicom_stores]
-function listDicomStores (client, projectId, cloudRegion, datasetId) {
+function listDicomStores(client, projectId, cloudRegion, datasetId) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -96,7 +115,8 @@ function listDicomStores (client, projectId, cloudRegion, datasetId) {
const request = {parent: parentName};
- client.projects.locations.datasets.dicomStores.list(request)
+ client.projects.locations.datasets.dicomStores
+ .list(request)
.then(results => {
console.log('DICOM stores:\n', results['data']['dicomStores']);
})
@@ -107,7 +127,14 @@ function listDicomStores (client, projectId, cloudRegion, datasetId) {
// [END healthcare_list_dicom_stores]
// [START healthcare_patch_dicom_store]
-function patchDicomStore (client, projectId, cloudRegion, datasetId, dicomStoreId, pubsubTopic) {
+function patchDicomStore(
+ client,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId,
+ pubsubTopic
+) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -115,19 +142,25 @@ function patchDicomStore (client, projectId, cloudRegion, datasetId, dicomStoreI
// const datasetId = 'my-dataset';
// const dicomStoreId = 'my-dicom-store';
// const pubsubTopic = 'my-topic'
- const dicomStoreName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
+ const dicomStoreName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
const request = {
name: dicomStoreName,
updateMask: 'notificationConfig',
- resource: { 'notificationConfig': { pubsubTopic: `projects/${projectId}/locations/${cloudRegion}/topics/${pubsubTopic}` } }
+ resource: {
+ notificationConfig: {
+ pubsubTopic: `projects/${projectId}/locations/${cloudRegion}/topics/${pubsubTopic}`,
+ },
+ },
};
- client.projects.locations.datasets.dicomStores.patch(request)
+ client.projects.locations.datasets.dicomStores
+ .patch(request)
.then(results => {
console.log(
- 'Patched DICOM store with Cloud Pub/Sub topic', results['data']['notificationConfig']['pubsubTopic']);
+ 'Patched DICOM store with Cloud Pub/Sub topic',
+ results['data']['notificationConfig']['pubsubTopic']
+ );
})
.catch(err => {
console.error(err);
@@ -136,7 +169,14 @@ function patchDicomStore (client, projectId, cloudRegion, datasetId, dicomStoreI
// [END healthcare_patch_dicom_store]
// [START healthcare_import_dicom_object]
-function importDicomObject (client, projectId, cloudRegion, datasetId, dicomStoreId, contentUri) {
+function importDicomObject(
+ client,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId,
+ contentUri
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -144,21 +184,22 @@ function importDicomObject (client, projectId, cloudRegion, datasetId, dicomStor
// const datasetId = 'my-dataset';
// const dicomStoreId = 'my-dicom-store';
// const contentUri = 'my-bucket'
- const dicomStoreName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
+ const dicomStoreName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
const request = {
name: dicomStoreName,
resource: {
inputConfig: {
gcsSource: {
- contentUri: `gs://${contentUri}`
- }
- }
- }};
+ contentUri: `gs://${contentUri}`,
+ },
+ },
+ },
+ };
- client.projects.locations.datasets.dicomStores.import(request)
- .then(results => {
+ client.projects.locations.datasets.dicomStores
+ .import(request)
+ .then(() => {
console.log(`Imported DICOM objects from bucket ${contentUri}`);
})
.catch(err => {
@@ -168,7 +209,14 @@ function importDicomObject (client, projectId, cloudRegion, datasetId, dicomStor
// [END healthcare_import_dicom_object]
// [START healthcare_export_dicom_instance_gcs]
-function exportDicomInstanceGcs (client, projectId, cloudRegion, datasetId, dicomStoreId, uriPrefix) {
+function exportDicomInstanceGcs(
+ client,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId,
+ uriPrefix
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -176,21 +224,22 @@ function exportDicomInstanceGcs (client, projectId, cloudRegion, datasetId, dico
// const datasetId = 'my-dataset';
// const dicomStoreId = 'my-dicom-store';
// const uriPrefix = 'my-bucket'
- const dicomStoreName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
+ const dicomStoreName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
const request = {
name: dicomStoreName,
resource: {
outputConfig: {
gcsDestination: {
- uriPrefix: `gs://${uriPrefix}`
- }
- }
- }};
+ uriPrefix: `gs://${uriPrefix}`,
+ },
+ },
+ },
+ };
- client.projects.locations.datasets.dicomStores.export(request)
- .then(results => {
+ client.projects.locations.datasets.dicomStores
+ .export(request)
+ .then(() => {
console.log(`Exported DICOM instances to bucket ${uriPrefix}`);
})
.catch(err => {
@@ -202,23 +251,23 @@ function exportDicomInstanceGcs (client, projectId, cloudRegion, datasetId, dico
// Returns an authorized API client by discovering the Healthcare API with
// the provided API key.
// [START healthcare_get_client]
-function getClient (apiKey, serviceAccountJson, cb) {
+function getClient(apiKey, serviceAccountJson, cb) {
const API_VERSION = 'v1alpha';
const DISCOVERY_API = 'https://healthcare.googleapis.com/$discovery/rest';
google.auth
.getClient({scopes: ['https://www.googleapis.com/auth/cloud-platform']})
.then(authClient => {
- const discoveryUrl = `${DISCOVERY_API}?labels=CHC_ALPHA&version=${
- API_VERSION}&key=${apiKey}`;
+ const discoveryUrl = `${DISCOVERY_API}?labels=CHC_ALPHA&version=${API_VERSION}&key=${apiKey}`;
google.options({auth: authClient});
- google.discoverAPI(discoveryUrl)
- .then((client) => {
+ google
+ .discoverAPI(discoveryUrl)
+ .then(client => {
cb(client);
})
- .catch((err) => {
+ .catch(err => {
console.error(err);
});
});
@@ -233,36 +282,43 @@ require(`yargs`) // eslint-disable-line
default: process.env.API_KEY,
description: 'The API key used for discovering the API.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
cloudRegion: {
alias: 'c',
default: 'us-central1',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
projectId: {
alias: 'p',
default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
- description: 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
+ description:
+ 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
serviceAccount: {
alias: 's',
default: process.env.GOOGLE_APPLICATION_CREDENTIALS,
description: 'The path to your service credentials JSON.',
requiresArg: true,
- type: 'string'
- }
+ type: 'string',
+ },
})
.command(
`createDicomStore `,
`Creates a new DICOM store within the parent dataset.`,
{},
- (opts) => {
- const cb = function (client) {
- createDicomStore(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId);
+ opts => {
+ const cb = function(client) {
+ createDicomStore(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -271,9 +327,15 @@ require(`yargs`) // eslint-disable-line
`deleteDicomStore `,
`Deletes the DICOM store and removes all resources that are contained within it.`,
{},
- (opts) => {
- const cb = function (client) {
- deleteDicomStore(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId);
+ opts => {
+ const cb = function(client) {
+ deleteDicomStore(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -282,9 +344,15 @@ require(`yargs`) // eslint-disable-line
`getDicomStore `,
`Gets the specified DICOM store or returns NOT_FOUND if it doesn't exist.`,
{},
- (opts) => {
- const cb = function (client) {
- getDicomStore(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId);
+ opts => {
+ const cb = function(client) {
+ getDicomStore(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -293,9 +361,14 @@ require(`yargs`) // eslint-disable-line
`listDicomStores `,
`Lists the DICOM stores in the given dataset.`,
{},
- (opts) => {
- const cb = function (client) {
- listDicomStores(client, opts.projectId, opts.cloudRegion, opts.datasetId);
+ opts => {
+ const cb = function(client) {
+ listDicomStores(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -304,9 +377,16 @@ require(`yargs`) // eslint-disable-line
`patchDicomStore `,
`Updates the DICOM store.`,
{},
- (opts) => {
- const cb = function (client) {
- patchDicomStore(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId, opts.pubsubTopic);
+ opts => {
+ const cb = function(client) {
+ patchDicomStore(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId,
+ opts.pubsubTopic
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -315,9 +395,16 @@ require(`yargs`) // eslint-disable-line
`importDicomObject `,
`Imports data into the DICOM store by copying it from the specified source.`,
{},
- (opts) => {
- const cb = function (client) {
- importDicomObject(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId, opts.contentUri);
+ opts => {
+ const cb = function(client) {
+ importDicomObject(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId,
+ opts.contentUri
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -326,16 +413,24 @@ require(`yargs`) // eslint-disable-line
`exportDicomInstanceGcs `,
`Exports data to a Cloud Storage bucket by copying it from the DICOM store.`,
{},
- (opts) => {
- const cb = function (client) {
- exportDicomInstanceGcs(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId, opts.uriPrefix);
+ opts => {
+ const cb = function(client) {
+ exportDicomInstanceGcs(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId,
+ opts.uriPrefix
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
)
.wrap(120)
.recommendCommands()
- .epilogue(`For more information, see https://cloud.google.com/healthcare/docs`)
+ .epilogue(
+ `For more information, see https://cloud.google.com/healthcare/docs`
+ )
.help()
- .strict()
- .argv;
+ .strict().argv;
diff --git a/healthcare/dicom/dicomweb.js b/healthcare/dicom/dicomweb.js
index 93cd4f471e..092bf86188 100644
--- a/healthcare/dicom/dicomweb.js
+++ b/healthcare/dicom/dicomweb.js
@@ -13,19 +13,19 @@
* limitations under the License.
*/
-const { GoogleToken } = require('gtoken');
+const {GoogleToken} = require('gtoken');
const request = require('request-promise');
const fs = require('fs');
const BASE_URL = 'https://healthcare.googleapis.com/v1alpha';
-function getToken (serviceAccountJson, cb) {
+function getToken(serviceAccountJson, cb) {
const gtoken = new GoogleToken({
keyFile: `${serviceAccountJson}`,
- scope: ['https://www.googleapis.com/auth/cloud-platform'] // or space-delimited string of scopes
+ scope: ['https://www.googleapis.com/auth/cloud-platform'], // or space-delimited string of scopes
});
- gtoken.getToken(function (err, token) {
+ gtoken.getToken(function(err, token) {
if (err) {
console.log('ERROR: ', err);
return;
@@ -35,7 +35,15 @@ function getToken (serviceAccountJson, cb) {
}
// [START healthcare_dicomweb_store_instance]
-function dicomWebStoreInstance (token, projectId, cloudRegion, datasetId, dicomStoreId, dcmFile, boundary) {
+function dicomWebStoreInstance(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId,
+ dcmFile,
+ boundary
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -53,11 +61,11 @@ function dicomWebStoreInstance (token, projectId, cloudRegion, datasetId, dicomS
const options = {
url: dicomWebPath,
headers: {
- 'authorization': `Bearer ${token}`,
- 'Content-Type': `multipart/related; type=application/dicom; boundary=${boundary}`
+ authorization: `Bearer ${token}`,
+ 'Content-Type': `multipart/related; type=application/dicom; boundary=${boundary}`,
},
body: binaryData,
- method: 'POST'
+ method: 'POST',
};
request(options)
@@ -72,7 +80,13 @@ function dicomWebStoreInstance (token, projectId, cloudRegion, datasetId, dicomS
// [END healthcare_dicomweb_store_instance]
// [START healthcare_dicomweb_search_instances]
-function dicomWebSearchInstances (token, projectId, cloudRegion, datasetId, dicomStoreId) {
+function dicomWebSearchInstances(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -86,10 +100,10 @@ function dicomWebSearchInstances (token, projectId, cloudRegion, datasetId, dico
const options = {
url: dicomWebPath,
headers: {
- 'authorization': `Bearer ${token}`,
- 'Content-Type': 'application/dicom+json; charset=utf-8'
+ authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/dicom+json; charset=utf-8',
},
- method: 'GET'
+ method: 'GET',
};
request(options)
@@ -104,7 +118,14 @@ function dicomWebSearchInstances (token, projectId, cloudRegion, datasetId, dico
// [END healthcare_dicomweb_search_instances]
// [START healthcare_dicomweb_retrieve_study]
-function dicomWebRetrieveStudy (token, projectId, cloudRegion, datasetId, dicomStoreId, studyUid) {
+function dicomWebRetrieveStudy(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId,
+ studyUid
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -119,14 +140,14 @@ function dicomWebRetrieveStudy (token, projectId, cloudRegion, datasetId, dicomS
const options = {
url: dicomWebPath,
headers: {
- 'authorization': `Bearer ${token}`,
- 'Content-Type': 'application/dicom+json; charset=utf-8'
+ authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/dicom+json; charset=utf-8',
},
- method: 'GET'
+ method: 'GET',
};
request(options)
- .then(results => {
+ .then(() => {
console.log(`Retrieved study with UID: ${studyUid}`);
})
.catch(err => {
@@ -136,7 +157,14 @@ function dicomWebRetrieveStudy (token, projectId, cloudRegion, datasetId, dicomS
// [END healthcare_dicomweb_retrieve_study]
// [START healthcare_dicomweb_delete_study]
-function dicomWebDeleteStudy (token, projectId, cloudRegion, datasetId, dicomStoreId, studyUid) {
+function dicomWebDeleteStudy(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ dicomStoreId,
+ studyUid
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -151,14 +179,14 @@ function dicomWebDeleteStudy (token, projectId, cloudRegion, datasetId, dicomSto
const options = {
url: dicomWebPath,
headers: {
- 'authorization': `Bearer ${token}`,
- 'Content-Type': 'application/dicom+json; charset=utf-8'
+ authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/dicom+json; charset=utf-8',
},
- method: 'DELETE'
+ method: 'DELETE',
};
request(options)
- .then(results => {
+ .then(() => {
console.log('Deleted study.');
})
.catch(err => {
@@ -174,30 +202,39 @@ require(`yargs`) // eslint-disable-line
alias: 'c',
default: 'us-central1',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
projectId: {
alias: 'p',
default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
- description: 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
+ description:
+ 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
serviceAccount: {
alias: 's',
default: process.env.GOOGLE_APPLICATION_CREDENTIALS,
description: 'The path to your service credentials JSON.',
requiresArg: true,
- type: 'string'
- }
+ type: 'string',
+ },
})
.command(
`dicomWebStoreInstance `,
`Handles the POST requests specified in the DICOMweb standard.`,
{},
- (opts) => {
- const cb = function (token) {
- dicomWebStoreInstance(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId, opts.dcmFile, opts.boundary);
+ opts => {
+ const cb = function(token) {
+ dicomWebStoreInstance(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId,
+ opts.dcmFile,
+ opts.boundary
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -206,9 +243,15 @@ require(`yargs`) // eslint-disable-line
`dicomWebSearchInstances `,
`Handles the GET requests specified in the DICOMweb standard.`,
{},
- (opts) => {
- const cb = function (token) {
- dicomWebSearchInstances(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId);
+ opts => {
+ const cb = function(token) {
+ dicomWebSearchInstances(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -217,9 +260,16 @@ require(`yargs`) // eslint-disable-line
`dicomWebRetrieveStudy `,
`Handles the GET requests specified in the DICOMweb standard.`,
{},
- (opts) => {
- const cb = function (token) {
- dicomWebRetrieveStudy(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId, opts.studyUid);
+ opts => {
+ const cb = function(token) {
+ dicomWebRetrieveStudy(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId,
+ opts.studyUid
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -228,16 +278,24 @@ require(`yargs`) // eslint-disable-line
`dicomWebDeleteStudy `,
`Handles DELETE requests.`,
{},
- (opts) => {
- const cb = function (token) {
- dicomWebDeleteStudy(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.dicomStoreId, opts.studyUid);
+ opts => {
+ const cb = function(token) {
+ dicomWebDeleteStudy(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.dicomStoreId,
+ opts.studyUid
+ );
};
getToken(opts.serviceAccount, cb);
}
)
.wrap(120)
.recommendCommands()
- .epilogue(`For more information, see https://cloud.google.com/healthcare/docs`)
+ .epilogue(
+ `For more information, see https://cloud.google.com/healthcare/docs`
+ )
.help()
- .strict()
- .argv;
+ .strict().argv;
diff --git a/healthcare/dicom/system-test/dicom_stores.test.js b/healthcare/dicom/system-test/dicom_stores.test.js
index 4e4f4bafdb..73c465ec4f 100644
--- a/healthcare/dicom/system-test/dicom_stores.test.js
+++ b/healthcare/dicom/system-test/dicom_stores.test.js
@@ -25,8 +25,14 @@ const cmd = `node dicom_stores.js`;
const cwdDatasets = path.join(__dirname, `../../datasets`);
const cwd = path.join(__dirname, `..`);
const datasetId = `nodejs-docs-samples-test-${uuid.v4()}`.replace(/-/gi, '_');
-const dicomStoreId = `nodejs-docs-samples-test-dicom-store${uuid.v4()}`.replace(/-/gi, '_');
-const pubsubTopic = `nodejs-docs-samples-test-pubsub${uuid.v4()}`.replace(/-/gi, '_');
+const dicomStoreId = `nodejs-docs-samples-test-dicom-store${uuid.v4()}`.replace(
+ /-/gi,
+ '_'
+);
+const pubsubTopic = `nodejs-docs-samples-test-pubsub${uuid.v4()}`.replace(
+ /-/gi,
+ '_'
+);
const bucketName = process.env.GCLOUD_STORAGE_BUCKET;
const dcmFileName = `IM-0002-0001-JPEG-BASELINE.dcm`;
@@ -34,56 +40,74 @@ const contentUri = bucketName + '/' + dcmFileName;
test.before(tools.checkCredentials);
test.before(async () => {
- return tools.runAsync(`${cmdDataset} createDataset ${datasetId}`, cwdDatasets)
- .then((results) => {
+ return tools
+ .runAsync(`${cmdDataset} createDataset ${datasetId}`, cwdDatasets)
+ .then(results => {
console.log(results);
return results;
});
});
test.after.always(async () => {
try {
- await tools.runAsync(`${cmdDataset} deleteDataset ${datasetId}`, cwdDatasets);
- } catch (err) { } // Ignore error
+ await tools.runAsync(
+ `${cmdDataset} deleteDataset ${datasetId}`,
+ cwdDatasets
+ );
+ } catch (err) {} // Ignore error
});
test.serial(`should create a DICOM store`, async t => {
const output = await tools.runAsync(
- `${cmd} createDicomStore ${datasetId} ${dicomStoreId}`, cwd);
+ `${cmd} createDicomStore ${datasetId} ${dicomStoreId}`,
+ cwd
+ );
t.regex(output, /Created DICOM store/);
});
test.serial(`should get a DICOM store`, async t => {
const output = await tools.runAsync(
- `${cmd} getDicomStore ${datasetId} ${dicomStoreId}`, cwd);
+ `${cmd} getDicomStore ${datasetId} ${dicomStoreId}`,
+ cwd
+ );
t.regex(output, /Got DICOM store/);
});
test.serial(`should patch a DICOM store`, async t => {
const output = await tools.runAsync(
- `${cmd} patchDicomStore ${datasetId} ${dicomStoreId} ${pubsubTopic}`, cwd);
+ `${cmd} patchDicomStore ${datasetId} ${dicomStoreId} ${pubsubTopic}`,
+ cwd
+ );
t.regex(output, /Patched DICOM store with Cloud Pub\/Sub topic/);
});
test.serial(`should list DICOM stores`, async t => {
const output = await tools.runAsync(
- `${cmd} listDicomStores ${datasetId}`, cwd);
+ `${cmd} listDicomStores ${datasetId}`,
+ cwd
+ );
t.regex(output, /DICOM stores/);
});
test.serial(`should export a DICOM instance`, async t => {
const output = await tools.runAsync(
- `${cmd} exportDicomInstanceGcs ${datasetId} ${dicomStoreId} ${bucketName}`, cwd);
+ `${cmd} exportDicomInstanceGcs ${datasetId} ${dicomStoreId} ${bucketName}`,
+ cwd
+ );
t.regex(output, /Exported DICOM instances to bucket/);
});
test.serial(`should import a DICOM object from GCS`, async t => {
const output = await tools.runAsync(
- `${cmd} importDicomObject ${datasetId} ${dicomStoreId} ${contentUri}`, cwd);
+ `${cmd} importDicomObject ${datasetId} ${dicomStoreId} ${contentUri}`,
+ cwd
+ );
t.regex(output, /Imported DICOM objects from bucket/);
});
test(`should delete a DICOM store`, async t => {
const output = await tools.runAsync(
- `${cmd} deleteDicomStore ${datasetId} ${dicomStoreId}`, cwd);
+ `${cmd} deleteDicomStore ${datasetId} ${dicomStoreId}`,
+ cwd
+ );
t.regex(output, /Deleted DICOM store/);
});
diff --git a/healthcare/dicom/system-test/dicomweb.test.js b/healthcare/dicom/system-test/dicomweb.test.js
index bd0f03187a..a12e2528f0 100644
--- a/healthcare/dicom/system-test/dicomweb.test.js
+++ b/healthcare/dicom/system-test/dicomweb.test.js
@@ -26,7 +26,10 @@ const cmd = `node dicomweb.js`;
const cwdDatasets = path.join(__dirname, `../../datasets`);
const cwd = path.join(__dirname, `..`);
const datasetId = `nodejs-docs-samples-test-${uuid.v4()}`.replace(/-/gi, '_');
-const dicomStoreId = `nodejs-docs-samples-test-dicom-store${uuid.v4()}`.replace(/-/gi, '_');
+const dicomStoreId = `nodejs-docs-samples-test-dicom-store${uuid.v4()}`.replace(
+ /-/gi,
+ '_'
+);
const dcmFile = `resources/IM-0002-0001-JPEG-BASELINE-edited.dcm`;
const boundary = `DICOMwebBoundary`;
@@ -36,42 +39,60 @@ const studyUid = `1.2.840.113619.2.176.3596.3364818.7819.1259708454.105`;
test.before(tools.checkCredentials);
test.before(async () => {
- return tools.runAsync(`${cmdDataset} createDataset ${datasetId}`, cwdDatasets)
- .then((results) => {
+ return tools
+ .runAsync(`${cmdDataset} createDataset ${datasetId}`, cwdDatasets)
+ .then(results => {
console.log(results);
return results;
});
});
test.after.always(async () => {
try {
- await tools.runAsync(`${cmdDataset} deleteDataset ${datasetId}`, cwdDatasets);
- } catch (err) { } // Ignore error
+ await tools.runAsync(
+ `${cmdDataset} deleteDataset ${datasetId}`,
+ cwdDatasets
+ );
+ } catch (err) {} // Ignore error
});
test.serial(`should store a DICOM instance`, async t => {
- await tools.runAsync(`${cmdDicomStore} createDicomStore ${datasetId} ${dicomStoreId}`, cwd);
+ await tools.runAsync(
+ `${cmdDicomStore} createDicomStore ${datasetId} ${dicomStoreId}`,
+ cwd
+ );
const output = await tools.runAsync(
- `${cmd} dicomWebStoreInstance ${datasetId} ${dicomStoreId} ${dcmFile} ${boundary}`, cwd);
+ `${cmd} dicomWebStoreInstance ${datasetId} ${dicomStoreId} ${dcmFile} ${boundary}`,
+ cwd
+ );
t.regex(output, /Stored instance/);
});
test.serial(`should search DICOM instances`, async t => {
const output = await tools.runAsync(
- `${cmd} dicomWebSearchInstances ${datasetId} ${dicomStoreId}`, cwd);
+ `${cmd} dicomWebSearchInstances ${datasetId} ${dicomStoreId}`,
+ cwd
+ );
t.regex(output, /Instances/);
});
test.serial(`should retrieve a DICOM study`, async t => {
const output = await tools.runAsync(
- `${cmd} dicomWebRetrieveStudy ${datasetId} ${dicomStoreId} ${studyUid}`, cwd);
+ `${cmd} dicomWebRetrieveStudy ${datasetId} ${dicomStoreId} ${studyUid}`,
+ cwd
+ );
t.regex(output, /Retrieved study/);
});
test.serial(`should delete a DICOM study`, async t => {
const output = await tools.runAsync(
- `${cmd} dicomWebDeleteStudy ${datasetId} ${dicomStoreId} ${studyUid}`, cwd);
+ `${cmd} dicomWebDeleteStudy ${datasetId} ${dicomStoreId} ${studyUid}`,
+ cwd
+ );
t.regex(output, /Deleted study/);
// Clean up
- await tools.runAsync(`${cmdDicomStore} deleteDicomStore ${datasetId} ${dicomStoreId}`, cwd);
+ await tools.runAsync(
+ `${cmdDicomStore} deleteDicomStore ${datasetId} ${dicomStoreId}`,
+ cwd
+ );
});
diff --git a/healthcare/fhir/fhir_resources.js b/healthcare/fhir/fhir_resources.js
index 2013ba3725..4a9c62b878 100644
--- a/healthcare/fhir/fhir_resources.js
+++ b/healthcare/fhir/fhir_resources.js
@@ -13,19 +13,19 @@
* limitations under the License.
*/
-const { GoogleToken } = require('gtoken');
+const {GoogleToken} = require('gtoken');
const request = require('request-promise');
const BASE_URL = 'https://healthcare.googleapis.com/v1alpha';
// [START healthcare_get_token]
-function getToken (serviceAccountJson, cb) {
+function getToken(serviceAccountJson, cb) {
const gtoken = new GoogleToken({
keyFile: `${serviceAccountJson}`,
- scope: ['https://www.googleapis.com/auth/cloud-platform'] // or space-delimited string of scopes
+ scope: ['https://www.googleapis.com/auth/cloud-platform'], // or space-delimited string of scopes
});
- gtoken.getToken(function (err, token) {
+ gtoken.getToken(function(err, token) {
if (err) {
console.log('ERROR: ', err);
return;
@@ -36,7 +36,14 @@ function getToken (serviceAccountJson, cb) {
// [END healthcare_get_token]
// [START healthcare_create_fhir_resource]
-function createResource (token, projectId, cloudRegion, datasetId, fhirStoreId, resourceType) {
+function createResource(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId,
+ resourceType
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -49,18 +56,18 @@ function createResource (token, projectId, cloudRegion, datasetId, fhirStoreId,
const resourcePath = `${parentName}/datasets/${datasetId}/fhirStores/${fhirStoreId}/resources/${resourceType}`;
const postData = {
- 'resourceType': resourceType
+ resourceType: resourceType,
};
const options = {
url: resourcePath,
headers: {
- 'Authorization': `Bearer ${token}`,
- 'Content-Type': 'application/fhir+json; charset=utf-8'
+ Authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/fhir+json; charset=utf-8',
},
body: postData,
json: true,
- method: 'POST'
+ method: 'POST',
};
request(options)
@@ -74,7 +81,15 @@ function createResource (token, projectId, cloudRegion, datasetId, fhirStoreId,
// [END healthcare_create_fhir_resource]
// [START healthcare_update_fhir_resource]
-function updateResource (token, projectId, cloudRegion, datasetId, fhirStoreId, resourceType, resourceId) {
+function updateResource(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId,
+ resourceType,
+ resourceId
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -88,20 +103,20 @@ function updateResource (token, projectId, cloudRegion, datasetId, fhirStoreId,
const resourcePath = `${parentName}/datasets/${datasetId}/fhirStores/${fhirStoreId}/resources/${resourceType}/${resourceId}`;
const patientData = {
- 'resourceType': resourceType,
- 'id': resourceId,
- 'active': true
+ resourceType: resourceType,
+ id: resourceId,
+ active: true,
};
const options = {
url: resourcePath,
headers: {
- 'Authorization': `Bearer ${token}`,
- 'Content-Type': 'application/fhir+json; charset=utf-8'
+ Authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/fhir+json; charset=utf-8',
},
body: patientData,
json: true,
- method: 'PUT'
+ method: 'PUT',
};
request(options)
@@ -115,7 +130,15 @@ function updateResource (token, projectId, cloudRegion, datasetId, fhirStoreId,
// [END healthcare_update_fhir_resource]
// [START healthcare_patch_fhir_resource]
-function patchResource (token, projectId, cloudRegion, datasetId, fhirStoreId, resourceType, resourceId) {
+function patchResource(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId,
+ resourceType,
+ resourceId
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -128,17 +151,17 @@ function patchResource (token, projectId, cloudRegion, datasetId, fhirStoreId, r
const resourcePath = `${parentName}/datasets/${datasetId}/fhirStores/${fhirStoreId}/resources/${resourceType}/${resourceId}`;
- const patchOperations = [{ 'op': 'replace', 'path': '/active', 'value': false }];
+ const patchOperations = [{op: 'replace', path: '/active', value: false}];
const options = {
url: resourcePath,
headers: {
- 'Authorization': `Bearer ${token}`,
- 'Content-Type': 'application/json-patch+json'
+ Authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/json-patch+json',
},
body: patchOperations,
json: true,
- method: 'PATCH'
+ method: 'PATCH',
};
request(options)
@@ -152,7 +175,15 @@ function patchResource (token, projectId, cloudRegion, datasetId, fhirStoreId, r
// [END healthcare_patch_fhir_resource]
// [START healthcare_delete_fhir_resource]
-function deleteResource (token, projectId, cloudRegion, datasetId, fhirStoreId, resourceType, resourceId) {
+function deleteResource(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId,
+ resourceType,
+ resourceId
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -168,11 +199,11 @@ function deleteResource (token, projectId, cloudRegion, datasetId, fhirStoreId,
const options = {
url: resourcePath,
headers: {
- 'Authorization': `Bearer ${token}`,
- 'Content-Type': 'application/fhir+json; charset=utf-8'
+ Authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/fhir+json; charset=utf-8',
},
json: true,
- method: 'DELETE'
+ method: 'DELETE',
};
request(options)
@@ -186,7 +217,15 @@ function deleteResource (token, projectId, cloudRegion, datasetId, fhirStoreId,
// [END healthcare_delete_fhir_resource]
// [START healthcare_get_fhir_resource]
-function getResource (token, projectId, cloudRegion, datasetId, fhirStoreId, resourceType, resourceId) {
+function getResource(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId,
+ resourceType,
+ resourceId
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -202,15 +241,17 @@ function getResource (token, projectId, cloudRegion, datasetId, fhirStoreId, res
const options = {
url: resourcePath,
headers: {
- 'Authorization': `Bearer ${token}`,
- 'Content-Type': 'application/fhir+json; charset=utf-8'
+ Authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/fhir+json; charset=utf-8',
},
- json: true
+ json: true,
};
request(options)
.then(results => {
- console.log(`Got ${resourceType} resource:\n${JSON.stringify(results, null, 2)}`);
+ console.log(
+ `Got ${resourceType} resource:\n${JSON.stringify(results, null, 2)}`
+ );
})
.catch(err => {
console.error(err);
@@ -219,7 +260,14 @@ function getResource (token, projectId, cloudRegion, datasetId, fhirStoreId, res
// [END healthcare_get_fhir_resource]
// [START healthcare_search_fhir_resources_get]
-function searchResourcesGet (token, projectId, cloudRegion, datasetId, fhirStoreId, resourceType) {
+function searchResourcesGet(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId,
+ resourceType
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -234,10 +282,10 @@ function searchResourcesGet (token, projectId, cloudRegion, datasetId, fhirStore
const options = {
url: resourcesPath,
headers: {
- 'Authorization': `Bearer ${token}`,
- 'Content-Type': 'application/fhir+json; charset=utf-8'
+ Authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/fhir+json; charset=utf-8',
},
- json: true
+ json: true,
};
request(options)
@@ -251,7 +299,14 @@ function searchResourcesGet (token, projectId, cloudRegion, datasetId, fhirStore
// [END healthcare_search_fhir_resources_get]
// [START healthcare_search_fhir_resources_post]
-function searchResourcesPost (token, projectId, cloudRegion, datasetId, fhirStoreId, resourceType) {
+function searchResourcesPost(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId,
+ resourceType
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -266,11 +321,11 @@ function searchResourcesPost (token, projectId, cloudRegion, datasetId, fhirStor
const options = {
url: resourcesPath,
headers: {
- 'Authorization': `Bearer ${token}`,
- 'Content-Type': 'application/fhir+json; charset=utf-8'
+ Authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/fhir+json; charset=utf-8',
},
json: true,
- method: 'POST'
+ method: 'POST',
};
request(options)
@@ -284,7 +339,14 @@ function searchResourcesPost (token, projectId, cloudRegion, datasetId, fhirStor
// [END healthcare_search_fhir_resources_post]
// [START healthcare_fhir_get_patient_everything]
-function getPatientEverything (token, projectId, cloudRegion, datasetId, fhirStoreId, resourceId) {
+function getPatientEverything(
+ token,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId,
+ resourceId
+) {
// Token retrieved in callback
// getToken(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -299,9 +361,9 @@ function getPatientEverything (token, projectId, cloudRegion, datasetId, fhirSto
const options = {
url: fhirStorePath,
headers: {
- 'authorization': `Bearer ${token}`
+ authorization: `Bearer ${token}`,
},
- json: true
+ json: true,
};
request(options)
@@ -322,30 +384,38 @@ require(`yargs`) // eslint-disable-line
alias: 'c',
default: 'us-central1',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
projectId: {
alias: 'p',
default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
- description: 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
+ description:
+ 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
serviceAccount: {
alias: 's',
default: process.env.GOOGLE_APPLICATION_CREDENTIALS,
description: 'The path to your service credentials JSON.',
requiresArg: true,
- type: 'string'
- }
+ type: 'string',
+ },
})
.command(
`createResource `,
`Creates a new resource in a FHIR store.`,
{},
- (opts) => {
- const cb = function (token) {
- createResource(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId, opts.resourceType);
+ opts => {
+ const cb = function(token) {
+ createResource(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId,
+ opts.resourceType
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -354,9 +424,17 @@ require(`yargs`) // eslint-disable-line
`updateResource `,
`Updates an existing resource in a FHIR store.`,
{},
- (opts) => {
- const cb = function (token) {
- updateResource(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId, opts.resourceType, opts.resourceId);
+ opts => {
+ const cb = function(token) {
+ updateResource(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId,
+ opts.resourceType,
+ opts.resourceId
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -365,9 +443,17 @@ require(`yargs`) // eslint-disable-line
`patchResource `,
`Patches an existing resource in a FHIR store.`,
{},
- (opts) => {
- const cb = function (token) {
- patchResource(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId, opts.resourceType, opts.resourceId);
+ opts => {
+ const cb = function(token) {
+ patchResource(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId,
+ opts.resourceType,
+ opts.resourceId
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -376,9 +462,17 @@ require(`yargs`) // eslint-disable-line
`deleteResource `,
`Deletes a FHIR resource or returns NOT_FOUND if it doesn't exist.`,
{},
- (opts) => {
- const cb = function (token) {
- deleteResource(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId, opts.resourceType, opts.resourceId);
+ opts => {
+ const cb = function(token) {
+ deleteResource(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId,
+ opts.resourceType,
+ opts.resourceId
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -387,9 +481,17 @@ require(`yargs`) // eslint-disable-line
`getResource `,
`Gets a FHIR resource.`,
{},
- (opts) => {
- const cb = function (token) {
- getResource(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId, opts.resourceType, opts.resourceId);
+ opts => {
+ const cb = function(token) {
+ getResource(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId,
+ opts.resourceType,
+ opts.resourceId
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -398,9 +500,16 @@ require(`yargs`) // eslint-disable-line
`searchResourcesGet `,
`Searches resources in the given FHIR store using the searchResources GET method.`,
{},
- (opts) => {
- const cb = function (token) {
- searchResourcesGet(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId, opts.resourceType);
+ opts => {
+ const cb = function(token) {
+ searchResourcesGet(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId,
+ opts.resourceType
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -409,9 +518,16 @@ require(`yargs`) // eslint-disable-line
`searchResourcesPost `,
`Searches resources in the given FHIR store using the _search POST method.`,
{},
- (opts) => {
- const cb = function (token) {
- searchResourcesPost(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId, opts.resourceType);
+ opts => {
+ const cb = function(token) {
+ searchResourcesPost(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId,
+ opts.resourceType
+ );
};
getToken(opts.serviceAccount, cb);
}
@@ -420,16 +536,24 @@ require(`yargs`) // eslint-disable-line
`getPatientEverything `,
`Gets all the resources in the patient compartment.`,
{},
- (opts) => {
- const cb = function (token) {
- getPatientEverything(token, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId, opts.resourceId);
+ opts => {
+ const cb = function(token) {
+ getPatientEverything(
+ token,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId,
+ opts.resourceId
+ );
};
getToken(opts.serviceAccount, cb);
}
)
.wrap(120)
.recommendCommands()
- .epilogue(`For more information, see https://cloud.google.com/healthcare/docs`)
+ .epilogue(
+ `For more information, see https://cloud.google.com/healthcare/docs`
+ )
.help()
- .strict()
- .argv;
+ .strict().argv;
diff --git a/healthcare/fhir/fhir_stores.js b/healthcare/fhir/fhir_stores.js
index 8a173ce293..4bdebc2f36 100644
--- a/healthcare/fhir/fhir_stores.js
+++ b/healthcare/fhir/fhir_stores.js
@@ -18,7 +18,13 @@
const {google} = require('googleapis');
// [START healthcare_create_fhir_store]
-function createFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId) {
+function createFhirStore(
+ client,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId
+) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -29,7 +35,8 @@ function createFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId
const request = {parent: parentName, fhirStoreId: fhirStoreId};
- client.projects.locations.datasets.fhirStores.create(request)
+ client.projects.locations.datasets.fhirStores
+ .create(request)
.then(() => {
console.log(`Created FHIR store: ${fhirStoreId}`);
})
@@ -40,19 +47,25 @@ function createFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId
// [END healthcare_create_fhir_store]
// [START healthcare_delete_fhir_store]
-function deleteFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId) {
+function deleteFhirStore(
+ client,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId
+) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
// const projectId = 'adjective-noun-123';
// const datasetId = 'my-dataset';
// const fhirStoreId = 'my-fhir-store';
- const fhirStoreName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/fhirStores/${fhirStoreId}`;
+ const fhirStoreName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/fhirStores/${fhirStoreId}`;
const request = {name: fhirStoreName};
- client.projects.locations.datasets.fhirStores.delete(request)
+ client.projects.locations.datasets.fhirStores
+ .delete(request)
.then(() => {
console.log(`Deleted FHIR store: ${fhirStoreId}`);
})
@@ -63,19 +76,19 @@ function deleteFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId
// [END healthcare_delete_fhir_store]
// [START healthcare_get_fhir_store]
-function getFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId) {
+function getFhirStore(client, projectId, cloudRegion, datasetId, fhirStoreId) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
// const projectId = 'adjective-noun-123';
// const datasetId = 'my-dataset';
// const fhirStoreId = 'my-fhir-store';
- const fhirStoreName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/fhirStores/${fhirStoreId}`;
+ const fhirStoreName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/fhirStores/${fhirStoreId}`;
const request = {name: fhirStoreName};
- client.projects.locations.datasets.fhirStores.get(request)
+ client.projects.locations.datasets.fhirStores
+ .get(request)
.then(results => {
console.log('Got FHIR store:\n', results['data']);
})
@@ -86,7 +99,7 @@ function getFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId) {
// [END healthcare_get_fhir_store]
// [START healthcare_list_fhir_stores]
-function listFhirStores (client, projectId, cloudRegion, datasetId) {
+function listFhirStores(client, projectId, cloudRegion, datasetId) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -96,7 +109,8 @@ function listFhirStores (client, projectId, cloudRegion, datasetId) {
const request = {parent: parentName};
- client.projects.locations.datasets.fhirStores.list(request)
+ client.projects.locations.datasets.fhirStores
+ .list(request)
.then(results => {
console.log('FHIR stores:\n', results['data']['fhirStores']);
})
@@ -107,7 +121,14 @@ function listFhirStores (client, projectId, cloudRegion, datasetId) {
// [END healthcare_list_fhir_stores]
// [START healthcare_patch_fhir_store]
-function patchFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId, pubsubTopic) {
+function patchFhirStore(
+ client,
+ projectId,
+ cloudRegion,
+ datasetId,
+ fhirStoreId,
+ pubsubTopic
+) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
@@ -115,19 +136,25 @@ function patchFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId,
// const datasetId = 'my-dataset';
// const fhirStoreId = 'my-fhir-store';
// const pubsubTopic = 'my-topic'
- const fhirStoreName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/fhirStores/${fhirStoreId}`;
+ const fhirStoreName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/fhirStores/${fhirStoreId}`;
const request = {
name: fhirStoreName,
updateMask: 'notificationConfig',
- resource: { 'notificationConfig': { pubsubTopic: `projects/${projectId}/locations/${cloudRegion}/topics/${pubsubTopic}` } }
+ resource: {
+ notificationConfig: {
+ pubsubTopic: `projects/${projectId}/locations/${cloudRegion}/topics/${pubsubTopic}`,
+ },
+ },
};
- client.projects.locations.datasets.fhirStores.patch(request)
+ client.projects.locations.datasets.fhirStores
+ .patch(request)
.then(results => {
console.log(
- 'Patched FHIR store with Cloud Pub/Sub topic', results['data']['notificationConfig']['pubsubTopic']);
+ 'Patched FHIR store with Cloud Pub/Sub topic',
+ results['data']['notificationConfig']['pubsubTopic']
+ );
})
.catch(err => {
console.error(err);
@@ -136,19 +163,19 @@ function patchFhirStore (client, projectId, cloudRegion, datasetId, fhirStoreId,
// [END healthcare_patch_fhir_store]
// [START healthcare_get_fhir_store_metadata]
-function getMetadata (client, projectId, cloudRegion, datasetId, fhirStoreId) {
+function getMetadata(client, projectId, cloudRegion, datasetId, fhirStoreId) {
// Client retrieved in callback
// getClient(serviceAccountJson, function(cb) {...});
// const cloudRegion = 'us-central1';
// const projectId = 'adjective-noun-123';
// const datasetId = 'my-dataset';
// const fhirStoreId = 'my-fhir-store';
- const fhirStoreName =
- `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/fhirStores/${fhirStoreId}`;
+ const fhirStoreName = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/fhirStores/${fhirStoreId}`;
const request = {name: fhirStoreName};
- client.projects.locations.datasets.fhirStores.getMetadata(request)
+ client.projects.locations.datasets.fhirStores
+ .getMetadata(request)
.then(results => {
console.log(`Capabilities statement for FHIR store ${fhirStoreId}:`);
console.log(results);
@@ -162,23 +189,23 @@ function getMetadata (client, projectId, cloudRegion, datasetId, fhirStoreId) {
// Returns an authorized API client by discovering the Healthcare API with
// the provided API key.
// [START healthcare_get_client]
-function getClient (apiKey, serviceAccountJson, cb) {
+function getClient(apiKey, serviceAccountJson, cb) {
const API_VERSION = 'v1alpha';
const DISCOVERY_API = 'https://healthcare.googleapis.com/$discovery/rest';
google.auth
.getClient({scopes: ['https://www.googleapis.com/auth/cloud-platform']})
.then(authClient => {
- const discoveryUrl = `${DISCOVERY_API}?labels=CHC_ALPHA&version=${
- API_VERSION}&key=${apiKey}`;
+ const discoveryUrl = `${DISCOVERY_API}?labels=CHC_ALPHA&version=${API_VERSION}&key=${apiKey}`;
google.options({auth: authClient});
- google.discoverAPI(discoveryUrl)
- .then((client) => {
+ google
+ .discoverAPI(discoveryUrl)
+ .then(client => {
cb(client);
})
- .catch((err) => {
+ .catch(err => {
console.error(err);
});
});
@@ -193,36 +220,43 @@ require(`yargs`) // eslint-disable-line
default: process.env.API_KEY,
description: 'The API key used for discovering the API.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
cloudRegion: {
alias: 'c',
default: 'us-central1',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
projectId: {
alias: 'p',
default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
- description: 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
+ description:
+ 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
serviceAccount: {
alias: 's',
default: process.env.GOOGLE_APPLICATION_CREDENTIALS,
description: 'The path to your service credentials JSON.',
requiresArg: true,
- type: 'string'
- }
+ type: 'string',
+ },
})
.command(
`createFhirStore `,
`Creates a new FHIR store within the parent dataset.`,
{},
- (opts) => {
- const cb = function (client) {
- createFhirStore(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId);
+ opts => {
+ const cb = function(client) {
+ createFhirStore(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -231,9 +265,15 @@ require(`yargs`) // eslint-disable-line
`deleteFhirStore `,
`Deletes the FHIR store and removes all resources that are contained within it.`,
{},
- (opts) => {
- const cb = function (client) {
- deleteFhirStore(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId);
+ opts => {
+ const cb = function(client) {
+ deleteFhirStore(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -242,9 +282,15 @@ require(`yargs`) // eslint-disable-line
`getFhirStore `,
`Gets the specified FHIR store or returns NOT_FOUND if it doesn't exist.`,
{},
- (opts) => {
- const cb = function (client) {
- getFhirStore(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId);
+ opts => {
+ const cb = function(client) {
+ getFhirStore(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -253,9 +299,14 @@ require(`yargs`) // eslint-disable-line
`listFhirStores `,
`Lists the FHIR stores in the given dataset.`,
{},
- (opts) => {
- const cb = function (client) {
- listFhirStores(client, opts.projectId, opts.cloudRegion, opts.datasetId);
+ opts => {
+ const cb = function(client) {
+ listFhirStores(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -264,9 +315,16 @@ require(`yargs`) // eslint-disable-line
`patchFhirStore `,
`Updates the FHIR store.`,
{},
- (opts) => {
- const cb = function (client) {
- patchFhirStore(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId, opts.pubsubTopic);
+ opts => {
+ const cb = function(client) {
+ patchFhirStore(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId,
+ opts.pubsubTopic
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
@@ -275,16 +333,23 @@ require(`yargs`) // eslint-disable-line
`getMetadata `,
`Gets the capabilities statement for a FHIR store.`,
{},
- (opts) => {
- const cb = function (client) {
- getMetadata(client, opts.projectId, opts.cloudRegion, opts.datasetId, opts.fhirStoreId);
+ opts => {
+ const cb = function(client) {
+ getMetadata(
+ client,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.datasetId,
+ opts.fhirStoreId
+ );
};
getClient(opts.apiKey, opts.serviceAccount, cb);
}
)
.wrap(120)
.recommendCommands()
- .epilogue(`For more information, see https://cloud.google.com/healthcare/docs`)
+ .epilogue(
+ `For more information, see https://cloud.google.com/healthcare/docs`
+ )
.help()
- .strict()
- .argv;
+ .strict().argv;
diff --git a/healthcare/fhir/system-test/fhir_resources.test.js b/healthcare/fhir/system-test/fhir_resources.test.js
index 90c233657f..216ed48699 100644
--- a/healthcare/fhir/system-test/fhir_resources.test.js
+++ b/healthcare/fhir/system-test/fhir_resources.test.js
@@ -26,67 +26,81 @@ const cmd = 'node fhir_resources.js';
const cwd = path.join(__dirname, '..');
const cwdDatasets = path.join(__dirname, `../../datasets`);
const datasetId = `nodejs-docs-samples-test-${uuid.v4()}`.replace(/-/gi, '_');
-const fhirStoreId =
- `nodejs-docs-samples-test-fhir-store${uuid.v4()}`.replace(/-/gi, '_');
+const fhirStoreId = `nodejs-docs-samples-test-fhir-store${uuid.v4()}`.replace(
+ /-/gi,
+ '_'
+);
const resourceType = 'Patient';
let resourceId;
test.before(tools.checkCredentials);
test.before(async () => {
- return tools.runAsync(`${cmdDataset} createDataset ${datasetId}`, cwdDatasets)
- .then((results) => {
+ return tools
+ .runAsync(`${cmdDataset} createDataset ${datasetId}`, cwdDatasets)
+ .then(results => {
console.log(results);
return results;
});
});
test.after.always(async () => {
try {
- await tools.runAsync(`${cmdDataset} deleteDataset ${datasetId}`, cwdDatasets);
- } catch (err) {
- } // Ignore error
+ await tools.runAsync(
+ `${cmdDataset} deleteDataset ${datasetId}`,
+ cwdDatasets
+ );
+ } catch (err) {} // Ignore error
});
-test.serial(`should create a FHIR resource`, async (t) => {
- await tools.runAsync(`${cmdFhirStores} createFhirStore ${datasetId} ${fhirStoreId}`, cwd);
+test.serial(`should create a FHIR resource`, async t => {
+ await tools.runAsync(
+ `${cmdFhirStores} createFhirStore ${datasetId} ${fhirStoreId}`,
+ cwd
+ );
const output = await tools.runAsync(
- `${cmd} createResource ${datasetId} ${fhirStoreId} ${resourceType}`, cwd);
- const createdMessage =
- new RegExp(`Created resource ${resourceType} with ID (.*).`);
+ `${cmd} createResource ${datasetId} ${fhirStoreId} ${resourceType}`,
+ cwd
+ );
+ const createdMessage = new RegExp(
+ `Created resource ${resourceType} with ID (.*).`
+ );
t.regex(output, createdMessage);
resourceId = createdMessage.exec(output)[1];
});
-test.serial(`should get a FHIR resource`, async (t) => {
+test.serial(`should get a FHIR resource`, async t => {
const output = await tools.runAsync(
- `${cmd} getResource ${datasetId} ${fhirStoreId} ${resourceType} ${
- resourceId}`,
- cwd);
+ `${cmd} getResource ${datasetId} ${fhirStoreId} ${resourceType} ${resourceId}`,
+ cwd
+ );
t.regex(output, new RegExp(`Got ${resourceType} resource`));
});
-test.serial(`should update a FHIR resource`, async (t) => {
+test.serial(`should update a FHIR resource`, async t => {
const output = await tools.runAsync(
- `${cmd} updateResource ${datasetId} ${fhirStoreId} ${resourceType} ${
- resourceId}`,
- cwd);
+ `${cmd} updateResource ${datasetId} ${fhirStoreId} ${resourceType} ${resourceId}`,
+ cwd
+ );
t.is(output, `Updated ${resourceType} with ID ${resourceId}`);
});
-test.serial(`should patch a FHIR resource`, async (t) => {
+test.serial(`should patch a FHIR resource`, async t => {
const output = await tools.runAsync(
- `${cmd} patchResource ${datasetId} ${fhirStoreId} ${resourceType} ${
- resourceId}`,
- cwd);
+ `${cmd} patchResource ${datasetId} ${fhirStoreId} ${resourceType} ${resourceId}`,
+ cwd
+ );
t.is(output, `Patched ${resourceType} with ID ${resourceId}`);
});
-test.serial(`should delete a FHIR resource`, async (t) => {
+test.serial(`should delete a FHIR resource`, async t => {
const output = await tools.runAsync(
- `${cmd} deleteResource ${datasetId} ${fhirStoreId} ${resourceType} ${
- resourceId}`,
- cwd);
+ `${cmd} deleteResource ${datasetId} ${fhirStoreId} ${resourceType} ${resourceId}`,
+ cwd
+ );
t.is(output, `Deleted ${resourceType} with ID ${resourceId}.`);
// Clean up
- await tools.runAsync(`${cmdFhirStores} deleteFhirStore ${datasetId} ${fhirStoreId}`, cwd);
+ await tools.runAsync(
+ `${cmdFhirStores} deleteFhirStore ${datasetId} ${fhirStoreId}`,
+ cwd
+ );
});
diff --git a/healthcare/fhir/system-test/fhir_stores.test.js b/healthcare/fhir/system-test/fhir_stores.test.js
index 2194d87f8d..54f00b0cc6 100644
--- a/healthcare/fhir/system-test/fhir_stores.test.js
+++ b/healthcare/fhir/system-test/fhir_stores.test.js
@@ -25,58 +25,77 @@ const cmd = `node fhir_stores.js`;
const cwdDatasets = path.join(__dirname, `../../datasets`);
const cwd = path.join(__dirname, `..`);
const datasetId = `nodejs-docs-samples-test-${uuid.v4()}`.replace(/-/gi, '_');
-const fhirStoreId =
- `nodejs-docs-samples-test-fhir-store${uuid.v4()}`.replace(/-/gi, '_');
-const pubsubTopic =
- `nodejs-docs-samples-test-pubsub${uuid.v4()}`.replace(/-/gi, '_');
+const fhirStoreId = `nodejs-docs-samples-test-fhir-store${uuid.v4()}`.replace(
+ /-/gi,
+ '_'
+);
+const pubsubTopic = `nodejs-docs-samples-test-pubsub${uuid.v4()}`.replace(
+ /-/gi,
+ '_'
+);
test.before(tools.checkCredentials);
test.before(async () => {
- return tools.runAsync(`${cmdDataset} createDataset ${datasetId}`, cwdDatasets)
- .then((results) => {
+ return tools
+ .runAsync(`${cmdDataset} createDataset ${datasetId}`, cwdDatasets)
+ .then(results => {
console.log(results);
return results;
});
});
test.after.always(async () => {
try {
- await tools.runAsync(`${cmdDataset} deleteDataset ${datasetId}`, cwdDatasets);
- } catch (err) {
- } // Ignore error
+ await tools.runAsync(
+ `${cmdDataset} deleteDataset ${datasetId}`,
+ cwdDatasets
+ );
+ } catch (err) {} // Ignore error
});
test.serial(`should create a FHIR store`, async t => {
const output = await tools.runAsync(
- `${cmd} createFhirStore ${datasetId} ${fhirStoreId}`, cwd);
+ `${cmd} createFhirStore ${datasetId} ${fhirStoreId}`,
+ cwd
+ );
t.regex(output, /Created FHIR store/);
});
test.serial(`should get a FHIR store`, async t => {
const output = await tools.runAsync(
- `${cmd} getFhirStore ${datasetId} ${fhirStoreId}`, cwd);
+ `${cmd} getFhirStore ${datasetId} ${fhirStoreId}`,
+ cwd
+ );
t.regex(output, /Got FHIR store/);
});
test.serial(`should list FHIR stores`, async t => {
- const output =
- await tools.runAsync(`${cmd} listFhirStores ${datasetId}`, cwd);
+ const output = await tools.runAsync(
+ `${cmd} listFhirStores ${datasetId}`,
+ cwd
+ );
t.regex(output, /FHIR stores/);
});
test.serial(`should patch a FHIR store`, async t => {
const output = await tools.runAsync(
- `${cmd} patchFhirStore ${datasetId} ${fhirStoreId} ${pubsubTopic}`, cwd);
+ `${cmd} patchFhirStore ${datasetId} ${fhirStoreId} ${pubsubTopic}`,
+ cwd
+ );
t.regex(output, /Patched FHIR store/);
});
test.serial(`should get FHIR store metadata`, async t => {
const output = await tools.runAsync(
- `${cmd} getMetadata ${datasetId} ${fhirStoreId}`, cwd);
+ `${cmd} getMetadata ${datasetId} ${fhirStoreId}`,
+ cwd
+ );
t.regex(output, /Capabilities statement for FHIR store/);
});
test(`should delete a FHIR store`, async t => {
const output = await tools.runAsync(
- `${cmd} deleteFhirStore ${datasetId} ${fhirStoreId}`, cwd);
+ `${cmd} deleteFhirStore ${datasetId} ${fhirStoreId}`,
+ cwd
+ );
t.regex(output, /Deleted FHIR store/);
});
diff --git a/iot/beta-features/commands/receive/receive.js b/iot/beta-features/commands/receive/receive.js
index c15d54ffd9..45126784b2 100644
--- a/iot/beta-features/commands/receive/receive.js
+++ b/iot/beta-features/commands/receive/receive.js
@@ -26,90 +26,95 @@ let argv = require(`yargs`)
.options({
projectId: {
default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
- description: 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
+ description:
+ 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
cloudRegion: {
default: 'us-central1',
description: 'GCP cloud region.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
registryId: {
description: 'Cloud IoT registry ID.',
requiresArg: true,
demandOption: true,
- type: 'string'
+ type: 'string',
},
deviceId: {
description: 'Cloud IoT device ID.',
requiresArg: true,
demandOption: true,
- type: 'string'
+ type: 'string',
},
privateKeyFile: {
description: 'Path to private key file.',
requiresArg: true,
demandOption: true,
- type: 'string'
+ type: 'string',
},
algorithm: {
description: 'Encryption algorithm to generate the JWT.',
requiresArg: true,
demandOption: true,
choices: ['RS256', 'ES256'],
- type: 'string'
+ type: 'string',
},
maxDuration: {
default: -1,
- description: 'Max number of minutes to run before ending the client. Set to -1 for no maximum',
+ description:
+ 'Max number of minutes to run before ending the client. Set to -1 for no maximum',
requiresArg: true,
- type: 'number'
+ type: 'number',
},
tokenExpMins: {
default: 20,
description: 'Minutes to JWT token expiration.',
requiresArg: true,
- type: 'number'
+ type: 'number',
},
mqttBridgeHostname: {
default: 'mqtt.googleapis.com',
description: 'MQTT bridge hostname.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
mqttBridgePort: {
default: 8883,
description: 'MQTT bridge port.',
requiresArg: true,
- type: 'number'
- }
+ type: 'number',
+ },
})
- .example(`node $0 --projectId=blue-jet-123 \\\n\t--registryId=my-registry --deviceId=my-node-device \\\n\t--privateKeyFile=../rsa_private.pem --algorithm=RS256 \\\n\t --cloudRegion=us-central1`)
+ .example(
+ `node $0 --projectId=blue-jet-123 \\\n\t--registryId=my-registry --deviceId=my-node-device \\\n\t--privateKeyFile=../rsa_private.pem --algorithm=RS256 \\\n\t --cloudRegion=us-central1`
+ )
.wrap(120)
.recommendCommands()
.epilogue(`For more information, see https://cloud.google.com/iot-core/docs`)
.help()
- .strict()
- .argv;
+ .strict().argv;
// Create a Cloud IoT Core JWT for the given project id, signed with the given
// private key.
// [START iot_mqtt_jwt]
-function createJwt (projectId, privateKeyFile, algorithm) {
+function createJwt(projectId, privateKeyFile, algorithm) {
const token = {
- 'iat': parseInt(Date.now() / 1000),
- 'exp': parseInt(Date.now() / 1000) + 20 * 60, // 20 minutes
- 'aud': projectId
+ iat: parseInt(Date.now() / 1000),
+ exp: parseInt(Date.now() / 1000) + 20 * 60, // 20 minutes
+ aud: projectId,
};
const privateKey = fs.readFileSync(privateKeyFile);
- return jwt.sign(token, privateKey, { algorithm: algorithm });
+ return jwt.sign(token, privateKey, {algorithm: algorithm});
}
// [END iot_mqtt_jwt]
// [START iot_mqtt_run]
-const mqttClientId = `projects/${argv.projectId}/locations/${argv.cloudRegion}/registries/${argv.registryId}/devices/${argv.deviceId}`;
+const mqttClientId = `projects/${argv.projectId}/locations/${
+ argv.cloudRegion
+}/registries/${argv.registryId}/devices/${argv.deviceId}`;
let connectionArgs = {
host: argv.mqttBridgeHostname,
port: argv.mqttBridgePort,
@@ -118,7 +123,7 @@ let connectionArgs = {
password: createJwt(argv.projectId, argv.privateKeyFile, argv.algorithm),
protocol: 'mqtts',
qos: 1,
- secureProtocol: 'TLSv1_2_method'
+ secureProtocol: 'TLSv1_2_method',
};
// Create a client, and connect to the Google MQTT bridge.
@@ -129,12 +134,14 @@ client.subscribe(`/devices/${argv.deviceId}/commands/#`);
if (argv.maxDuration > 0) {
setTimeout(() => {
- console.log(`Closing connection to MQTT after ${argv.maxDuration} seconds.`);
+ console.log(
+ `Closing connection to MQTT after ${argv.maxDuration} seconds.`
+ );
client.end();
}, argv.maxDuration * 60 * 1000);
}
-client.on('connect', (success) => {
+client.on('connect', success => {
console.log('connect');
if (!success) {
console.log('Client not connected...');
@@ -148,12 +155,15 @@ client.on('close', () => {
console.log('close');
});
-client.on('error', (err) => {
+client.on('error', err => {
console.log('error', err);
});
client.on('message', (topic, message, packet) => {
- console.log('message received: ', Buffer.from(message, 'base64').toString('ascii'));
+ console.log(
+ 'message received: ',
+ Buffer.from(message, 'base64').toString('ascii')
+ );
});
client.on('packetsend', () => {
diff --git a/iot/beta-features/commands/receive/receiveTest.js b/iot/beta-features/commands/receive/receiveTest.js
index 43e8ec3345..7f80338fa9 100644
--- a/iot/beta-features/commands/receive/receiveTest.js
+++ b/iot/beta-features/commands/receive/receiveTest.js
@@ -50,28 +50,39 @@ test.after.always(async () => {
console.log(`Topic ${topic.name} deleted.`);
});
-test(`should receive command message`, async (t) => {
+test(`should receive command message`, async t => {
await tools.runAsync(installDeps, cwdHelper);
await tools.runAsync(`${helper} setupIotTopic ${topicName}`, cwdHelper);
await tools.runAsync(
- `${helper} createRegistry ${localRegName} ${topicName}`, cwdHelper);
+ `${helper} createRegistry ${localRegName} ${topicName}`,
+ cwdHelper
+ );
await tools.runAsync(
- `${helper} createRsa256Device ${localDevice} ${localRegName} ./resources/rsa_cert.pem`, cwdHelper);
+ `${helper} createRsa256Device ${localDevice} ${localRegName} ./resources/rsa_cert.pem`,
+ cwdHelper
+ );
// This command needs to run asynchronously without await to ensure the send comand happens while
// mqtt client is available. Limit client to last only 15 seconds (0.25 minutes)
let out = tools.runAsync(
- `${receiveCmd} --deviceId=${localDevice} --registryId=${localRegName} --maxDuration=0.25 ${receiveCmdSuffix}`);
+ `${receiveCmd} --deviceId=${localDevice} --registryId=${localRegName} --maxDuration=0.25 ${receiveCmdSuffix}`
+ );
await tools.runAsync(
- `${sendCmd} sendCommand ${localDevice} ${localRegName} "me want cookies"`, cwdSend);
+ `${sendCmd} sendCommand ${localDevice} ${localRegName} "me want cookies"`,
+ cwdSend
+ );
// await for original command to resolve before checking regex
t.regex(await out, new RegExp(`me want cookies`));
await tools.runAsync(
- `${helper} getDeviceState ${localDevice} ${localRegName}`, cwdHelper);
+ `${helper} getDeviceState ${localDevice} ${localRegName}`,
+ cwdHelper
+ );
await tools.runAsync(
- `${helper} deleteDevice ${localDevice} ${localRegName}`, cwdHelper);
+ `${helper} deleteDevice ${localDevice} ${localRegName}`,
+ cwdHelper
+ );
await tools.runAsync(`${helper} deleteRegistry ${localRegName}`, cwdHelper);
});
diff --git a/iot/beta-features/commands/send/send.js b/iot/beta-features/commands/send/send.js
index 6d993060c3..015fe1fb2f 100644
--- a/iot/beta-features/commands/send/send.js
+++ b/iot/beta-features/commands/send/send.js
@@ -19,28 +19,28 @@ const {google} = require('googleapis');
const API_VERSION = 'v1';
const DISCOVERY_API = 'https://cloudiot.googleapis.com/$discovery/rest';
-function sendCommand (deviceId, registryId, projectId, region, command) {
+function sendCommand(deviceId, registryId, projectId, region, command) {
// [START iot_send_command]
const parentName = `projects/${projectId}/locations/${region}`;
const registryName = `${parentName}/registries/${registryId}`;
const binaryData = Buffer.from(command).toString('base64');
const request = {
name: `${registryName}/devices/${deviceId}`,
- binaryData: binaryData
+ binaryData: binaryData,
};
- google.auth.getClient().then((authClient) => {
+ google.auth.getClient().then(authClient => {
const discoveryUrl = `${DISCOVERY_API}?version=${API_VERSION}`;
if (authClient.createScopedRequired && authClient.createScopedRequired()) {
// Scopes can be specified either as an array or as a single,
// space-delimited string.
authClient = authClient.createScoped([
- 'https://www.googleapis.com/auth/cloud-platform'
+ 'https://www.googleapis.com/auth/cloud-platform',
]);
}
google.options({
- auth: authClient
+ auth: authClient,
});
google.discoverAPI(discoveryUrl).then((client, err) => {
@@ -48,7 +48,8 @@ function sendCommand (deviceId, registryId, projectId, region, command) {
console.log('Error during API discovery', err);
return undefined;
}
- client.projects.locations.registries.devices.sendCommandToDevice(request,
+ client.projects.locations.registries.devices.sendCommandToDevice(
+ request,
(err, data) => {
if (err) {
console.log('Could not send command:', request);
@@ -56,7 +57,8 @@ function sendCommand (deviceId, registryId, projectId, region, command) {
} else {
console.log('Success :', data.statusText);
}
- });
+ }
+ );
});
});
// [END iot_send_command]
@@ -69,7 +71,7 @@ require(`yargs`) // eslint-disable-line
alias: 'c',
default: 'us-central1',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
projectId: {
alias: 'p',
@@ -77,23 +79,28 @@ require(`yargs`) // eslint-disable-line
description: `The Project ID to use. Defaults to the value of
the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.`,
requiresArg: true,
- type: 'string'
+ type: 'string',
},
serviceAccount: {
alias: 's',
default: process.env.GOOGLE_APPLICATION_CREDENTIALS,
description: 'The path to your service credentials JSON.',
requiresArg: true,
- type: 'string'
- }
+ type: 'string',
+ },
})
.command(
`sendCommand `,
`Sends a command to a device.`,
{},
- (opts) => {
- sendCommand(opts.deviceId, opts.registryId, opts.projectId,
- opts.cloudRegion, opts.command);
+ opts => {
+ sendCommand(
+ opts.deviceId,
+ opts.registryId,
+ opts.projectId,
+ opts.cloudRegion,
+ opts.command
+ );
}
)
.example(`node $0 sendCommand my-device my-registry "test"`)
@@ -101,5 +108,4 @@ require(`yargs`) // eslint-disable-line
.recommendCommands()
.epilogue(`For more information, see https://cloud.google.com/iot-core/docs`)
.help()
- .strict()
- .argv;
+ .strict().argv;
diff --git a/iot/beta-features/commands/send/sendTest.js b/iot/beta-features/commands/send/sendTest.js
index de8b1d37cb..630f75a831 100644
--- a/iot/beta-features/commands/send/sendTest.js
+++ b/iot/beta-features/commands/send/sendTest.js
@@ -50,27 +50,39 @@ test.after.always(async () => {
console.log(`Topic ${topic.name} deleted.`);
});
-test(`should send command message`, async (t) => {
+test(`should send command message`, async t => {
// Create topic, registry, and device
await tools.runAsync(installDeps, cwdHelper);
await tools.runAsync(`${helper} setupIotTopic ${topicName}`, cwdHelper);
await tools.runAsync(
- `${helper} createRegistry ${localRegName} ${topicName}`, cwdHelper);
+ `${helper} createRegistry ${localRegName} ${topicName}`,
+ cwdHelper
+ );
await tools.runAsync(
- `${helper} createRsa256Device ${localDevice} ${localRegName} ./resources/rsa_cert.pem`, cwdHelper);
+ `${helper} createRsa256Device ${localDevice} ${localRegName} ./resources/rsa_cert.pem`,
+ cwdHelper
+ );
// Let the client run asynchronously since we don't need to test the output here
tools.runAsync(
- `${receiveCmd} --deviceId=${localDevice} --registryId=${localRegName} ${receiveCmdSuffix}`, cwdRcv);
+ `${receiveCmd} --deviceId=${localDevice} --registryId=${localRegName} ${receiveCmdSuffix}`,
+ cwdRcv
+ );
let out = await tools.runAsync(
- `${sendCmd} sendCommand ${localDevice} ${localRegName} "me want cookies"`, cwdSend);
+ `${sendCmd} sendCommand ${localDevice} ${localRegName} "me want cookies"`,
+ cwdSend
+ );
t.regex(out, new RegExp(`Success : OK`));
await tools.runAsync(
- `${helper} getDeviceState ${localDevice} ${localRegName}`, cwdHelper);
+ `${helper} getDeviceState ${localDevice} ${localRegName}`,
+ cwdHelper
+ );
await tools.runAsync(
- `${helper} deleteDevice ${localDevice} ${localRegName}`, cwdHelper);
+ `${helper} deleteDevice ${localDevice} ${localRegName}`,
+ cwdHelper
+ );
await tools.runAsync(`${helper} deleteRegistry ${localRegName}`, cwdHelper);
});
diff --git a/iot/http_example/cloudiot_http_example.js b/iot/http_example/cloudiot_http_example.js
index 33ed09fe78..6f2a6e462a 100644
--- a/iot/http_example/cloudiot_http_example.js
+++ b/iot/http_example/cloudiot_http_example.js
@@ -25,74 +25,76 @@ var argv = require(`yargs`)
.options({
projectId: {
default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
- description: 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
+ description:
+ 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
cloudRegion: {
default: 'us-central1',
description: 'GCP cloud region.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
registryId: {
description: 'Cloud IoT registry ID.',
requiresArg: true,
demandOption: true,
- type: 'string'
+ type: 'string',
},
deviceId: {
description: 'Cloud IoT device ID.',
requiresArg: true,
demandOption: true,
- type: 'string'
+ type: 'string',
},
privateKeyFile: {
description: 'Path to private key file.',
requiresArg: true,
demandOption: true,
- type: 'string'
+ type: 'string',
},
algorithm: {
description: 'Encryption algorithm to generate the RSA or EC JWT.',
requiresArg: true,
demandOption: true,
choices: ['RS256', 'ES256'],
- type: 'string'
+ type: 'string',
},
numMessages: {
default: 100,
description: 'Number of messages to publish.',
requiresArg: true,
- type: 'number'
+ type: 'number',
},
tokenExpMins: {
default: 20,
description: 'Minutes to JWT token expiration.',
requiresArg: true,
- type: 'number'
+ type: 'number',
},
httpBridgeAddress: {
default: 'cloudiotdevice.googleapis.com',
description: 'HTTP bridge address.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
messageType: {
default: 'events',
description: 'Message type to publish.',
requiresArg: true,
choices: ['events', 'state'],
- type: 'string'
- }
+ type: 'string',
+ },
})
- .example(`node $0 cloudiotHttp_example_nodejs.js --projectId=blue-jet-123 --registryId=my-registry --deviceId=my-node-device --privateKeyFile=../rsaPrivate.pem --algorithm=RS256`)
+ .example(
+ `node $0 cloudiotHttp_example_nodejs.js --projectId=blue-jet-123 --registryId=my-registry --deviceId=my-node-device --privateKeyFile=../rsaPrivate.pem --algorithm=RS256`
+ )
.wrap(120)
.recommendCommands()
.epilogue(`For more information, see https://cloud.google.com/iot-core/docs`)
.help()
- .strict()
- .argv;
+ .strict().argv;
// [START iot_http_variables]
// A unique string that identifies this device. For Google Cloud IoT Core, it
@@ -100,11 +102,13 @@ var argv = require(`yargs`)
let iatTime = parseInt(Date.now() / 1000);
let authToken = createJwt(argv.projectId, argv.privateKeyFile, argv.algorithm);
-const devicePath = `projects/${argv.projectId}/locations/${argv.cloudRegion}/registries/${argv.registryId}/devices/${argv.deviceId}`;
+const devicePath = `projects/${argv.projectId}/locations/${
+ argv.cloudRegion
+}/registries/${argv.registryId}/devices/${argv.deviceId}`;
// The request path, set accordingly depending on the message type.
-const pathSuffix = argv.messageType === 'events'
- ? ':publishEvent' : ':setState';
+const pathSuffix =
+ argv.messageType === 'events' ? ':publishEvent' : ':setState';
const urlBase = `https://${argv.httpBridgeAddress}/v1/${devicePath}`;
const url = `${urlBase}${pathSuffix}`;
// [END iot_http_variables]
@@ -112,17 +116,17 @@ const url = `${urlBase}${pathSuffix}`;
// Create a Cloud IoT Core JWT for the given project ID, signed with the given
// private key.
// [START iot_http_jwt]
-function createJwt (projectId, privateKeyFile, algorithm) {
+function createJwt(projectId, privateKeyFile, algorithm) {
// Create a JWT to authenticate this device. The device will be disconnected
// after the token expires, and will have to reconnect with a new token. The
// audience field should always be set to the GCP project ID.
const token = {
- 'iat': parseInt(Date.now() / 1000),
- 'exp': parseInt(Date.now() / 1000) + 20 * 60, // 20 minutes
- 'aud': projectId
+ iat: parseInt(Date.now() / 1000),
+ exp: parseInt(Date.now() / 1000) + 20 * 60, // 20 minutes
+ aud: projectId,
};
const privateKey = fs.readFileSync(privateKeyFile);
- return jwt.sign(token, privateKey, { algorithm: algorithm });
+ return jwt.sign(token, privateKey, {algorithm: algorithm});
}
// [END iot_http_jwt]
@@ -130,39 +134,41 @@ function createJwt (projectId, privateKeyFile, algorithm) {
// messageCount. Telemetry events are published at a rate of 1 per second and
// states at a rate of 1 every 2 seconds.
// [START iot_http_publish]
-function publishAsync (authToken, messageCount, numMessages) {
+function publishAsync(authToken, messageCount, numMessages) {
const payload = `${argv.registryId}/${argv.deviceId}-payload-${messageCount}`;
console.log('Publishing message:', payload);
const binaryData = Buffer.from(payload).toString('base64');
- const postData = argv.messageType === 'events' ? {
- binary_data: binaryData
- } : {
- state: {
- binary_data: binaryData
- }
- };
+ const postData =
+ argv.messageType === 'events'
+ ? {
+ binary_data: binaryData,
+ }
+ : {
+ state: {
+ binary_data: binaryData,
+ },
+ };
const options = {
url: url,
headers: {
- 'authorization': `Bearer ${authToken}`,
+ authorization: `Bearer ${authToken}`,
'content-type': 'application/json',
- 'cache-control': 'no-cache'
+ 'cache-control': 'no-cache',
},
body: postData,
json: true,
method: 'POST',
retries: 5,
- shouldRetryFn:
- function (incomingHttpMessage) {
- return incomingHttpMessage.statusMessage !== 'OK';
- }
+ shouldRetryFn: function(incomingHttpMessage) {
+ return incomingHttpMessage.statusMessage !== 'OK';
+ },
};
// Send events for high-frequency updates, update state only occasionally.
const delayMs = argv.messageType === 'events' ? 1000 : 2000;
console.log(JSON.stringify(request));
- request(options, function (error, response, body) {
+ request(options, function(error, response, body) {
if (error) {
console.error('Received error: ', error);
} else if (response.body.error) {
@@ -173,12 +179,16 @@ function publishAsync (authToken, messageCount, numMessages) {
if (messageCount < numMessages) {
// If we have published fewer than numMessage messages, publish payload
// messageCount + 1.
- setTimeout(function () {
+ setTimeout(function() {
let secsFromIssue = parseInt(Date.now() / 1000) - iatTime;
if (secsFromIssue > argv.tokenExpMins * 60) {
iatTime = parseInt(Date.now() / 1000);
console.log(`\tRefreshing token after ${secsFromIssue} seconds.`);
- authToken = createJwt(argv.projectId, argv.privateKeyFile, argv.algorithm);
+ authToken = createJwt(
+ argv.projectId,
+ argv.privateKeyFile,
+ argv.algorithm
+ );
}
publishAsync(authToken, messageCount + 1, numMessages);
@@ -189,27 +199,25 @@ function publishAsync (authToken, messageCount, numMessages) {
// [END iot_http_publish]
// [START iot_http_getconfig]
-function getConfig (authToken, version) {
+function getConfig(authToken, version) {
console.log(`Getting config from URL: ${urlBase}`);
const options = {
url: urlBase + '/config?local_version=' + version,
headers: {
- 'authorization': `Bearer ${authToken}`,
+ authorization: `Bearer ${authToken}`,
'content-type': 'application/json',
- 'cache-control': 'no-cache'
-
+ 'cache-control': 'no-cache',
},
json: true,
retries: 5,
- shouldRetryFn:
- function (incomingHttpMessage) {
- console.log('Retry?');
- return incomingHttpMessage.statusMessage !== 'OK';
- }
+ shouldRetryFn: function(incomingHttpMessage) {
+ console.log('Retry?');
+ return incomingHttpMessage.statusMessage !== 'OK';
+ },
};
console.log(JSON.stringify(request.RetryStrategies));
- request(options, function (error, response, body) {
+ request(options, function(error, response, body) {
if (error) {
console.error('Received error: ', error);
} else if (response.body.error) {
diff --git a/iot/http_example/package.json b/iot/http_example/package.json
index 57e179b9b5..cee9ca2bff 100644
--- a/iot/http_example/package.json
+++ b/iot/http_example/package.json
@@ -6,8 +6,6 @@
"author": "Google Inc.",
"main": "cloudiot_http_example_nodejs.js",
"scripts": {
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"test": "repo-tools test run --cmd ava -- -T 3m --verbose system-test/*.test.js"
},
"dependencies": {
@@ -21,7 +19,6 @@
"@google-cloud/pubsub": "0.13.2",
"@google-cloud/nodejs-repo-tools": "^3.0.0",
"ava": "0.25.0",
- "semistandard": "^12.0.1",
"uuid": "3.3.2"
}
}
diff --git a/iot/http_example/system-test/cloudiot_http_example.test.js b/iot/http_example/system-test/cloudiot_http_example.test.js
index 276c7b5070..528739238b 100644
--- a/iot/http_example/system-test/cloudiot_http_example.test.js
+++ b/iot/http_example/system-test/cloudiot_http_example.test.js
@@ -33,80 +33,114 @@ test.todo(tools.run(installDeps, `${cwd}/../manager`));
test.before(tools.checkCredentials);
test.before(async () => {
const pubsub = PubSub();
- return pubsub.createTopic(topicName)
- .then((results) => {
- const topic = results[0];
- console.log(`Topic ${topic.name} created.`);
- return topic;
- });
+ return pubsub.createTopic(topicName).then(results => {
+ const topic = results[0];
+ console.log(`Topic ${topic.name} created.`);
+ return topic;
+ });
});
test.after.always(async () => {
const pubsub = PubSub();
const topic = pubsub.topic(topicName);
- return topic.delete()
- .then(() => {
- console.log(`Topic ${topic.name} deleted.`);
- });
+ return topic.delete().then(() => {
+ console.log(`Topic ${topic.name} deleted.`);
+ });
});
-test(`should receive configuration message`, async (t) => {
+test(`should receive configuration message`, async t => {
const localDevice = `test-rsa-device`;
const localRegName = `${registryName}-rsa256`;
await tools.runAsync(`${helper} setupIotTopic ${topicName}`, cwd);
await tools.runAsync(
- `${helper} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${helper} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`, cwd);
+ `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`,
+ cwd
+ );
const output = await tools.runAsync(
- `${cmd} --messageType=events --numMessages=1 --privateKeyFile=resources/rsa_private.pem --algorithm=RS256`, cwd);
+ `${cmd} --messageType=events --numMessages=1 --privateKeyFile=resources/rsa_private.pem --algorithm=RS256`,
+ cwd
+ );
t.regex(output, new RegExp(/Getting config/));
// Check / cleanup
- await tools.runAsync(`${helper} getDeviceState ${localDevice} ${localRegName}`, cwd);
- await tools.runAsync(`${helper} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ await tools.runAsync(
+ `${helper} getDeviceState ${localDevice} ${localRegName}`,
+ cwd
+ );
+ await tools.runAsync(
+ `${helper} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
await tools.runAsync(`${helper} deleteRegistry ${localRegName}`, cwd);
});
-test(`should send event message`, async (t) => {
+test(`should send event message`, async t => {
const localDevice = `test-rsa-device`;
const localRegName = `${registryName}-rsa256`;
await tools.runAsync(`${helper} setupIotTopic ${topicName}`, cwd);
await tools.runAsync(
- `${helper} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${helper} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`, cwd);
+ `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`,
+ cwd
+ );
const output = await tools.runAsync(
- `${cmd} --messageType=events --numMessages=1 --privateKeyFile=resources/rsa_private.pem --algorithm=RS256`, cwd);
+ `${cmd} --messageType=events --numMessages=1 --privateKeyFile=resources/rsa_private.pem --algorithm=RS256`,
+ cwd
+ );
t.regex(output, new RegExp(/Publishing message/));
// Check / cleanup
- await tools.runAsync(`${helper} getDeviceState ${localDevice} ${localRegName}`, cwd);
- await tools.runAsync(`${helper} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ await tools.runAsync(
+ `${helper} getDeviceState ${localDevice} ${localRegName}`,
+ cwd
+ );
+ await tools.runAsync(
+ `${helper} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
await tools.runAsync(`${helper} deleteRegistry ${localRegName}`, cwd);
});
-test(`should send event message`, async (t) => {
+test(`should send event message`, async t => {
const localDevice = `test-rsa-device`;
const localRegName = `${registryName}-rsa256`;
await tools.runAsync(`${helper} setupIotTopic ${topicName}`, cwd);
await tools.runAsync(
- `${helper} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${helper} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`, cwd);
+ `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`,
+ cwd
+ );
const output = await tools.runAsync(
- `${cmd} --messageType=state --numMessages=1 --privateKeyFile=resources/rsa_private.pem --algorithm=RS256`, cwd);
+ `${cmd} --messageType=state --numMessages=1 --privateKeyFile=resources/rsa_private.pem --algorithm=RS256`,
+ cwd
+ );
t.regex(output, new RegExp(/Publishing message/));
// Check / cleanup
- await tools.runAsync(`${helper} getDeviceState ${localDevice} ${localRegName}`, cwd);
- await tools.runAsync(`${helper} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ await tools.runAsync(
+ `${helper} getDeviceState ${localDevice} ${localRegName}`,
+ cwd
+ );
+ await tools.runAsync(
+ `${helper} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
await tools.runAsync(`${helper} deleteRegistry ${localRegName}`, cwd);
});
diff --git a/iot/manager/manager.js b/iot/manager/manager.js
index 9b5a9c14a8..4583924f7b 100644
--- a/iot/manager/manager.js
+++ b/iot/manager/manager.js
@@ -22,15 +22,16 @@ const API_VERSION = 'v1';
const DISCOVERY_API = 'https://cloudiot.googleapis.com/$discovery/rest';
// Configures the topic for Cloud IoT Core.
-function setupIotTopic (topicName) {
+function setupIotTopic(topicName) {
const PubSub = require('@google-cloud/pubsub');
const pubsub = PubSub();
const topic = pubsub.topic(topicName);
const serviceAccount = `serviceAccount:cloud-iot@system.gserviceaccount.com`;
- topic.iam.getPolicy()
- .then((results) => {
+ topic.iam
+ .getPolicy()
+ .then(results => {
const policy = results[0] || {};
policy.bindings || (policy.bindings = []);
console.log(JSON.stringify(policy, null, 2));
@@ -38,10 +39,10 @@ function setupIotTopic (topicName) {
let hasRole = false;
let binding = {
role: 'roles/pubsub.publisher',
- members: [serviceAccount]
+ members: [serviceAccount],
};
- policy.bindings.forEach((_binding) => {
+ policy.bindings.forEach(_binding => {
if (_binding.role === binding.role) {
binding = _binding;
hasRole = true;
@@ -61,31 +62,30 @@ function setupIotTopic (topicName) {
// Updates the IAM policy for the topic
return topic.iam.setPolicy(policy);
})
- .then((results) => {
+ .then(results => {
const updatedPolicy = results[0];
console.log(JSON.stringify(updatedPolicy, null, 2));
})
- .catch((err) => {
+ .catch(err => {
console.error('ERROR:', err);
});
}
-function createIotTopic (topicName) {
+function createIotTopic(topicName) {
// Imports the Google Cloud client library
const PubSub = require('@google-cloud/pubsub');
// Instantiates a client
const pubsub = PubSub();
- pubsub.createTopic(topicName)
- .then((results) => {
- setupIotTopic(topicName);
- });
+ pubsub.createTopic(topicName).then(results => {
+ setupIotTopic(topicName);
+ });
}
// Lookup the registry, assuming that it exists.
-function lookupRegistry (client, registryId, projectId, cloudRegion, cb) {
+function lookupRegistry(client, registryId, projectId, cloudRegion, cb) {
// [START iot_lookup_registry]
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
@@ -95,7 +95,7 @@ function lookupRegistry (client, registryId, projectId, cloudRegion, cb) {
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- name: registryName
+ name: registryName,
};
client.projects.locations.registries.get(request, (err, res) => {
@@ -110,7 +110,7 @@ function lookupRegistry (client, registryId, projectId, cloudRegion, cb) {
// [END iot_lookup_registry]
}
-function createRegistry (
+function createRegistry(
client,
registryId,
projectId,
@@ -131,11 +131,13 @@ function createRegistry (
const request = {
parent: parentName,
resource: {
- eventNotificationConfigs: [{
- 'pubsubTopicName': pubsubTopic
- }],
- 'id': registryId
- }
+ eventNotificationConfigs: [
+ {
+ pubsubTopicName: pubsubTopic,
+ },
+ ],
+ id: registryId,
+ },
};
client.projects.locations.registries.create(request, (err, res) => {
@@ -156,7 +158,7 @@ function createRegistry (
}
// Create a new registry, or look up an existing one if it doesn't exist.
-function lookupOrCreateRegistry (
+function lookupOrCreateRegistry(
client,
registryId,
projectId,
@@ -184,7 +186,7 @@ function lookupOrCreateRegistry (
// Create a new device with the given id. The body defines the parameters for
// the device, such as authentication.
-function createUnauthDevice (
+function createUnauthDevice(
client,
deviceId,
registryId,
@@ -205,7 +207,7 @@ function createUnauthDevice (
const request = {
parent: registryName,
- resource: {id: deviceId}
+ resource: {id: deviceId},
};
client.projects.locations.registries.devices.create(request, (err, res) => {
@@ -221,7 +223,7 @@ function createUnauthDevice (
}
// Create a device using RSA256 for authentication.
-function createRsaDevice (
+function createRsaDevice(
client,
deviceId,
registryId,
@@ -244,15 +246,15 @@ function createRsaDevice (
{
publicKey: {
format: 'RSA_X509_PEM',
- key: fs.readFileSync(rsaCertificateFile).toString()
- }
- }
- ]
+ key: fs.readFileSync(rsaCertificateFile).toString(),
+ },
+ },
+ ],
};
const request = {
parent: registryName,
- resource: body
+ resource: body,
};
console.log(JSON.stringify(request));
@@ -270,7 +272,7 @@ function createRsaDevice (
}
// Create a device using ES256 for authentication.
-function createEsDevice (
+function createEsDevice(
client,
deviceId,
registryId,
@@ -293,15 +295,15 @@ function createEsDevice (
{
publicKey: {
format: 'ES256_PEM',
- key: fs.readFileSync(esCertificateFile).toString()
- }
- }
- ]
+ key: fs.readFileSync(esCertificateFile).toString(),
+ },
+ },
+ ],
};
const request = {
parent: registryName,
- resource: body
+ resource: body,
};
client.projects.locations.registries.devices.create(request, (err, res) => {
@@ -317,7 +319,7 @@ function createEsDevice (
}
// Add RSA256 authentication to the given device.
-function patchRsa256ForAuth (
+function patchRsa256ForAuth(
client,
deviceId,
registryId,
@@ -332,8 +334,7 @@ function patchRsa256ForAuth (
// const deviceId = 'my-rsa-device';
// const projectId = 'adjective-noun-123';
// const registryId = 'my-registry';
- const parentName =
- `projects/${projectId}/locations/${cloudRegion}`;
+ const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
name: `${registryName}/devices/${deviceId}`,
@@ -343,11 +344,11 @@ function patchRsa256ForAuth (
{
publicKey: {
format: 'RSA_X509_PEM',
- key: fs.readFileSync(rsaPublicKeyFile).toString()
- }
- }
- ]
- }
+ key: fs.readFileSync(rsaPublicKeyFile).toString(),
+ },
+ },
+ ],
+ },
};
client.projects.locations.registries.devices.patch(request, (err, res) => {
@@ -363,7 +364,7 @@ function patchRsa256ForAuth (
}
// Add ES256 authentication to the given device.
-function patchEs256ForAuth (
+function patchEs256ForAuth(
client,
deviceId,
registryId,
@@ -378,8 +379,7 @@ function patchEs256ForAuth (
// const deviceId = 'my-es-device';
// const projectId = 'adjective-noun-123';
// const registryId = 'my-registry';
- const parentName =
- `projects/${projectId}/locations/${cloudRegion}`;
+ const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
name: `${registryName}/devices/${deviceId}`,
@@ -389,11 +389,11 @@ function patchEs256ForAuth (
{
publicKey: {
format: 'ES256_PEM',
- key: fs.readFileSync(esPublicKeyFile).toString()
- }
- }
- ]
- }
+ key: fs.readFileSync(esPublicKeyFile).toString(),
+ },
+ },
+ ],
+ },
};
client.projects.locations.registries.devices.patch(request, (err, res) => {
@@ -409,7 +409,7 @@ function patchEs256ForAuth (
}
// List all of the devices in the given registry.
-function listDevices (client, registryId, projectId, cloudRegion) {
+function listDevices(client, registryId, projectId, cloudRegion) {
// [START iot_list_devices]
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
@@ -420,7 +420,7 @@ function listDevices (client, registryId, projectId, cloudRegion) {
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- parent: registryName
+ parent: registryName,
};
client.projects.locations.registries.devices.list(request, (err, res) => {
@@ -436,7 +436,7 @@ function listDevices (client, registryId, projectId, cloudRegion) {
}
// List all of the registries in the given project.
-function listRegistries (client, projectId, cloudRegion) {
+function listRegistries(client, projectId, cloudRegion) {
// [START iot_list_registries]
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
@@ -445,7 +445,7 @@ function listRegistries (client, projectId, cloudRegion) {
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const request = {
- parent: parentName
+ parent: parentName,
};
client.projects.locations.registries.list(request, (err, res) => {
@@ -461,7 +461,7 @@ function listRegistries (client, projectId, cloudRegion) {
}
// Delete the given device from the registry.
-function deleteDevice (
+function deleteDevice(
client,
deviceId,
registryId,
@@ -478,7 +478,7 @@ function deleteDevice (
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- name: `${registryName}/devices/${deviceId}`
+ name: `${registryName}/devices/${deviceId}`,
};
client.projects.locations.registries.devices.delete(request, (err, res) => {
@@ -497,14 +497,14 @@ function deleteDevice (
}
// Clear the given registry by removing all devices and deleting the registry.
-function clearRegistry (client, registryId, projectId, cloudRegion) {
+function clearRegistry(client, registryId, projectId, cloudRegion) {
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const requestDelete = {
- name: registryName
+ name: registryName,
};
- const after = function () {
+ const after = function() {
client.projects.locations.registries.delete(requestDelete, (err, res) => {
if (err) {
console.log('Could not delete registry');
@@ -517,7 +517,7 @@ function clearRegistry (client, registryId, projectId, cloudRegion) {
};
const request = {
- parent: registryName
+ parent: registryName,
};
client.projects.locations.registries.devices.list(request, (err, res) => {
@@ -541,13 +541,7 @@ function clearRegistry (client, registryId, projectId, cloudRegion) {
after
);
} else {
- deleteDevice(
- client,
- device.id,
- registryId,
- projectId,
- cloudRegion
- );
+ deleteDevice(client, device.id, registryId, projectId, cloudRegion);
}
});
} else {
@@ -559,7 +553,7 @@ function clearRegistry (client, registryId, projectId, cloudRegion) {
// Delete the given registry. Note that this will only succeed if the registry
// is empty.
-function deleteRegistry (client, registryId, projectId, cloudRegion) {
+function deleteRegistry(client, registryId, projectId, cloudRegion) {
// [START iot_delete_registry]
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
@@ -569,7 +563,7 @@ function deleteRegistry (client, registryId, projectId, cloudRegion) {
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- name: registryName
+ name: registryName,
};
client.projects.locations.registries.delete(request, (err, res) => {
@@ -585,7 +579,7 @@ function deleteRegistry (client, registryId, projectId, cloudRegion) {
}
// Retrieve the given device from the registry.
-function getDevice (client, deviceId, registryId, projectId, cloudRegion) {
+function getDevice(client, deviceId, registryId, projectId, cloudRegion) {
// [START iot_get_device]
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
@@ -596,7 +590,7 @@ function getDevice (client, deviceId, registryId, projectId, cloudRegion) {
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- name: `${registryName}/devices/${deviceId}`
+ name: `${registryName}/devices/${deviceId}`,
};
client.projects.locations.registries.devices.get(request, (err, res) => {
@@ -612,13 +606,7 @@ function getDevice (client, deviceId, registryId, projectId, cloudRegion) {
}
// Retrieve the given device's state from the registry.
-function getDeviceState (
- client,
- deviceId,
- registryId,
- projectId,
- cloudRegion
-) {
+function getDeviceState(client, deviceId, registryId, projectId, cloudRegion) {
// [START iot_get_device_state]
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
@@ -629,10 +617,11 @@ function getDeviceState (
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- name: `${registryName}/devices/${deviceId}`
+ name: `${registryName}/devices/${deviceId}`,
};
- client.projects.locations.registries.devices.states.list(request,
+ client.projects.locations.registries.devices.states.list(
+ request,
(err, data) => {
if (err) {
console.log('Could not find device:', deviceId);
@@ -640,12 +629,13 @@ function getDeviceState (
} else {
console.log('State:', data.data);
}
- });
+ }
+ );
// [END iot_get_device_state]
}
// Retrieve the given device's configuration history from the registry.
-function getDeviceConfigs (
+function getDeviceConfigs(
client,
deviceId,
registryId,
@@ -662,10 +652,11 @@ function getDeviceConfigs (
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- name: `${registryName}/devices/${deviceId}`
+ name: `${registryName}/devices/${deviceId}`,
};
- client.projects.locations.registries.devices.configVersions.list(request,
+ client.projects.locations.registries.devices.configVersions.list(
+ request,
(err, data) => {
if (err) {
console.log('Could not find device:', deviceId);
@@ -673,12 +664,13 @@ function getDeviceConfigs (
} else {
console.log('Configs:', data.data);
}
- });
+ }
+ );
// [END iot_get_device_configs]
}
// Send configuration data to device.
-function setDeviceConfig (
+function setDeviceConfig(
client,
deviceId,
registryId,
@@ -703,10 +695,11 @@ function setDeviceConfig (
const request = {
name: `${registryName}/devices/${deviceId}`,
versionToUpdate: version,
- binaryData: binaryData
+ binaryData: binaryData,
};
- client.projects.locations.registries.devices.modifyCloudToDeviceConfig(request,
+ client.projects.locations.registries.devices.modifyCloudToDeviceConfig(
+ request,
(err, data) => {
if (err) {
console.log('Could not update config:', deviceId);
@@ -714,12 +707,13 @@ function setDeviceConfig (
} else {
console.log('Success :', data);
}
- });
+ }
+ );
// [END iot_set_device_config]
}
// Retrieve the given device from the registry.
-function getRegistry (client, registryId, projectId, cloudRegion) {
+function getRegistry(client, registryId, projectId, cloudRegion) {
// [START iot_get_registry]
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
@@ -729,7 +723,7 @@ function getRegistry (client, registryId, projectId, cloudRegion) {
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- name: `${registryName}`
+ name: `${registryName}`,
};
client.projects.locations.registries.get(request, (err, data) => {
@@ -746,27 +740,31 @@ function getRegistry (client, registryId, projectId, cloudRegion) {
// Returns an authorized API client by discovering the Cloud IoT Core API with
// the provided API key.
-function getClient (serviceAccountJson, cb) {
- google.auth.getClient({
- scopes: ['https://www.googleapis.com/auth/cloud-platform']
- }).then(authClient => {
- const discoveryUrl =
- `${DISCOVERY_API}?version=${API_VERSION}`;
-
- google.options({
- auth: authClient
- });
+function getClient(serviceAccountJson, cb) {
+ google.auth
+ .getClient({
+ scopes: ['https://www.googleapis.com/auth/cloud-platform'],
+ })
+ .then(authClient => {
+ const discoveryUrl = `${DISCOVERY_API}?version=${API_VERSION}`;
- google.discoverAPI(discoveryUrl).then((client) => {
- cb(client);
- }).catch((err) => {
- console.log('Error during API discovery.', err);
+ google.options({
+ auth: authClient,
+ });
+
+ google
+ .discoverAPI(discoveryUrl)
+ .then(client => {
+ cb(client);
+ })
+ .catch(err => {
+ console.log('Error during API discovery.', err);
+ });
});
- });
}
// Retrieves the IAM policy for a given registry.
-function getIamPolicy (client, registryId, projectId, cloudRegion) {
+function getIamPolicy(client, registryId, projectId, cloudRegion) {
// [START iot_get_iam_policy]
// Client retrieved in callback
// getClient(serviceAccountJson, function(client) {...});
@@ -776,7 +774,7 @@ function getIamPolicy (client, registryId, projectId, cloudRegion) {
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- 'resource_': `${registryName}`
+ resource_: `${registryName}`,
};
client.projects.locations.registries.getIamPolicy(request, (err, data) => {
@@ -787,10 +785,10 @@ function getIamPolicy (client, registryId, projectId, cloudRegion) {
data = data.data;
console.log(`ETAG: ${data.etag}`);
data.bindings = data.bindings || [];
- data.bindings.forEach((_binding) => {
+ data.bindings.forEach(_binding => {
console.log(`Role: ${_binding.role}`);
_binding.members || (_binding.members = []);
- _binding.members.forEach((_member) => {
+ _binding.members.forEach(_member => {
console.log(`\t${_member}`);
});
});
@@ -800,7 +798,7 @@ function getIamPolicy (client, registryId, projectId, cloudRegion) {
}
// Sets the IAM permissions for a given registry to a single member / role.
-function setIamPolicy (
+function setIamPolicy(
client,
registryId,
projectId,
@@ -817,13 +815,17 @@ function setIamPolicy (
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
- 'resource_': `${registryName}`,
- 'resource': {'policy': {
- 'bindings': [{
- 'members': member,
- 'role': role
- }]
- }}
+ resource_: `${registryName}`,
+ resource: {
+ policy: {
+ bindings: [
+ {
+ members: member,
+ role: role,
+ },
+ ],
+ },
+ },
};
client.projects.locations.registries.setIamPolicy(request, (err, data) => {
@@ -834,10 +836,10 @@ function setIamPolicy (
console.log(`ETAG: ${data.etag}`);
console.log(JSON.stringify(data));
data.bindings = data.bindings || [];
- data.bindings.forEach((_binding) => {
+ data.bindings.forEach(_binding => {
console.log(`Role: ${_binding.role}`);
_binding.members || (_binding.members = []);
- _binding.members.forEach((_member) => {
+ _binding.members.forEach(_member => {
console.log(`\t${_member}`);
});
});
@@ -853,29 +855,30 @@ require(`yargs`) // eslint-disable-line
alias: 'c',
default: 'us-central1',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
projectId: {
alias: 'p',
default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
- description: 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
+ description:
+ 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
serviceAccount: {
alias: 's',
default: process.env.GOOGLE_APPLICATION_CREDENTIALS,
description: 'The path to your service credentials JSON.',
requiresArg: true,
- type: 'string'
- }
+ type: 'string',
+ },
})
.command(
`createRsa256Device `,
`Creates an RSA256 device.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
createRsaDevice(
client,
opts.deviceId,
@@ -892,8 +895,8 @@ require(`yargs`) // eslint-disable-line
`createEs256Device `,
`Creates an ES256 device.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
createEsDevice(
client,
opts.deviceId,
@@ -910,8 +913,8 @@ require(`yargs`) // eslint-disable-line
`createUnauthDevice `,
`Creates a device without authorization.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
createUnauthDevice(
client,
opts.deviceId,
@@ -927,8 +930,8 @@ require(`yargs`) // eslint-disable-line
`createRegistry `,
`Creates a device registry.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
lookupOrCreateRegistry(
client,
opts.registryId,
@@ -944,20 +947,20 @@ require(`yargs`) // eslint-disable-line
`createIotTopic `,
`Creates and configures a PubSub topic for Cloud IoT Core.`,
{},
- (opts) => createIotTopic(opts.pubsubTopic)
+ opts => createIotTopic(opts.pubsubTopic)
)
.command(
`setupIotTopic `,
`Configures the PubSub topic for Cloud IoT Core.`,
{},
- (opts) => setupIotTopic(opts.pubsubTopic)
+ opts => setupIotTopic(opts.pubsubTopic)
)
.command(
`deleteDevice `,
`Deletes a device from the device registry.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
deleteDevice(
client,
opts.deviceId,
@@ -973,8 +976,8 @@ require(`yargs`) // eslint-disable-line
`clearRegistry `,
`!!Be careful! Removes all devices and then deletes a device registry!!`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
clearRegistry(
client,
opts.registryId,
@@ -989,8 +992,8 @@ require(`yargs`) // eslint-disable-line
`deleteRegistry `,
`Deletes a device registry.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
deleteRegistry(
client,
opts.registryId,
@@ -1005,8 +1008,8 @@ require(`yargs`) // eslint-disable-line
`getDevice `,
`Retrieves device info given a device ID.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
getDevice(
client,
opts.deviceId,
@@ -1022,8 +1025,8 @@ require(`yargs`) // eslint-disable-line
`getDeviceConfigs `,
`Retrieves device configurations given a device ID.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
getDeviceConfigs(
client,
opts.deviceId,
@@ -1039,8 +1042,8 @@ require(`yargs`) // eslint-disable-line
`getDeviceState `,
`Retrieves device state given a device ID.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
getDeviceState(
client,
opts.deviceId,
@@ -1052,23 +1055,18 @@ require(`yargs`) // eslint-disable-line
getClient(opts.serviceAccount, cb);
}
)
- .command(
- `getRegistry `,
- `Retrieves a registry.`,
- {},
- (opts) => {
- const cb = function (client) {
- getRegistry(client, opts.registryId, opts.projectId, opts.cloudRegion);
- };
- getClient(opts.serviceAccount, cb);
- }
- )
+ .command(`getRegistry `, `Retrieves a registry.`, {}, opts => {
+ const cb = function(client) {
+ getRegistry(client, opts.registryId, opts.projectId, opts.cloudRegion);
+ };
+ getClient(opts.serviceAccount, cb);
+ })
.command(
`listDevices `,
`Lists the devices in a given registry.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
listDevices(client, opts.registryId, opts.projectId, opts.cloudRegion);
};
getClient(opts.serviceAccount, cb);
@@ -1078,8 +1076,8 @@ require(`yargs`) // eslint-disable-line
`listRegistries`,
`Lists the registries in a given project.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
listRegistries(client, opts.projectId, opts.cloudRegion);
};
getClient(opts.serviceAccount, cb);
@@ -1089,8 +1087,8 @@ require(`yargs`) // eslint-disable-line
`patchEs256 `,
`Patches a device with ES256 authorization credentials.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
patchEs256ForAuth(
client,
opts.deviceId,
@@ -1107,8 +1105,8 @@ require(`yargs`) // eslint-disable-line
`patchRsa256 `,
`Patches a device with RSA256 authentication credentials.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
patchRsa256ForAuth(
client,
opts.deviceId,
@@ -1125,8 +1123,8 @@ require(`yargs`) // eslint-disable-line
`setConfig `,
`Sets a devices configuration to the specified data.`,
{},
- (opts) => {
- const cb = function (client) {
+ opts => {
+ const cb = function(client) {
setDeviceConfig(
client,
opts.deviceId,
@@ -1144,14 +1142,9 @@ require(`yargs`) // eslint-disable-line
`getIamPolicy `,
`Gets the IAM permissions for a given registry`,
{},
- (opts) => {
- const cb = function (client) {
- getIamPolicy(
- client,
- opts.registryId,
- opts.projectId,
- opts.cloudRegion
- );
+ opts => {
+ const cb = function(client) {
+ getIamPolicy(client, opts.registryId, opts.projectId, opts.cloudRegion);
};
getClient(opts.serviceAccount, cb);
}
@@ -1160,9 +1153,10 @@ require(`yargs`) // eslint-disable-line
`setIamPolicy `,
`Gets the IAM permissions for a given registry`,
{},
- (opts) => {
- const cb = function (client) {
- setIamPolicy(client,
+ opts => {
+ const cb = function(client) {
+ setIamPolicy(
+ client,
opts.registryId,
opts.projectId,
opts.cloudRegion,
@@ -1173,9 +1167,15 @@ require(`yargs`) // eslint-disable-line
getClient(opts.serviceAccount, cb);
}
)
- .example(`node $0 createEs256Device my-es-device my-registry ../ec_public.pem`)
- .example(`node $0 createRegistry my-registry my-iot-topic --serviceAccount=$secure/svc.json --projectId=my-project-id`)
- .example(`node $0 createRsa256Device my-rsa-device my-registry ../rsa_cert.pem`)
+ .example(
+ `node $0 createEs256Device my-es-device my-registry ../ec_public.pem`
+ )
+ .example(
+ `node $0 createRegistry my-registry my-iot-topic --serviceAccount=$secure/svc.json --projectId=my-project-id`
+ )
+ .example(
+ `node $0 createRsa256Device my-rsa-device my-registry ../rsa_cert.pem`
+ )
.example(`node $0 createUnauthDevice my-device my-registry`)
.example(`node $0 deleteDevice my-device my-registry`)
.example(`node $0 deleteRegistry my-device my-registry`)
@@ -1183,16 +1183,23 @@ require(`yargs`) // eslint-disable-line
.example(`node $0 getDeviceState my-device my-registry`)
.example(`node $0 getIamPolicy my-registry`)
.example(`node $0 getRegistry my-registry`)
- .example(`node $0 listDevices -s path/svc.json -p your-project-id -c asia-east1 my-registry`)
- .example(`node $0 listRegistries -s path/svc.json -p your-project-id -c europe-west1`)
+ .example(
+ `node $0 listDevices -s path/svc.json -p your-project-id -c asia-east1 my-registry`
+ )
+ .example(
+ `node $0 listRegistries -s path/svc.json -p your-project-id -c europe-west1`
+ )
.example(`node $0 patchRsa256 my-device my-registry ../rsa_cert.pem`)
.example(`node $0 patchEs256 my-device my-registry ../ec_public.pem`)
.example(`node $0 setConfig my-device my-registry "test" 0`)
- .example(`node $0 setIamPolicy my-registry user:example@example.com roles/viewer`)
- .example(`node $0 setupTopic my-iot-topic --serviceAccount=$HOME/creds_iot.json --projectId=my-project-id`)
+ .example(
+ `node $0 setIamPolicy my-registry user:example@example.com roles/viewer`
+ )
+ .example(
+ `node $0 setupTopic my-iot-topic --serviceAccount=$HOME/creds_iot.json --projectId=my-project-id`
+ )
.wrap(120)
.recommendCommands()
.epilogue(`For more information, see https://cloud.google.com/iot-core/docs`)
.help()
- .strict()
- .argv;
+ .strict().argv;
diff --git a/iot/manager/package.json b/iot/manager/package.json
index 197a154a6d..2749571db8 100644
--- a/iot/manager/package.json
+++ b/iot/manager/package.json
@@ -10,11 +10,9 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=4.3.2"
+ "node": ">=8.0.0"
},
"scripts": {
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"test": "repo-tools test run --cmd ava -- -T 3m --verbose system-test/*.test.js"
},
"dependencies": {
@@ -25,7 +23,6 @@
"devDependencies": {
"@google-cloud/nodejs-repo-tools": "^3.0.0",
"ava": "0.25.0",
- "semistandard": "^12.0.1",
"uuid": "3.3.2"
},
"cloud-repo-tools": {
diff --git a/iot/manager/system-test/manager.test.js b/iot/manager/system-test/manager.test.js
index afdbb6348f..02f16c0494 100644
--- a/iot/manager/system-test/manager.test.js
+++ b/iot/manager/system-test/manager.test.js
@@ -29,184 +29,247 @@ const cwd = path.join(__dirname, `..`);
test.before(tools.checkCredentials);
test.before(async () => {
const pubsub = PubSub();
- return pubsub.createTopic(topicName)
- .then((results) => {
- const topic = results[0];
- console.log(`Topic ${topic.name} created.`);
- return topic;
- });
+ return pubsub.createTopic(topicName).then(results => {
+ const topic = results[0];
+ console.log(`Topic ${topic.name} created.`);
+ return topic;
+ });
});
test.after.always(async () => {
const pubsub = PubSub();
const topic = pubsub.topic(topicName);
- return topic.delete()
- .then(() => {
- console.log(`Topic ${topic.name} deleted.`);
- });
+ return topic.delete().then(() => {
+ console.log(`Topic ${topic.name} deleted.`);
+ });
});
-test(`should create and delete an unauthorized device`, async (t) => {
+test(`should create and delete an unauthorized device`, async t => {
const localDevice = `test-device`;
const localRegName = `${registryName}-unauth`;
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
output = await tools.runAsync(
- `${cmd} createUnauthDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} createUnauthDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Created device`));
output = await tools.runAsync(
- `${cmd} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Successfully deleted device`));
output = await tools.runAsync(`${cmd} deleteRegistry ${localRegName}`, cwd);
});
-test(`should list configs for a device`, async (t) => {
+test(`should list configs for a device`, async t => {
const localDevice = `test-device-configs`;
const localRegName = `${registryName}-unauth`;
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
output = await tools.runAsync(
- `${cmd} createUnauthDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} createUnauthDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Created device`));
output = await tools.runAsync(
- `${cmd} getDeviceConfigs ${localDevice} ${localRegName}`, cwd);
+ `${cmd} getDeviceConfigs ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Configs`));
output = await tools.runAsync(
- `${cmd} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Successfully deleted device`));
output = await tools.runAsync(`${cmd} deleteRegistry ${localRegName}`, cwd);
});
-test(`should create and delete an RSA256 device`, async (t) => {
+test(`should create and delete an RSA256 device`, async t => {
const localDevice = `test-rsa-device`;
const localRegName = `${registryName}-rsa256`;
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
output = await tools.runAsync(
- `${cmd} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`, cwd);
+ `${cmd} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`,
+ cwd
+ );
t.regex(output, new RegExp(`Created device`));
output = await tools.runAsync(
- `${cmd} getDeviceState ${localDevice} ${localRegName}`, cwd);
+ `${cmd} getDeviceState ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`State`));
output = await tools.runAsync(
- `${cmd} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Successfully deleted device`));
output = await tools.runAsync(`${cmd} deleteRegistry ${localRegName}`, cwd);
});
-test(`should create and delete an EC256 device`, async (t) => {
+test(`should create and delete an EC256 device`, async t => {
const localDevice = `test-es-device`;
const localRegName = `${registryName}-es256`;
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
output = await tools.runAsync(
- `${cmd} createEs256Device ${localDevice} ${localRegName} resources/ec_public.pem`, cwd);
+ `${cmd} createEs256Device ${localDevice} ${localRegName} resources/ec_public.pem`,
+ cwd
+ );
t.regex(output, new RegExp(`Created device`));
output = await tools.runAsync(
- `${cmd} getDeviceState ${localDevice} ${localRegName}`, cwd);
+ `${cmd} getDeviceState ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`State`));
output = await tools.runAsync(
- `${cmd} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Successfully deleted device`));
output = await tools.runAsync(`${cmd} deleteRegistry ${localRegName}`, cwd);
});
-test(`should patch an unauthorized device with RSA256`, async (t) => {
+test(`should patch an unauthorized device with RSA256`, async t => {
const localDevice = `patchme`;
const localRegName = `${registryName}-patchRSA`;
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
output = await tools.runAsync(
- `${cmd} createUnauthDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} createUnauthDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Created device`));
output = await tools.runAsync(
- `${cmd} patchRsa256 ${localDevice} ${localRegName} resources/rsa_cert.pem`, cwd);
+ `${cmd} patchRsa256 ${localDevice} ${localRegName} resources/rsa_cert.pem`,
+ cwd
+ );
t.regex(output, new RegExp(`Patched device:`));
output = await tools.runAsync(
- `${cmd} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Successfully deleted device`));
output = await tools.runAsync(`${cmd} deleteRegistry ${localRegName}`, cwd);
});
-test(`should patch an unauthorized device with RSA256`, async (t) => {
+test(`should patch an unauthorized device with RSA256`, async t => {
const localDevice = `patchme`;
const localRegName = `${registryName}-patchES`;
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
output = await tools.runAsync(
- `${cmd} createUnauthDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} createUnauthDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Created device`));
output = await tools.runAsync(
- `${cmd} patchEs256 ${localDevice} ${localRegName} resources/ec_public.pem`, cwd);
+ `${cmd} patchEs256 ${localDevice} ${localRegName} resources/ec_public.pem`,
+ cwd
+ );
t.regex(output, new RegExp(`Patched device:`));
output = await tools.runAsync(
- `${cmd} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Successfully deleted device`));
output = await tools.runAsync(`${cmd} deleteRegistry ${localRegName}`, cwd);
});
-test(`should create and list devices`, async (t) => {
+test(`should create and list devices`, async t => {
const localDevice = `test-device`;
const localRegName = `${registryName}-list`;
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
output = await tools.runAsync(
- `${cmd} createUnauthDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} createUnauthDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Created device`));
- output = await tools.runAsync(
- `${cmd} listDevices ${localRegName}`, cwd);
+ output = await tools.runAsync(`${cmd} listDevices ${localRegName}`, cwd);
t.regex(output, /Current devices in registry:/);
t.regex(output, new RegExp(localDevice));
output = await tools.runAsync(
- `${cmd} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Successfully deleted device`));
output = await tools.runAsync(`${cmd} deleteRegistry ${localRegName}`, cwd);
});
-test(`should create and get a device`, async (t) => {
+test(`should create and get a device`, async t => {
const localDevice = `test-device`;
const localRegName = `${registryName}-get`;
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
output = await tools.runAsync(
- `${cmd} createUnauthDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} createUnauthDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Created device`));
output = await tools.runAsync(
- `${cmd} getDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} getDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Found device: ${localDevice}`));
output = await tools.runAsync(
- `${cmd} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${cmd} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Successfully deleted device`));
output = await tools.runAsync(`${cmd} deleteRegistry ${localRegName}`, cwd);
});
-test(`should create and get an iam policy`, async (t) => {
+test(`should create and get an iam policy`, async t => {
const localMember = `group:dpebot@google.com`;
const localRole = `roles/viewer`;
const localRegName = `${registryName}-get`;
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
output = await tools.runAsync(
- `${cmd} setIamPolicy ${localRegName} ${localMember} ${localRole}`, cwd);
+ `${cmd} setIamPolicy ${localRegName} ${localMember} ${localRole}`,
+ cwd
+ );
t.regex(output, new RegExp(`ETAG`));
output = await tools.runAsync(`${cmd} getIamPolicy ${localRegName}`, cwd);
t.regex(output, new RegExp(`dpebot`));
output = await tools.runAsync(`${cmd} deleteRegistry ${localRegName}`, cwd);
});
-test(`should create and delete a registry`, async (t) => {
+test(`should create and delete a registry`, async t => {
let output = await tools.runAsync(`${cmd} setupIotTopic ${topicName}`, cwd);
output = await tools.runAsync(
- `${cmd} createRegistry ${registryName} ${topicName}`, cwd);
+ `${cmd} createRegistry ${registryName} ${topicName}`,
+ cwd
+ );
t.regex(output, new RegExp(`Successfully created registry`));
output = await tools.runAsync(`${cmd} deleteRegistry ${registryName}`, cwd);
t.regex(output, new RegExp(`Successfully deleted registry`));
diff --git a/iot/mqtt_example/cloudiot_mqtt_example_nodejs.js b/iot/mqtt_example/cloudiot_mqtt_example_nodejs.js
index 7a8143c36f..9160015bed 100644
--- a/iot/mqtt_example/cloudiot_mqtt_example_nodejs.js
+++ b/iot/mqtt_example/cloudiot_mqtt_example_nodejs.js
@@ -41,102 +41,104 @@ var argv = require(`yargs`)
.options({
projectId: {
default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
- description: 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
+ description:
+ 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
cloudRegion: {
default: 'us-central1',
description: 'GCP cloud region.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
registryId: {
description: 'Cloud IoT registry ID.',
requiresArg: true,
demandOption: true,
- type: 'string'
+ type: 'string',
},
deviceId: {
description: 'Cloud IoT device ID.',
requiresArg: true,
demandOption: true,
- type: 'string'
+ type: 'string',
},
privateKeyFile: {
description: 'Path to private key file.',
requiresArg: true,
demandOption: true,
- type: 'string'
+ type: 'string',
},
algorithm: {
description: 'Encryption algorithm to generate the JWT.',
requiresArg: true,
demandOption: true,
choices: ['RS256', 'ES256'],
- type: 'string'
+ type: 'string',
},
numMessages: {
default: 100,
description: 'Number of messages to publish.',
requiresArg: true,
- type: 'number'
+ type: 'number',
},
tokenExpMins: {
default: 20,
description: 'Minutes to JWT token expiration.',
requiresArg: true,
- type: 'number'
+ type: 'number',
},
mqttBridgeHostname: {
default: 'mqtt.googleapis.com',
description: 'MQTT bridge hostname.',
requiresArg: true,
- type: 'string'
+ type: 'string',
},
mqttBridgePort: {
default: 8883,
description: 'MQTT bridge port.',
requiresArg: true,
- type: 'number'
+ type: 'number',
},
messageType: {
default: 'events',
description: 'Message type to publish.',
requiresArg: true,
choices: ['events', 'state'],
- type: 'string'
- }
+ type: 'string',
+ },
})
- .example(`node $0 cloudiot_mqtt_example_nodejs.js --projectId=blue-jet-123 \\\n\t--registryId=my-registry --deviceId=my-node-device \\\n\t--privateKeyFile=../rsa_private.pem --algorithm=RS256 \\\n\t --cloudRegion=us-central1`)
+ .example(
+ `node $0 cloudiot_mqtt_example_nodejs.js --projectId=blue-jet-123 \\\n\t--registryId=my-registry --deviceId=my-node-device \\\n\t--privateKeyFile=../rsa_private.pem --algorithm=RS256 \\\n\t --cloudRegion=us-central1`
+ )
.wrap(120)
.recommendCommands()
.epilogue(`For more information, see https://cloud.google.com/iot-core/docs`)
.help()
- .strict()
- .argv;
+ .strict().argv;
// Create a Cloud IoT Core JWT for the given project id, signed with the given
// private key.
// [START iot_mqtt_jwt]
-function createJwt (projectId, privateKeyFile, algorithm) {
+function createJwt(projectId, privateKeyFile, algorithm) {
// Create a JWT to authenticate this device. The device will be disconnected
// after the token expires, and will have to reconnect with a new token. The
// audience field should always be set to the GCP project id.
const token = {
- 'iat': parseInt(Date.now() / 1000),
- 'exp': parseInt(Date.now() / 1000) + 20 * 60, // 20 minutes
- 'aud': projectId
+ iat: parseInt(Date.now() / 1000),
+ exp: parseInt(Date.now() / 1000) + 20 * 60, // 20 minutes
+ aud: projectId,
};
const privateKey = fs.readFileSync(privateKeyFile);
- return jwt.sign(token, privateKey, { algorithm: algorithm });
+ return jwt.sign(token, privateKey, {algorithm: algorithm});
}
// [END iot_mqtt_jwt]
// Publish numMessages messages asynchronously, starting from message
// messagesSent.
// [START iot_mqtt_publish]
-function publishAsync (messagesSent, numMessages) {
+function publishAsync(messagesSent, numMessages) {
// If we have published enough messages or backed off too many times, stop.
if (messagesSent > numMessages || backoffTime >= MAXIMUM_BACKOFF_TIME) {
if (backoffTime >= MAXIMUM_BACKOFF_TIME) {
@@ -157,13 +159,15 @@ function publishAsync (messagesSent, numMessages) {
console.log(`Backing off for ${publishDelayMs}ms before publishing.`);
}
- setTimeout(function () {
- const payload = `${argv.registryId}/${argv.deviceId}-payload-${messagesSent}`;
+ setTimeout(function() {
+ const payload = `${argv.registryId}/${
+ argv.deviceId
+ }-payload-${messagesSent}`;
// Publish "payload" to the MQTT topic. qos=1 means at least once delivery.
// Cloud IoT Core also supports qos=0 for at most once delivery.
console.log('Publishing message:', payload);
- client.publish(mqttTopic, payload, { qos: 1 }, function (err) {
+ client.publish(mqttTopic, payload, {qos: 1}, function(err) {
if (!err) {
shouldBackoff = false;
backoffTime = MINIMUM_BACKOFF_TIME;
@@ -171,7 +175,7 @@ function publishAsync (messagesSent, numMessages) {
});
var schedulePublishDelayMs = argv.messageType === 'events' ? 1000 : 2000;
- setTimeout(function () {
+ setTimeout(function() {
// [START iot_mqtt_jwt_refresh]
let secsFromIssue = parseInt(Date.now() / 1000) - iatTime;
if (secsFromIssue > argv.tokenExpMins * 60) {
@@ -179,13 +183,17 @@ function publishAsync (messagesSent, numMessages) {
console.log(`\tRefreshing token after ${secsFromIssue} seconds.`);
client.end();
- connectionArgs.password = createJwt(argv.projectId, argv.privateKeyFile, argv.algorithm);
+ connectionArgs.password = createJwt(
+ argv.projectId,
+ argv.privateKeyFile,
+ argv.algorithm
+ );
connectionArgs.protocolId = 'MQTT';
connectionArgs.protocolVersion = 4;
connectionArgs.clean = true;
client = mqtt.connect(connectionArgs);
- client.on('connect', (success) => {
+ client.on('connect', success => {
console.log('connect');
if (!success) {
console.log('Client not connected...');
@@ -199,12 +207,15 @@ function publishAsync (messagesSent, numMessages) {
shouldBackoff = true;
});
- client.on('error', (err) => {
+ client.on('error', err => {
console.log('error', err);
});
- client.on('message', (topic, message, packet) => {
- console.log('message received: ', Buffer.from(message, 'base64').toString('ascii'));
+ client.on('message', (topic, message) => {
+ console.log(
+ 'message received: ',
+ Buffer.from(message, 'base64').toString('ascii')
+ );
});
client.on('packetsend', () => {
@@ -221,7 +232,9 @@ function publishAsync (messagesSent, numMessages) {
// [START iot_mqtt_run]
// The mqttClientId is a unique string that identifies this device. For Google
// Cloud IoT Core, it must be in the format below.
-const mqttClientId = `projects/${argv.projectId}/locations/${argv.cloudRegion}/registries/${argv.registryId}/devices/${argv.deviceId}`;
+const mqttClientId = `projects/${argv.projectId}/locations/${
+ argv.cloudRegion
+}/registries/${argv.registryId}/devices/${argv.deviceId}`;
// With Google Cloud IoT Core, the username field is ignored, however it must be
// non-empty. The password field is used to transmit a JWT to authorize the
@@ -234,7 +247,7 @@ let connectionArgs = {
username: 'unused',
password: createJwt(argv.projectId, argv.privateKeyFile, argv.algorithm),
protocol: 'mqtts',
- secureProtocol: 'TLSv1_2_method'
+ secureProtocol: 'TLSv1_2_method',
};
// Create a client, and connect to the Google MQTT bridge.
@@ -250,7 +263,7 @@ client.subscribe(`/devices/${argv.deviceId}/config`, {qos: 1});
// not the same as the device registry's Cloud Pub/Sub topic.
const mqttTopic = `/devices/${argv.deviceId}/${argv.messageType}`;
-client.on('connect', (success) => {
+client.on('connect', success => {
console.log('connect');
if (!success) {
console.log('Client not connected...');
@@ -264,12 +277,15 @@ client.on('close', () => {
shouldBackoff = true;
});
-client.on('error', (err) => {
+client.on('error', err => {
console.log('error', err);
});
-client.on('message', (topic, message, packet) => {
- console.log('message received: ', Buffer.from(message, 'base64').toString('ascii'));
+client.on('message', (topic, message) => {
+ console.log(
+ 'message received: ',
+ Buffer.from(message, 'base64').toString('ascii')
+ );
});
client.on('packetsend', () => {
diff --git a/iot/mqtt_example/package.json b/iot/mqtt_example/package.json
index 68d00762e0..ad035bb3da 100644
--- a/iot/mqtt_example/package.json
+++ b/iot/mqtt_example/package.json
@@ -6,8 +6,6 @@
"main": "cloudiot_mqtt_example_nodejs.js",
"name": "nodejs-docs-samples-iot-mqtt-example",
"scripts": {
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"test": "repo-tools test run --cmd ava -- -T 3m --verbose system-test/*.test.js"
},
"dependencies": {
@@ -21,7 +19,6 @@
"@google-cloud/pubsub": "0.16.4",
"@google-cloud/nodejs-repo-tools": "^3.0.0",
"ava": "0.25.0",
- "semistandard": "^12.0.1",
"uuid": "3.3.2"
}
}
diff --git a/iot/mqtt_example/system-test/cloudiot_mqtt_example.test.js b/iot/mqtt_example/system-test/cloudiot_mqtt_example.test.js
index 48c3bd1414..61d37cf512 100644
--- a/iot/mqtt_example/system-test/cloudiot_mqtt_example.test.js
+++ b/iot/mqtt_example/system-test/cloudiot_mqtt_example.test.js
@@ -33,88 +33,116 @@ test.todo(tools.run(installDeps, `${cwd}/../manager`));
test.before(tools.checkCredentials);
test.before(async () => {
const pubsub = PubSub();
- return pubsub.createTopic(topicName)
- .then((results) => {
- const topic = results[0];
- console.log(`Topic ${topic.name} created.`);
- return topic;
- });
+ return pubsub.createTopic(topicName).then(results => {
+ const topic = results[0];
+ console.log(`Topic ${topic.name} created.`);
+ return topic;
+ });
});
test.after.always(async () => {
const pubsub = PubSub();
const topic = pubsub.topic(topicName);
- return topic.delete()
- .then(() => {
- console.log(`Topic ${topic.name} deleted.`);
- });
+ return topic.delete().then(() => {
+ console.log(`Topic ${topic.name} deleted.`);
+ });
});
-test(`should receive configuration message`, async (t) => {
+test(`should receive configuration message`, async t => {
const localDevice = `test-rsa-device`;
const localRegName = `${registryName}-rsa256`;
- let output = await tools.runAsync(`${helper} setupIotTopic ${topicName}`, cwd);
+ let output = await tools.runAsync(
+ `${helper} setupIotTopic ${topicName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${helper} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`, cwd);
+ `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`,
+ cwd
+ );
output = await tools.runAsync(
`${cmd} --messageType=events --registryId="${localRegName}" --deviceId="${localDevice}" ${cmdSuffix}`,
- cwd);
+ cwd
+ );
// TODO: Figure out how to guarantee configuration update happens on connect
t.regex(output, new RegExp(`connect`));
// Check / cleanup
await tools.runAsync(
- `${helper} getDeviceState ${localDevice} ${localRegName}`, cwd);
+ `${helper} getDeviceState ${localDevice} ${localRegName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${helper} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
await tools.runAsync(`${helper} deleteRegistry ${localRegName}`, cwd);
});
-test(`should send event message`, async (t) => {
+test(`should send event message`, async t => {
const localDevice = `test-rsa-device`;
const localRegName = `${registryName}-rsa256`;
await tools.runAsync(`${helper} setupIotTopic ${topicName}`, cwd);
await tools.runAsync(
- `${helper} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${helper} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`, cwd);
+ `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`,
+ cwd
+ );
const output = await tools.runAsync(
`${cmd} --messageType=events --registryId="${localRegName}" --deviceId="${localDevice}" ${cmdSuffix}`,
- cwd);
+ cwd
+ );
t.regex(output, new RegExp(`Publishing message:`));
// Check / cleanup
await tools.runAsync(
- `${helper} getDeviceState ${localDevice} ${localRegName}`, cwd);
+ `${helper} getDeviceState ${localDevice} ${localRegName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${helper} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
await tools.runAsync(`${helper} deleteRegistry ${localRegName}`, cwd);
});
-test(`should send state message`, async (t) => {
+test(`should send state message`, async t => {
const localDevice = `test-rsa-device`;
const localRegName = `${registryName}-rsa256`;
await tools.runAsync(`${helper} setupIotTopic ${topicName}`, cwd);
await tools.runAsync(
- `${helper} createRegistry ${localRegName} ${topicName}`, cwd);
+ `${helper} createRegistry ${localRegName} ${topicName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`, cwd);
+ `${helper} createRsa256Device ${localDevice} ${localRegName} resources/rsa_cert.pem`,
+ cwd
+ );
const output = await tools.runAsync(
`${cmd} --messageType=state --registryId="${localRegName}" --deviceId="${localDevice}" ${cmdSuffix}`,
- cwd);
+ cwd
+ );
t.regex(output, new RegExp(`Publishing message:`));
// Check / cleanup
await tools.runAsync(
- `${helper} getDeviceState ${localDevice} ${localRegName}`, cwd);
+ `${helper} getDeviceState ${localDevice} ${localRegName}`,
+ cwd
+ );
await tools.runAsync(
- `${helper} deleteDevice ${localDevice} ${localRegName}`, cwd);
+ `${helper} deleteDevice ${localDevice} ${localRegName}`,
+ cwd
+ );
await tools.runAsync(`${helper} deleteRegistry ${localRegName}`, cwd);
});
diff --git a/iot/scripts/iam.js b/iot/scripts/iam.js
index aac1bee4ce..13cca1a170 100644
--- a/iot/scripts/iam.js
+++ b/iot/scripts/iam.js
@@ -1,5 +1,3 @@
-#!/usr/bin/env node
-
/**
* Copyright 2017, Google, Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,7 +20,7 @@
* For more information, see https://cloud.google.com/iot.
*/
-function setTopicPolicy (topicName) {
+function setTopicPolicy(topicName) {
// Imports the Google Cloud client library
const PubSub = require('@google-cloud/pubsub');
@@ -35,8 +33,9 @@ function setTopicPolicy (topicName) {
// The new IAM policy
const serviceAccount = 'serviceAccount:cloud-iot@system.gserviceaccount.com';
- topic.iam.getPolicy()
- .then((results) => {
+ topic.iam
+ .getPolicy()
+ .then(results => {
const policy = results[0] || {};
policy.bindings || (policy.bindings = []);
console.log(JSON.stringify(policy, null, 2));
@@ -44,10 +43,10 @@ function setTopicPolicy (topicName) {
let hasRole = false;
let binding = {
role: 'roles/pubsub.publisher',
- members: [serviceAccount]
+ members: [serviceAccount],
};
- policy.bindings.forEach((_binding) => {
+ policy.bindings.forEach(_binding => {
if (_binding.role === binding.role) {
binding = _binding;
hasRole = true;
@@ -67,12 +66,12 @@ function setTopicPolicy (topicName) {
// Updates the IAM policy for the topic
return topic.iam.setPolicy(policy);
})
- .then((results) => {
+ .then(results => {
const updatedPolicy = results[0];
console.log(JSON.stringify(updatedPolicy, null, 2));
})
- .catch((err) => {
+ .catch(err => {
console.error('ERROR:', err);
});
}
diff --git a/jobs/cjd_sample/package.json b/jobs/cjd_sample/package.json
index a35a1894b9..72718c11eb 100644
--- a/jobs/cjd_sample/package.json
+++ b/jobs/cjd_sample/package.json
@@ -9,11 +9,9 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=4.3.2"
+ "node": ">=8.0.0"
},
"scripts": {
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"test": "repo-tools test run --cmd ava -- -T 20s --verbose system-test/*.test.js"
},
"dependencies": {
@@ -21,7 +19,6 @@
},
"devDependencies": {
"@google-cloud/nodejs-repo-tools": "^3.0.0",
- "ava": "0.25.0",
- "semistandard": "^13.0.1"
+ "ava": "0.25.0"
}
}
diff --git a/jobs/cjd_sample/quickstart.js b/jobs/cjd_sample/quickstart.js
index acd28577ac..a5be7d494a 100755
--- a/jobs/cjd_sample/quickstart.js
+++ b/jobs/cjd_sample/quickstart.js
@@ -18,32 +18,34 @@
// [START quickstart]
// Imports the Google APIs client library
-const { google } = require('googleapis');
-
-google.auth.getClient({ scopes: ['https://www.googleapis.com/auth/jobs'] }).then((auth) => {
- // Instantiates an authorized client
- const jobs = google.jobs({
- version: 'v2',
- auth
- });
-
- // Lists companies
- jobs.companies.list((err, result) => {
- if (err) {
- console.error(err);
- return;
- }
-
- console.log(`Request ID: ${result.data.metadata.requestId}`);
-
- const companies = result.data.companies || [];
-
- if (companies.length) {
- console.log('Companies:');
- companies.forEach((company) => console.log(company.name));
- } else {
- console.log(`No companies found.`);
- }
+const {google} = require('googleapis');
+
+google.auth
+ .getClient({scopes: ['https://www.googleapis.com/auth/jobs']})
+ .then(auth => {
+ // Instantiates an authorized client
+ const jobs = google.jobs({
+ version: 'v2',
+ auth,
+ });
+
+ // Lists companies
+ jobs.companies.list((err, result) => {
+ if (err) {
+ console.error(err);
+ return;
+ }
+
+ console.log(`Request ID: ${result.data.metadata.requestId}`);
+
+ const companies = result.data.companies || [];
+
+ if (companies.length) {
+ console.log('Companies:');
+ companies.forEach(company => console.log(company.name));
+ } else {
+ console.log(`No companies found.`);
+ }
+ });
});
-});
// [END quickstart]
diff --git a/kms/keys.js b/kms/keys.js
index 9effd9edd8..e1590cea46 100644
--- a/kms/keys.js
+++ b/kms/keys.js
@@ -17,7 +17,7 @@
const Buffer = require('safe-buffer').Buffer;
-function createKeyRing (projectId, locationId, keyRingId) {
+function createKeyRing(projectId, locationId, keyRingId) {
// [START kms_create_keyring]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -39,7 +39,7 @@ function createKeyRing (projectId, locationId, keyRingId) {
// This will be a path parameter in the request URL
parent: `projects/${projectId}/locations/${locationId}`,
// This will be a path parameter in the request URL
- keyRingId: keyRingId
+ keyRingId: keyRingId,
};
// Creates a new key ring
@@ -55,7 +55,7 @@ function createKeyRing (projectId, locationId, keyRingId) {
// [END kms_create_keyring]
}
-function listKeyRings (projectId, locationId) {
+function listKeyRings(projectId, locationId) {
// [START kms_list_keyrings]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -72,7 +72,7 @@ function listKeyRings (projectId, locationId) {
const request = {
// This will be a path parameter in the request URL
- parent: `projects/${projectId}/locations/${locationId}`
+ parent: `projects/${projectId}/locations/${locationId}`,
};
// Lists key rings
@@ -85,7 +85,7 @@ function listKeyRings (projectId, locationId) {
const keyRings = result.data.keyRings || [];
if (keyRings.length) {
- keyRings.forEach((keyRing) => {
+ keyRings.forEach(keyRing => {
console.log(`${keyRing.name}:`);
console.log(` Created: ${new Date(keyRing.createTime)}`);
});
@@ -97,7 +97,7 @@ function listKeyRings (projectId, locationId) {
// [END kms_list_keyrings]
}
-function getKeyRing (projectId, locationId, keyRingId) {
+function getKeyRing(projectId, locationId, keyRingId) {
// [START kms_get_keyring]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -117,7 +117,7 @@ function getKeyRing (projectId, locationId, keyRingId) {
const request = {
// This will be a path parameter in the request URL
- name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`
+ name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`,
};
// Gets a key ring
@@ -135,7 +135,7 @@ function getKeyRing (projectId, locationId, keyRingId) {
// [END kms_get_keyring]
}
-function getKeyRingIamPolicy (projectId, locationId, keyRingId) {
+function getKeyRingIamPolicy(projectId, locationId, keyRingId) {
// [START kms_get_keyring_policy]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -155,34 +155,43 @@ function getKeyRingIamPolicy (projectId, locationId, keyRingId) {
const request = {
// This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`,
};
// Gets the IAM policy of a key ring
- cloudkms.projects.locations.keyRings.getIamPolicy(request, (err, policy) => {
- if (err) {
- console.log(err);
- return;
- }
+ cloudkms.projects.locations.keyRings.getIamPolicy(
+ request,
+ (err, policy) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- if (policy.data.bindings) {
- policy.data.bindings.forEach((binding) => {
- if (binding.members && binding.members.length) {
- console.log(`${binding.role}:`);
- binding.members.forEach((member) => {
- console.log(` ${member}`);
- });
- }
- });
- } else {
- console.log(`Policy for key ring ${keyRingId} is empty.`);
+ if (policy.data.bindings) {
+ policy.data.bindings.forEach(binding => {
+ if (binding.members && binding.members.length) {
+ console.log(`${binding.role}:`);
+ binding.members.forEach(member => {
+ console.log(` ${member}`);
+ });
+ }
+ });
+ } else {
+ console.log(`Policy for key ring ${keyRingId} is empty.`);
+ }
}
- });
+ );
});
// [END kms_get_keyring_policy]
}
-function addMemberToKeyRingPolicy (projectId, locationId, keyRingId, member, role) {
+function addMemberToKeyRingPolicy(
+ projectId,
+ locationId,
+ keyRingId,
+ member,
+ role
+) {
// [START kms_add_member_to_keyring_policy]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -208,68 +217,87 @@ function addMemberToKeyRingPolicy (projectId, locationId, keyRingId, member, rol
let request = {
// This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`,
};
// Gets the IAM policy of a key ring
- cloudkms.projects.locations.keyRings.getIamPolicy(request, (err, policy) => {
- if (err) {
- console.log(err);
- return;
- }
-
- policy = Object.assign({ bindings: [] }, policy.data);
+ cloudkms.projects.locations.keyRings.getIamPolicy(
+ request,
+ (err, policy) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- const index = policy.bindings.findIndex((binding) => binding.role === role);
+ policy = Object.assign({bindings: []}, policy.data);
- // Add the role/member combo to the policy
- const binding = Object.assign({
- role: role,
- members: []
- }, policy.bindings[index]);
- if (index === -1) {
- policy.bindings.push(binding);
- }
- if (!binding.members.includes(member)) {
- binding.members.push(member);
- }
+ const index = policy.bindings.findIndex(
+ binding => binding.role === role
+ );
- request = {
- // This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`,
- // This will be the request body
- resource: {
- policy: policy
+ // Add the role/member combo to the policy
+ const binding = Object.assign(
+ {
+ role: role,
+ members: [],
+ },
+ policy.bindings[index]
+ );
+ if (index === -1) {
+ policy.bindings.push(binding);
}
- };
-
- // Adds the member/role combo to the policy of the key ring
- cloudkms.projects.locations.keyRings.setIamPolicy(request, (err, policy) => {
- if (err) {
- console.log(err);
- return;
+ if (!binding.members.includes(member)) {
+ binding.members.push(member);
}
- console.log(`${member}/${role} combo added to policy for key ring ${keyRingId}.`);
- if (policy.data.bindings) {
- policy.data.bindings.forEach((binding) => {
- if (binding.members && binding.members.length) {
- console.log(`${binding.role}:`);
- binding.members.forEach((member) => {
- console.log(` ${member}`);
+ request = {
+ // This will be a path parameter in the request URL
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`,
+ // This will be the request body
+ resource: {
+ policy: policy,
+ },
+ };
+
+ // Adds the member/role combo to the policy of the key ring
+ cloudkms.projects.locations.keyRings.setIamPolicy(
+ request,
+ (err, policy) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
+
+ console.log(
+ `${member}/${role} combo added to policy for key ring ${keyRingId}.`
+ );
+ if (policy.data.bindings) {
+ policy.data.bindings.forEach(binding => {
+ if (binding.members && binding.members.length) {
+ console.log(`${binding.role}:`);
+ binding.members.forEach(member => {
+ console.log(` ${member}`);
+ });
+ }
});
+ } else {
+ console.log(`Policy for key ring ${keyRingId} is empty.`);
}
- });
- } else {
- console.log(`Policy for key ring ${keyRingId} is empty.`);
- }
- });
- });
+ }
+ );
+ }
+ );
});
// [END kms_add_member_to_keyring_policy]
}
-function removeMemberFromKeyRingPolicy (projectId, locationId, keyRingId, member, role) {
+function removeMemberFromKeyRingPolicy(
+ projectId,
+ locationId,
+ keyRingId,
+ member,
+ role
+) {
// [START kms_remove_member_from_keyring_policy]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -295,71 +323,82 @@ function removeMemberFromKeyRingPolicy (projectId, locationId, keyRingId, member
let request = {
// This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`,
};
// Gets the IAM policy of a key ring
- cloudkms.projects.locations.keyRings.getIamPolicy(request, (err, policy) => {
- if (err) {
- console.log(err);
- return;
- }
-
- policy = Object.assign({ bindings: [] }, policy.data);
-
- let index = policy.bindings.findIndex((binding) => binding.role === role);
+ cloudkms.projects.locations.keyRings.getIamPolicy(
+ request,
+ (err, policy) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- const binding = Object.assign({
- role: role,
- members: []
- }, policy.bindings[index]);
- if (index === -1) {
- return;
- }
- if (!binding.members.includes(member)) {
- return;
- }
+ policy = Object.assign({bindings: []}, policy.data);
- // Remove the role/member combo from the policy
- binding.members.splice(binding.members.indexOf(member), 1);
+ let index = policy.bindings.findIndex(binding => binding.role === role);
- request = {
- // This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`,
- // This will be the request body
- resource: {
- policy: policy
+ const binding = Object.assign(
+ {
+ role: role,
+ members: [],
+ },
+ policy.bindings[index]
+ );
+ if (index === -1) {
+ return;
}
- };
-
- // Removes the role/member combo from the policy of the key ring
- cloudkms.projects.locations.keyRings.setIamPolicy(request, (err, response) => {
- if (err) {
- console.log(err);
+ if (!binding.members.includes(member)) {
return;
}
- console.log(`${member}/${role} combo removed from policy for key ring ${keyRingId}.`);
- const policy = response.data;
- if (policy.bindings) {
- policy.bindings.forEach((binding) => {
- if (binding.members && binding.members.length) {
- console.log(`${binding.role}:`);
- binding.members.forEach((member) => {
- console.log(` ${member}`);
+ // Remove the role/member combo from the policy
+ binding.members.splice(binding.members.indexOf(member), 1);
+
+ request = {
+ // This will be a path parameter in the request URL
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`,
+ // This will be the request body
+ resource: {
+ policy: policy,
+ },
+ };
+
+ // Removes the role/member combo from the policy of the key ring
+ cloudkms.projects.locations.keyRings.setIamPolicy(
+ request,
+ (err, response) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
+
+ console.log(
+ `${member}/${role} combo removed from policy for key ring ${keyRingId}.`
+ );
+ const policy = response.data;
+ if (policy.bindings) {
+ policy.bindings.forEach(binding => {
+ if (binding.members && binding.members.length) {
+ console.log(`${binding.role}:`);
+ binding.members.forEach(member => {
+ console.log(` ${member}`);
+ });
+ }
});
+ } else {
+ console.log(`Policy for key ring ${keyRingId} is empty.`);
}
- });
- } else {
- console.log(`Policy for key ring ${keyRingId} is empty.`);
- }
- });
- });
+ }
+ );
+ }
+ );
});
// [END kms_remove_member_from_keyring_policy]
}
-function createCryptoKey (projectId, locationId, keyRingId, cryptoKeyId) {
+function createCryptoKey(projectId, locationId, keyRingId, cryptoKeyId) {
// [START kms_create_cryptokey]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -388,25 +427,28 @@ function createCryptoKey (projectId, locationId, keyRingId, cryptoKeyId) {
resource: {
// This will allow the API access to the key for encryption and decryption
- purpose: 'ENCRYPT_DECRYPT'
- }
+ purpose: 'ENCRYPT_DECRYPT',
+ },
};
// Creates a new key ring
- cloudkms.projects.locations.keyRings.cryptoKeys.create(request, (err, response) => {
- if (err) {
- console.log(err);
- return;
- }
+ cloudkms.projects.locations.keyRings.cryptoKeys.create(
+ request,
+ (err, response) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- const cryptoKey = response.data;
- console.log(`Key ${cryptoKey.name} created.`);
- });
+ const cryptoKey = response.data;
+ console.log(`Key ${cryptoKey.name} created.`);
+ }
+ );
});
// [END kms_create_cryptokey]
}
-function listCryptoKeys (projectId, locationId, keyRingId) {
+function listCryptoKeys(projectId, locationId, keyRingId) {
// [START kms_list_cryptokeys]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -426,36 +468,48 @@ function listCryptoKeys (projectId, locationId, keyRingId) {
const request = {
// This will be a path parameter in the request URL
- parent: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`
+ parent: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}`,
};
// Creates a new key ring
- cloudkms.projects.locations.keyRings.cryptoKeys.list(request, (err, result) => {
- if (err) {
- console.log(err);
- return;
- }
-
- const cryptoKeys = result.data.cryptoKeys || [];
+ cloudkms.projects.locations.keyRings.cryptoKeys.list(
+ request,
+ (err, result) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- if (cryptoKeys.length) {
- cryptoKeys.forEach((cryptoKey) => {
- console.log(`${cryptoKey.name}:`);
- console.log(` Created: ${new Date(cryptoKey.createTime)}`);
- console.log(` Purpose: ${cryptoKey.purpose}`);
- console.log(` Primary: ${cryptoKey.primary.name}`);
- console.log(` State: ${cryptoKey.primary.state}`);
- console.log(` Created: ${new Date(cryptoKey.primary.createTime)}`);
- });
- } else {
- console.log('No crypto keys found.');
+ const cryptoKeys = result.data.cryptoKeys || [];
+
+ if (cryptoKeys.length) {
+ cryptoKeys.forEach(cryptoKey => {
+ console.log(`${cryptoKey.name}:`);
+ console.log(` Created: ${new Date(cryptoKey.createTime)}`);
+ console.log(` Purpose: ${cryptoKey.purpose}`);
+ console.log(` Primary: ${cryptoKey.primary.name}`);
+ console.log(` State: ${cryptoKey.primary.state}`);
+ console.log(
+ ` Created: ${new Date(cryptoKey.primary.createTime)}`
+ );
+ });
+ } else {
+ console.log('No crypto keys found.');
+ }
}
- });
+ );
});
// [END kms_list_cryptokeys]
}
-function encrypt (projectId, locationId, keyRingId, cryptoKeyId, plaintextFileName, ciphertextFileName) {
+function encrypt(
+ projectId,
+ locationId,
+ keyRingId,
+ cryptoKeyId,
+ plaintextFileName,
+ ciphertextFileName
+) {
// [START kms_encrypt]
const fs = require('fs');
@@ -496,35 +550,51 @@ function encrypt (projectId, locationId, keyRingId, cryptoKeyId, plaintextFileNa
name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
// This will be the request body
resource: {
- plaintext: contentsBuffer.toString('base64')
- }
+ plaintext: contentsBuffer.toString('base64'),
+ },
};
// Encrypts the file using the specified crypto key
- cloudkms.projects.locations.keyRings.cryptoKeys.encrypt(request, (err, response) => {
- if (err) {
- console.log(err);
- return;
- }
-
- // Writes the encrypted file to disk
- const result = response.data;
- fs.writeFile(ciphertextFileName, Buffer.from(result.ciphertext, 'base64'), (err) => {
+ cloudkms.projects.locations.keyRings.cryptoKeys.encrypt(
+ request,
+ (err, response) => {
if (err) {
console.log(err);
return;
}
- console.log(`Encrypted ${plaintextFileName} using ${result.name}.`);
- console.log(`Result saved to ${ciphertextFileName}.`);
- });
- });
+ // Writes the encrypted file to disk
+ const result = response.data;
+ fs.writeFile(
+ ciphertextFileName,
+ Buffer.from(result.ciphertext, 'base64'),
+ err => {
+ if (err) {
+ console.log(err);
+ return;
+ }
+
+ console.log(
+ `Encrypted ${plaintextFileName} using ${result.name}.`
+ );
+ console.log(`Result saved to ${ciphertextFileName}.`);
+ }
+ );
+ }
+ );
});
});
// [END kms_encrypt]
}
-function decrypt (projectId, locationId, keyRingId, cryptoKeyId, ciphertextFileName, plaintextFileName) {
+function decrypt(
+ projectId,
+ locationId,
+ keyRingId,
+ cryptoKeyId,
+ ciphertextFileName,
+ plaintextFileName
+) {
// [START kms_decrypt]
const fs = require('fs');
@@ -565,34 +635,43 @@ function decrypt (projectId, locationId, keyRingId, cryptoKeyId, ciphertextFileN
name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
// This will be the request body
resource: {
- ciphertext: contentsBuffer.toString('base64')
- }
+ ciphertext: contentsBuffer.toString('base64'),
+ },
};
// Dencrypts the file using the specified crypto key
- cloudkms.projects.locations.keyRings.cryptoKeys.decrypt(request, (err, response) => {
- if (err) {
- console.log(err);
- return;
- }
-
- // Writes the dencrypted file to disk
- const result = response.data;
- fs.writeFile(plaintextFileName, Buffer.from(result.plaintext, 'base64'), (err) => {
+ cloudkms.projects.locations.keyRings.cryptoKeys.decrypt(
+ request,
+ (err, response) => {
if (err) {
console.log(err);
return;
}
- console.log(`Decrypted ${ciphertextFileName}, result saved to ${plaintextFileName}.`);
- });
- });
+      // Writes the decrypted file to disk
+ const result = response.data;
+ fs.writeFile(
+ plaintextFileName,
+ Buffer.from(result.plaintext, 'base64'),
+ err => {
+ if (err) {
+ console.log(err);
+ return;
+ }
+
+ console.log(
+ `Decrypted ${ciphertextFileName}, result saved to ${plaintextFileName}.`
+ );
+ }
+ );
+ }
+ );
});
});
// [END kms_decrypt]
}
-function getCryptoKey (projectId, locationId, keyRingId, cryptoKeyId) {
+function getCryptoKey(projectId, locationId, keyRingId, cryptoKeyId) {
// [START kms_get_cryptokey]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -615,29 +694,38 @@ function getCryptoKey (projectId, locationId, keyRingId, cryptoKeyId) {
const request = {
// This will be a path parameter in the request URL
- name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`
+ name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
};
// Gets a crypto key
- cloudkms.projects.locations.keyRings.cryptoKeys.get(request, (err, response) => {
- if (err) {
- console.log(err);
- return;
- }
+ cloudkms.projects.locations.keyRings.cryptoKeys.get(
+ request,
+ (err, response) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- const cryptoKey = response.data;
- console.log(`Name: ${cryptoKey.name}:`);
- console.log(`Created: ${new Date(cryptoKey.createTime)}`);
- console.log(`Purpose: ${cryptoKey.purpose}`);
- console.log(`Primary: ${cryptoKey.primary.name}`);
- console.log(` State: ${cryptoKey.primary.state}`);
- console.log(` Created: ${new Date(cryptoKey.primary.createTime)}`);
- });
+ const cryptoKey = response.data;
+ console.log(`Name: ${cryptoKey.name}:`);
+ console.log(`Created: ${new Date(cryptoKey.createTime)}`);
+ console.log(`Purpose: ${cryptoKey.purpose}`);
+ console.log(`Primary: ${cryptoKey.primary.name}`);
+ console.log(` State: ${cryptoKey.primary.state}`);
+ console.log(` Created: ${new Date(cryptoKey.primary.createTime)}`);
+ }
+ );
});
// [END kms_get_cryptokey]
}
-function setPrimaryCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId, version) {
+function setPrimaryCryptoKeyVersion(
+ projectId,
+ locationId,
+ keyRingId,
+ cryptoKeyId,
+ version
+) {
// [START kms_set_cryptokey_primary_version]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -666,31 +754,36 @@ function setPrimaryCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKey
name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
// This will be the request body
resource: {
- cryptoKeyVersionId: `${version}`
- }
+ cryptoKeyVersionId: `${version}`,
+ },
};
// Sets a crypto key's primary version
- cloudkms.projects.locations.keyRings.cryptoKeys.updatePrimaryVersion(request, (err, response) => {
- if (err) {
- console.log(err);
- return;
- }
+ cloudkms.projects.locations.keyRings.cryptoKeys.updatePrimaryVersion(
+ request,
+ (err, response) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- const cryptoKey = response.data;
- console.log(`Set ${version} as primary version for crypto key ${cryptoKeyId}.\n`);
- console.log(`Name: ${cryptoKey.name}:`);
- console.log(`Created: ${new Date(cryptoKey.createTime)}`);
- console.log(`Purpose: ${cryptoKey.purpose}`);
- console.log(`Primary: ${cryptoKey.primary.name}`);
- console.log(` State: ${cryptoKey.primary.state}`);
- console.log(` Created: ${new Date(cryptoKey.primary.createTime)}`);
- });
+ const cryptoKey = response.data;
+ console.log(
+ `Set ${version} as primary version for crypto key ${cryptoKeyId}.\n`
+ );
+ console.log(`Name: ${cryptoKey.name}:`);
+ console.log(`Created: ${new Date(cryptoKey.createTime)}`);
+ console.log(`Purpose: ${cryptoKey.purpose}`);
+ console.log(`Primary: ${cryptoKey.primary.name}`);
+ console.log(` State: ${cryptoKey.primary.state}`);
+ console.log(` Created: ${new Date(cryptoKey.primary.createTime)}`);
+ }
+ );
});
// [END kms_set_cryptokey_primary_version]
}
-function getCryptoKeyIamPolicy (projectId, locationId, keyRingId, cryptoKeyId) {
+function getCryptoKeyIamPolicy(projectId, locationId, keyRingId, cryptoKeyId) {
// [START kms_get_cryptokey_policy]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -713,36 +806,46 @@ function getCryptoKeyIamPolicy (projectId, locationId, keyRingId, cryptoKeyId) {
const request = {
// This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
};
// Gets the IAM policy of a crypto key
- cloudkms.projects.locations.keyRings.cryptoKeys.getIamPolicy(request, (err, response) => {
- if (err) {
- console.log(err);
- return;
- }
+ cloudkms.projects.locations.keyRings.cryptoKeys.getIamPolicy(
+ request,
+ (err, response) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- console.log(response);
- const policy = response.data;
- if (policy.bindings) {
- policy.bindings.forEach((binding) => {
- if (binding.members && binding.members.length) {
- console.log(`${binding.role}:`);
- binding.members.forEach((member) => {
- console.log(` ${member}`);
- });
- }
- });
- } else {
- console.log(`Policy for crypto key ${cryptoKeyId} is empty.`);
+ console.log(response);
+ const policy = response.data;
+ if (policy.bindings) {
+ policy.bindings.forEach(binding => {
+ if (binding.members && binding.members.length) {
+ console.log(`${binding.role}:`);
+ binding.members.forEach(member => {
+ console.log(` ${member}`);
+ });
+ }
+ });
+ } else {
+ console.log(`Policy for crypto key ${cryptoKeyId} is empty.`);
+ }
}
- });
+ );
});
// [END kms_get_cryptokey_policy]
}
-function addMemberToCryptoKeyPolicy (projectId, locationId, keyRingId, cryptoKeyId, member, role) {
+function addMemberToCryptoKeyPolicy(
+ projectId,
+ locationId,
+ keyRingId,
+ cryptoKeyId,
+ member,
+ role
+) {
// [START kms_add_member_to_cryptokey_policy]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -771,69 +874,89 @@ function addMemberToCryptoKeyPolicy (projectId, locationId, keyRingId, cryptoKey
let request = {
// This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
};
// Gets the IAM policy of a crypto key
- cloudkms.projects.locations.keyRings.cryptoKeys.getIamPolicy(request, (err, getResponse) => {
- if (err) {
- console.log(err);
- return;
- }
-
- let policy = Object.assign({ bindings: [] }, getResponse.data);
+ cloudkms.projects.locations.keyRings.cryptoKeys.getIamPolicy(
+ request,
+ (err, getResponse) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- const index = policy.bindings.findIndex((binding) => binding.role === role);
+ let policy = Object.assign({bindings: []}, getResponse.data);
- // Add the role/member combo to the policy
- const binding = Object.assign({
- role: role,
- members: []
- }, policy.bindings[index]);
- if (index === -1) {
- policy.bindings.push(binding);
- }
- if (!binding.members.includes(member)) {
- binding.members.push(member);
- }
+ const index = policy.bindings.findIndex(
+ binding => binding.role === role
+ );
- request = {
- // This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
- // This will be the request body
- resource: {
- policy: policy
+ // Add the role/member combo to the policy
+ const binding = Object.assign(
+ {
+ role: role,
+ members: [],
+ },
+ policy.bindings[index]
+ );
+ if (index === -1) {
+ policy.bindings.push(binding);
}
- };
-
- // Adds the member/role combo to the policy of the crypto key
- cloudkms.projects.locations.keyRings.cryptoKeys.setIamPolicy(request, (err, setResponse) => {
- if (err) {
- console.log(err);
- return;
+ if (!binding.members.includes(member)) {
+ binding.members.push(member);
}
- policy = setResponse.data;
- console.log(`${member}/${role} combo added to policy for crypto key ${cryptoKeyId}.`);
- if (policy.bindings) {
- policy.bindings.forEach((binding) => {
- if (binding.members && binding.members.length) {
- console.log(`${binding.role}:`);
- binding.members.forEach((member) => {
- console.log(` ${member}`);
+ request = {
+ // This will be a path parameter in the request URL
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
+ // This will be the request body
+ resource: {
+ policy: policy,
+ },
+ };
+
+ // Adds the member/role combo to the policy of the crypto key
+ cloudkms.projects.locations.keyRings.cryptoKeys.setIamPolicy(
+ request,
+ (err, setResponse) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
+
+ policy = setResponse.data;
+ console.log(
+ `${member}/${role} combo added to policy for crypto key ${cryptoKeyId}.`
+ );
+ if (policy.bindings) {
+ policy.bindings.forEach(binding => {
+ if (binding.members && binding.members.length) {
+ console.log(`${binding.role}:`);
+ binding.members.forEach(member => {
+ console.log(` ${member}`);
+ });
+ }
});
+ } else {
+ console.log(`Policy for crypto key ${cryptoKeyId} is empty.`);
}
- });
- } else {
- console.log(`Policy for crypto key ${cryptoKeyId} is empty.`);
- }
- });
- });
+ }
+ );
+ }
+ );
});
// [END kms_add_member_to_cryptokey_policy]
}
-function removeMemberFromCryptoKeyPolicy (projectId, locationId, keyRingId, cryptoKeyId, member, role) {
+function removeMemberFromCryptoKeyPolicy(
+ projectId,
+ locationId,
+ keyRingId,
+ cryptoKeyId,
+ member,
+ role
+) {
// [START kms_remove_member_from_cryptokey_policy]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -862,73 +985,84 @@ function removeMemberFromCryptoKeyPolicy (projectId, locationId, keyRingId, cryp
let request = {
// This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
};
// Gets the IAM policy of a crypto key
- cloudkms.projects.locations.keyRings.cryptoKeys.getIamPolicy(request, (err, getResponse) => {
- if (err) {
- console.log(err);
- return;
- }
-
- let policy = Object.assign({ bindings: [] }, getResponse.data);
-
- let index = policy.bindings.findIndex((binding) => binding.role === role);
+ cloudkms.projects.locations.keyRings.cryptoKeys.getIamPolicy(
+ request,
+ (err, getResponse) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- const binding = Object.assign({
- role: role,
- members: []
- }, policy.bindings[index]);
- if (index === -1) {
- return;
- }
- if (!binding.members.includes(member)) {
- return;
- }
+ let policy = Object.assign({bindings: []}, getResponse.data);
- // Remove the role/member combo from the policy
- binding.members.splice(binding.members.indexOf(member), 1);
+ let index = policy.bindings.findIndex(binding => binding.role === role);
- request = {
- // This will be a path parameter in the request URL
- resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
- // This will be the request body
- resource: {
- policy: policy
+ const binding = Object.assign(
+ {
+ role: role,
+ members: [],
+ },
+ policy.bindings[index]
+ );
+ if (index === -1) {
+ return;
}
- };
-
- console.log(JSON.stringify(request, null, 2));
-
- // Removes the member/role combo from the policy of the crypto key
- cloudkms.projects.locations.keyRings.cryptoKeys.setIamPolicy(request, (err, setResponse) => {
- if (err) {
- console.log(err);
+ if (!binding.members.includes(member)) {
return;
}
- console.log(`${member}/${role} combo removed from policy for crypto key ${cryptoKeyId}.`);
- const policy = setResponse.data;
- if (policy.bindings) {
- policy.bindings.forEach((binding) => {
- if (binding.members && binding.members.length) {
- console.log(`${binding.role}:`);
- binding.members.forEach((member) => {
- console.log(` ${member}`);
+ // Remove the role/member combo from the policy
+ binding.members.splice(binding.members.indexOf(member), 1);
+
+ request = {
+ // This will be a path parameter in the request URL
+ resource_: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
+ // This will be the request body
+ resource: {
+ policy: policy,
+ },
+ };
+
+ console.log(JSON.stringify(request, null, 2));
+
+ // Removes the member/role combo from the policy of the crypto key
+ cloudkms.projects.locations.keyRings.cryptoKeys.setIamPolicy(
+ request,
+ (err, setResponse) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
+
+ console.log(
+ `${member}/${role} combo removed from policy for crypto key ${cryptoKeyId}.`
+ );
+ const policy = setResponse.data;
+ if (policy.bindings) {
+ policy.bindings.forEach(binding => {
+ if (binding.members && binding.members.length) {
+ console.log(`${binding.role}:`);
+ binding.members.forEach(member => {
+ console.log(` ${member}`);
+ });
+ }
});
+ } else {
+ console.log(`Policy for crypto key ${cryptoKeyId} is empty.`);
}
- });
- } else {
- console.log(`Policy for crypto key ${cryptoKeyId} is empty.`);
- }
- });
- });
+ }
+ );
+ }
+ );
});
// [END kms_remove_member_from_cryptokey_policy]
}
-function createCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId) {
+function createCryptoKeyVersion(projectId, locationId, keyRingId, cryptoKeyId) {
// [START kms_create_cryptokey_version]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -951,23 +1085,26 @@ function createCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId)
const request = {
// This will be a path parameter in the request URL
- parent: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`
+ parent: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
};
// Creates a new crypto key version
- cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.create(request, (err, response) => {
- if (err) {
- console.log(err);
- return;
- }
+ cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.create(
+ request,
+ (err, response) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- console.log(`Crypto key version ${response.data.name} created.`);
- });
+ console.log(`Crypto key version ${response.data.name} created.`);
+ }
+ );
});
// [END kms_create_cryptokey_version]
}
-function listCryptoKeyVersions (projectId, locationId, keyRingId, cryptoKeyId) {
+function listCryptoKeyVersions(projectId, locationId, keyRingId, cryptoKeyId) {
// [START kms_list_cryptokey_versions]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -990,33 +1127,42 @@ function listCryptoKeyVersions (projectId, locationId, keyRingId, cryptoKeyId) {
const request = {
// This will be a path parameter in the request URL
- parent: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`
+ parent: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}`,
};
// Creates a new key ring
- cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.list(request, (err, result) => {
- if (err) {
- console.log(err);
- return;
- }
+ cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.list(
+ request,
+ (err, result) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- const cryptoKeyVersions = result.data.cryptoKeyVersions || [];
+ const cryptoKeyVersions = result.data.cryptoKeyVersions || [];
- if (cryptoKeyVersions.length) {
- cryptoKeyVersions.forEach((version) => {
- console.log(`${version.name}:`);
- console.log(` Created: ${new Date(version.createTime)}`);
- console.log(` State: ${version.state}`);
- });
- } else {
- console.log('No crypto key versions found.');
+ if (cryptoKeyVersions.length) {
+ cryptoKeyVersions.forEach(version => {
+ console.log(`${version.name}:`);
+ console.log(` Created: ${new Date(version.createTime)}`);
+ console.log(` State: ${version.state}`);
+ });
+ } else {
+ console.log('No crypto key versions found.');
+ }
}
- });
+ );
});
// [END kms_list_cryptokey_versions]
}
-function destroyCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId, version) {
+function destroyCryptoKeyVersion(
+ projectId,
+ locationId,
+ keyRingId,
+ cryptoKeyId,
+ version
+) {
// [START kms_destroy_cryptokey_version]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -1042,23 +1188,32 @@ function destroyCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId,
const request = {
// This will be a path parameter in the request URL
- name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}/cryptoKeyVersions/${version}`
+ name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}/cryptoKeyVersions/${version}`,
};
// Destroys a crypto key version
- cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.destroy(request, (err, response) => {
- if (err) {
- console.log(err);
- return;
- }
+ cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.destroy(
+ request,
+ (err, response) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- console.log(`Crypto key version ${response.data.name} destroyed.`);
- });
+ console.log(`Crypto key version ${response.data.name} destroyed.`);
+ }
+ );
});
// [END kms_destroy_cryptokey_version]
}
-function restoreCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId, version) {
+function restoreCryptoKeyVersion(
+ projectId,
+ locationId,
+ keyRingId,
+ cryptoKeyId,
+ version
+) {
// [START kms_restore_cryptokey_version]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -1084,23 +1239,32 @@ function restoreCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId,
const request = {
// This will be a path parameter in the request URL
- name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}/cryptoKeyVersions/${version}`
+ name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}/cryptoKeyVersions/${version}`,
};
// Restores a crypto key version
- cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.restore(request, (err, response) => {
- if (err) {
- console.log(err);
- return;
- }
+ cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.restore(
+ request,
+ (err, response) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
- console.log(`Crypto key version ${response.data.name} restored.`);
- });
+ console.log(`Crypto key version ${response.data.name} restored.`);
+ }
+ );
});
// [END kms_restore_cryptokey_version]
}
-function enableCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId, version) {
+function enableCryptoKeyVersion(
+ projectId,
+ locationId,
+ keyRingId,
+ cryptoKeyId,
+ version
+) {
// [START kms_enable_cryptokey_version]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -1126,43 +1290,57 @@ function enableCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId,
let request = {
// This will be a path parameter in the request URL
- name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}/cryptoKeyVersions/${version}`
+ name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}/cryptoKeyVersions/${version}`,
};
// Gets a crypto key version
- cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.get(request, (err, getResponse) => {
- if (err) {
- console.log(err);
- return;
- }
-
- const cryptoKeyVersion = getResponse.data;
- cryptoKeyVersion.state = 'ENABLED';
-
- request = {
- // This will be a path parameter in the request URL
- name: request.name,
- // This will be a query parameter in the request URL
- updateMask: 'state',
- // This will be the request body
- resource: cryptoKeyVersion
- };
-
- // Enables a crypto key version
- cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.patch(request, (err, patchResponse) => {
+ cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.get(
+ request,
+ (err, getResponse) => {
if (err) {
console.log(err);
return;
}
- console.log(`Crypto key version ${patchResponse.data.name} enabled.`);
- });
- });
+ const cryptoKeyVersion = getResponse.data;
+ cryptoKeyVersion.state = 'ENABLED';
+
+ request = {
+ // This will be a path parameter in the request URL
+ name: request.name,
+ // This will be a query parameter in the request URL
+ updateMask: 'state',
+ // This will be the request body
+ resource: cryptoKeyVersion,
+ };
+
+ // Enables a crypto key version
+ cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.patch(
+ request,
+ (err, patchResponse) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
+
+ console.log(
+ `Crypto key version ${patchResponse.data.name} enabled.`
+ );
+ }
+ );
+ }
+ );
});
// [END kms_enable_cryptokey_version]
}
-function disableCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId, version) {
+function disableCryptoKeyVersion(
+ projectId,
+ locationId,
+ keyRingId,
+ cryptoKeyId,
+ version
+) {
// [START kms_disable_cryptokey_version]
// Your Google Cloud Platform project ID
// const projectId = 'YOUR_PROJECT_ID';
@@ -1188,243 +1366,344 @@ function disableCryptoKeyVersion (projectId, locationId, keyRingId, cryptoKeyId,
let request = {
// This will be a path parameter in the request URL
- name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}/cryptoKeyVersions/${version}`
+ name: `projects/${projectId}/locations/${locationId}/keyRings/${keyRingId}/cryptoKeys/${cryptoKeyId}/cryptoKeyVersions/${version}`,
};
// Gets a crypto key version
- cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.get(request, (err, getResponse) => {
- if (err) {
- console.log(err);
- return;
- }
-
- const cryptoKeyVersion = getResponse.data;
- cryptoKeyVersion.state = 'DISABLED';
-
- request = {
- // This will be a path parameter in the request URL
- name: request.name,
- // This will be a query parameter in the request URL
- updateMask: 'state',
- // This will be the request body
- resource: cryptoKeyVersion
- };
-
- // Disables a crypto key version
- cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.patch(request, (err, patchResponse) => {
+ cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.get(
+ request,
+ (err, getResponse) => {
if (err) {
console.log(err);
return;
}
- console.log(`Crypto key version ${patchResponse.data.name} disabled.`);
- });
- });
+ const cryptoKeyVersion = getResponse.data;
+ cryptoKeyVersion.state = 'DISABLED';
+
+ request = {
+ // This will be a path parameter in the request URL
+ name: request.name,
+ // This will be a query parameter in the request URL
+ updateMask: 'state',
+ // This will be the request body
+ resource: cryptoKeyVersion,
+ };
+
+ // Disables a crypto key version
+ cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.patch(
+ request,
+ (err, patchResponse) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
+
+ console.log(
+ `Crypto key version ${patchResponse.data.name} disabled.`
+ );
+ }
+ );
+ }
+ );
});
// [END kms_disable_cryptokey_version]
}
/* eslint-disable indent */
- // [START kms_create_keyring]
- // [START kms_list_keyrings]
- // [START kms_get_keyring]
- // [START kms_get_keyring_policy]
- // [START kms_add_member_to_keyring_policy]
- // [START kms_remove_member_from_keyring_policy]
- // [START kms_create_cryptokey]
- // [START kms_list_cryptokeys]
- // [START kms_encrypt]
- // [START kms_decrypt]
- // [START kms_get_cryptokey]
- // [START kms_set_cryptokey_primary_version]
- // [START kms_get_cryptokey_policy]
- // [START kms_add_member_to_cryptokey_policy]
- // [START kms_remove_member_from_cryptokey_policy]
- // [START kms_list_cryptokey_versions]
- // [START kms_create_cryptokey_version]
- // [START kms_destroy_cryptokey_version]
- // [START kms_restore_cryptokey_version]
- // [START kms_enable_cryptokey_version]
- // [START kms_disable_cryptokey_version]
-
- function buildAndAuthorizeService (callback) {
- // Imports the Google APIs client library
- const { google } = require('googleapis');
+// [START kms_create_keyring]
+// [START kms_list_keyrings]
+// [START kms_get_keyring]
+// [START kms_get_keyring_policy]
+// [START kms_add_member_to_keyring_policy]
+// [START kms_remove_member_from_keyring_policy]
+// [START kms_create_cryptokey]
+// [START kms_list_cryptokeys]
+// [START kms_encrypt]
+// [START kms_decrypt]
+// [START kms_get_cryptokey]
+// [START kms_set_cryptokey_primary_version]
+// [START kms_get_cryptokey_policy]
+// [START kms_add_member_to_cryptokey_policy]
+// [START kms_remove_member_from_cryptokey_policy]
+// [START kms_list_cryptokey_versions]
+// [START kms_create_cryptokey_version]
+// [START kms_destroy_cryptokey_version]
+// [START kms_restore_cryptokey_version]
+// [START kms_enable_cryptokey_version]
+// [START kms_disable_cryptokey_version]
+
+function buildAndAuthorizeService(callback) {
+ // Imports the Google APIs client library
+ const {google} = require('googleapis');
+
+ // Acquires credentials
+ google.auth.getApplicationDefault((err, authClient) => {
+ if (err) {
+ callback(err);
+ return;
+ }
- // Acquires credentials
- google.auth.getClient({ scopes: ['https://www.googleapis.com/auth/cloud-platform'] }).then(auth => {
- // Instantiates an authorized client
- const cloudkms = google.cloudkms({
- version: 'v1',
- auth
- });
+ if (authClient.createScopedRequired && authClient.createScopedRequired()) {
+ authClient = authClient.createScoped([
+ 'https://www.googleapis.com/auth/cloud-platform',
+ ]);
+ }
- callback(null, cloudkms);
+ // Instantiates an authorized client
+ const cloudkms = google.cloudkms({
+ version: 'v1',
+ auth: authClient,
});
- }
- // [END kms_create_keyring]
- // [END kms_list_keyrings]
- // [END kms_get_keyring]
- // [END kms_get_keyring_policy]
- // [END kms_add_member_to_keyring_policy]
- // [END kms_remove_member_from_keyring_policy]
- // [END kms_create_cryptokey]
- // [END kms_list_cryptokeys]
- // [END kms_encrypt]
- // [END kms_decrypt]
- // [END kms_get_cryptokey]
- // [END kms_set_cryptokey_primary_version]
- // [END kms_get_cryptokey_policy]
- // [END kms_add_member_to_cryptokey_policy]
- // [END kms_remove_member_from_cryptokey_policy]
- // [END kms_list_cryptokey_versions]
- // [END kms_create_cryptokey_version]
- // [END kms_destroy_cryptokey_version]
- // [END kms_restore_cryptokey_version]
- // [END kms_enable_cryptokey_version]
- // [END kms_disable_cryptokey_version]
+
+ callback(null, cloudkms);
+ });
+}
+// [END kms_create_keyring]
+// [END kms_list_keyrings]
+// [END kms_get_keyring]
+// [END kms_get_keyring_policy]
+// [END kms_add_member_to_keyring_policy]
+// [END kms_remove_member_from_keyring_policy]
+// [END kms_create_cryptokey]
+// [END kms_list_cryptokeys]
+// [END kms_encrypt]
+// [END kms_decrypt]
+// [END kms_get_cryptokey]
+// [END kms_set_cryptokey_primary_version]
+// [END kms_get_cryptokey_policy]
+// [END kms_add_member_to_cryptokey_policy]
+// [END kms_remove_member_from_cryptokey_policy]
+// [END kms_list_cryptokey_versions]
+// [END kms_create_cryptokey_version]
+// [END kms_destroy_cryptokey_version]
+// [END kms_restore_cryptokey_version]
+// [END kms_enable_cryptokey_version]
+// [END kms_disable_cryptokey_version]
/* eslint-disable indent */
const cli = require(`yargs`)
.demand(1)
- .command(
- `create `,
- `Creates a crypto key.`,
- {},
- (opts) => createCryptoKey(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey)
+ .command(`create `, `Creates a crypto key.`, {}, opts =>
+ createCryptoKey(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey)
)
.command(
`decrypt `,
`Decrypts a file.`,
{},
- (opts) => decrypt(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey, opts.ciphertextFileName, opts.plaintextFileName)
+ opts =>
+ decrypt(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey,
+ opts.ciphertextFileName,
+ opts.plaintextFileName
+ )
)
.command(
`encrypt `,
`Encrypts a file.`,
{},
- (opts) => encrypt(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey, opts.plaintextFileName, opts.ciphertextFileName)
+ opts =>
+ encrypt(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey,
+ opts.plaintextFileName,
+ opts.ciphertextFileName
+ )
)
- .command(
- `get `,
- `Gets a crypto key.`,
- {},
- (opts) => getCryptoKey(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey)
+ .command(`get `, `Gets a crypto key.`, {}, opts =>
+ getCryptoKey(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey)
)
.command(
`get-policy `,
`Gets a crypto key's IAM policy.`,
{},
- (opts) => getCryptoKeyIamPolicy(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey)
+ opts =>
+ getCryptoKeyIamPolicy(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey
+ )
)
.command(
`grant-access `,
`Adds a members to a crypto key's IAM policy.`,
{},
- (opts) => addMemberToCryptoKeyPolicy(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey, opts.member, opts.role)
+ opts =>
+ addMemberToCryptoKeyPolicy(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey,
+ opts.member,
+ opts.role
+ )
)
.command(
`keyrings `,
`Access key rings subcommands.`,
- (yargs) => {
+ yargs => {
yargs
- .command(
- `create `,
- `Creates a key ring.`,
- {},
- (opts) => createKeyRing(opts.projectId, opts.location, opts.keyRing)
+ .command(`create `, `Creates a key ring.`, {}, opts =>
+ createKeyRing(opts.projectId, opts.location, opts.keyRing)
)
- .command(
- `list`,
- `Lists key rings.`,
- {},
- (opts) => listKeyRings(opts.projectId, opts.location)
+ .command(`list`, `Lists key rings.`, {}, opts =>
+ listKeyRings(opts.projectId, opts.location)
)
- .command(
- `get `,
- `Gets a key ring.`,
- {},
- (opts) => getKeyRing(opts.projectId, opts.location, opts.keyRing)
+ .command(`get `, `Gets a key ring.`, {}, opts =>
+ getKeyRing(opts.projectId, opts.location, opts.keyRing)
)
.command(
`get-policy `,
`Gets a key ring's IAM policy.`,
{},
- (opts) => getKeyRingIamPolicy(opts.projectId, opts.location, opts.keyRing)
+ opts =>
+ getKeyRingIamPolicy(opts.projectId, opts.location, opts.keyRing)
)
.command(
`grant-access `,
`Adds a members to a key ring's IAM policy.`,
{},
- (opts) => addMemberToKeyRingPolicy(opts.projectId, opts.location, opts.keyRing, opts.member, opts.role)
+ opts =>
+ addMemberToKeyRingPolicy(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.member,
+ opts.role
+ )
)
.command(
`revoke-access `,
`Removes a member from a key ring's IAM policy.`,
{},
- (opts) => removeMemberFromKeyRingPolicy(opts.projectId, opts.location, opts.keyRing, opts.member, opts.role)
+ opts =>
+ removeMemberFromKeyRingPolicy(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.member,
+ opts.role
+ )
);
},
() => {}
)
- .command(
- `list `,
- `Lists crypto keys.`,
- {},
- (opts) => listCryptoKeys(opts.projectId, opts.location, opts.keyRing)
+ .command(`list `, `Lists crypto keys.`, {}, opts =>
+ listCryptoKeys(opts.projectId, opts.location, opts.keyRing)
)
.command(
`revoke-access `,
`Removes a member from a crypto key's IAM policy.`,
{},
- (opts) => removeMemberFromCryptoKeyPolicy(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey, opts.member, opts.role)
+ opts =>
+ removeMemberFromCryptoKeyPolicy(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey,
+ opts.member,
+ opts.role
+ )
)
.command(
`set-primary `,
`Sets a crypto key's primary version.`,
{},
- (opts) => setPrimaryCryptoKeyVersion(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey, opts.keyVersion)
+ opts =>
+ setPrimaryCryptoKeyVersion(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey,
+ opts.keyVersion
+ )
)
.command(
`versions `,
`Access crypto key versions subcommands.`,
- (yargs) => {
+ yargs => {
yargs
.command(
`create `,
`Creates a crypto key version.`,
{},
- (opts) => createCryptoKeyVersion(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey)
+ opts =>
+ createCryptoKeyVersion(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey
+ )
)
.command(
`destroy `,
`Destroys a crypto key version.`,
{},
- (opts) => destroyCryptoKeyVersion(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey, opts.keyVersion)
+ opts =>
+ destroyCryptoKeyVersion(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey,
+ opts.keyVersion
+ )
)
.command(
`disable `,
`Disables a crypto key version.`,
{},
- (opts) => disableCryptoKeyVersion(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey, opts.keyVersion)
+ opts =>
+ disableCryptoKeyVersion(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey,
+ opts.keyVersion
+ )
)
.command(
`enable `,
`Enables a crypto key version.`,
{},
- (opts) => enableCryptoKeyVersion(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey, opts.keyVersion)
+ opts =>
+ enableCryptoKeyVersion(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey,
+ opts.keyVersion
+ )
)
.command(
`list `,
`Lists crypto key versions.`,
{},
- (opts) => listCryptoKeyVersions(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey)
+ opts =>
+ listCryptoKeyVersions(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey
+ )
)
.command(
`restore `,
`Restores a crypto key version.`,
{},
- (opts) => restoreCryptoKeyVersion(opts.projectId, opts.location, opts.keyRing, opts.cryptoKey, opts.keyVersion)
+ opts =>
+ restoreCryptoKeyVersion(
+ opts.projectId,
+ opts.location,
+ opts.keyRing,
+ opts.cryptoKey,
+ opts.keyVersion
+ )
);
},
() => {}
@@ -1435,29 +1714,41 @@ const cli = require(`yargs`)
default: 'global',
global: true,
requiresArg: true,
- type: 'string'
+ type: 'string',
},
projectId: {
alias: 'p',
default: process.env.GCLOUD_PROJECT,
global: true,
requiresArg: true,
- type: 'string'
- }
+ type: 'string',
+ },
})
.example(`node $0 keyrings create "my-key-ring"`)
.example(`node $0 keyrings list`)
.example(`node $0 keyrings get-policy "my-key-ring"`)
- .example(`node $0 keyrings grant-access "my-key-ring" "user:developer@company.com" "roles/viewer"`)
- .example(`node $0 keyrings revoke-access "my-key-ring" "user:developer@company.com" "roles/viewer"`)
+ .example(
+ `node $0 keyrings grant-access "my-key-ring" "user:developer@company.com" "roles/viewer"`
+ )
+ .example(
+ `node $0 keyrings revoke-access "my-key-ring" "user:developer@company.com" "roles/viewer"`
+ )
.example(`node $0 create "my-key-ring" "my-key"`)
.example(`node $0 list`)
- .example(`node $0 encrypt "my-key-ring" "my-key" ./resources/plaintext.txt ./resources/plaintext.txt.encrypted`)
- .example(`node $0 decrypt "my-key-ring" "my-key" ./resources/plaintext.txt.encrypted ./resources/plaintext.txt.decrypted`)
+ .example(
+ `node $0 encrypt "my-key-ring" "my-key" ./resources/plaintext.txt ./resources/plaintext.txt.encrypted`
+ )
+ .example(
+ `node $0 decrypt "my-key-ring" "my-key" ./resources/plaintext.txt.encrypted ./resources/plaintext.txt.decrypted`
+ )
.example(`node $0 set-primary "my-key-ring" "my-key" 123`)
.example(`node $0 get-policy "my-key-ring" "my-key"`)
- .example(`node $0 grant-access "my-key-ring" "my-key" "user:developer@company.com" "roles/viewer"`)
- .example(`node $0 revoke-access "my-key-ring" "my-key" "user:developer@company.com" "roles/viewer"`)
+ .example(
+ `node $0 grant-access "my-key-ring" "my-key" "user:developer@company.com" "roles/viewer"`
+ )
+ .example(
+ `node $0 revoke-access "my-key-ring" "my-key" "user:developer@company.com" "roles/viewer"`
+ )
.example(`node $0 versions create "my-key-ring" "my-key"`)
.example(`node $0 versions list "my-key-ring" "my-key"`)
.example(`node $0 versions destroy "my-key-ring" "my-key" 123`)
diff --git a/kms/package.json b/kms/package.json
index b48d1278e9..64b141be08 100644
--- a/kms/package.json
+++ b/kms/package.json
@@ -9,11 +9,9 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=6"
+ "node": ">=8"
},
"scripts": {
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"test": "repo-tools test run --cmd ava -- -T 20s --verbose system-test/*.test.js"
},
"dependencies": {
@@ -24,7 +22,6 @@
"devDependencies": {
"@google-cloud/nodejs-repo-tools": "^3.0.0",
"ava": "0.25.0",
- "semistandard": "^13.0.1",
"uuid": "^3.2.1"
},
"cloud-repo-tools": {
diff --git a/kms/quickstart.js b/kms/quickstart.js
index aa3b78ad6b..2ffa8be958 100644
--- a/kms/quickstart.js
+++ b/kms/quickstart.js
@@ -17,7 +17,7 @@
// [START kms_quickstart]
// Imports the Google APIs client library
-const { google } = require('googleapis');
+const {google} = require('googleapis');
// Your Google Cloud Platform project ID
const projectId = process.env.GCLOUD_PROJECT;
@@ -26,31 +26,33 @@ const projectId = process.env.GCLOUD_PROJECT;
const location = 'global';
// Acquires credentials
-google.auth.getClient({ scopes: ['https://www.googleapis.com/auth/cloud-platform'] }).then(auth => {
- // Instantiates an authorized client
- const cloudkms = google.cloudkms({
- version: 'v1',
- auth
+google.auth
+ .getClient({scopes: ['https://www.googleapis.com/auth/cloud-platform']})
+ .then(auth => {
+ // Instantiates an authorized client
+ const cloudkms = google.cloudkms({
+ version: 'v1',
+ auth,
+ });
+ const request = {
+ parent: `projects/${projectId}/locations/${location}`,
+ };
+
+ // Lists key rings
+ cloudkms.projects.locations.keyRings.list(request, (err, result) => {
+ if (err) {
+ console.error(err);
+ return;
+ }
+
+ const keyRings = result.data.keyRings || [];
+
+ if (keyRings.length) {
+ console.log('Key rings:');
+ keyRings.forEach(keyRing => console.log(keyRing.name));
+ } else {
+ console.log(`No key rings found.`);
+ }
+ });
});
- const request = {
- parent: `projects/${projectId}/locations/${location}`
- };
-
- // Lists key rings
- cloudkms.projects.locations.keyRings.list(request, (err, result) => {
- if (err) {
- console.error(err);
- return;
- }
-
- const keyRings = result.data.keyRings || [];
-
- if (keyRings.length) {
- console.log('Key rings:');
- keyRings.forEach((keyRing) => console.log(keyRing.name));
- } else {
- console.log(`No key rings found.`);
- }
- });
-});
// [END kms_quickstart]
diff --git a/kms/system-test/keys.test.js b/kms/system-test/keys.test.js
index 116406916e..2ab6eb6472 100644
--- a/kms/system-test/keys.test.js
+++ b/kms/system-test/keys.test.js
@@ -38,7 +38,7 @@ const formattedKeyRingName = `projects/${projectId}/locations/global/keyRings/${
const formattedKeyName = `${formattedKeyRingName}/cryptoKeys/${keyNameOne}`;
test.before(tools.checkCredentials);
-test.before.cb((t) => {
+test.before.cb(t => {
// Delete the ciphertext file, if it exists
fs.unlink(ciphertext, () => {
// Delete the decrypted file, if it exists
@@ -46,7 +46,7 @@ test.before.cb((t) => {
});
});
-test.after.always.cb((t) => {
+test.after.always.cb(t => {
// Delete the ciphertext file, if it exists
fs.unlink(ciphertext, () => {
// Delete the decrypted file, if it exists
@@ -59,8 +59,11 @@ test.afterEach.always(tools.restoreConsole);
// Key ring tests
-test.serial(`should create a key ring`, async (t) => {
- const output = await tools.runAsync(`${cmd} keyrings create "${keyRingName}"`, cwd);
+test.serial(`should create a key ring`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} keyrings create "${keyRingName}"`,
+ cwd
+ );
if (!output.includes(`KeyRing ${formattedKeyRingName} already exists`)) {
t.regex(output, new RegExp(`Key ring ${formattedKeyRingName} created.`));
} else {
@@ -68,47 +71,79 @@ test.serial(`should create a key ring`, async (t) => {
}
});
-test.serial(`should list key rings`, async (t) => {
- await tools.tryTest(async () => {
- const output = await tools.runAsync(`${cmd} keyrings list`, cwd);
- t.regex(output, new RegExp(unspecifiedKeyRingName));
- }).start();
+test.serial(`should list key rings`, async t => {
+ await tools
+ .tryTest(async () => {
+ const output = await tools.runAsync(`${cmd} keyrings list`, cwd);
+ t.regex(output, new RegExp(unspecifiedKeyRingName));
+ })
+ .start();
});
-test.serial(`should get a key ring`, async (t) => {
- const output = await tools.runAsync(`${cmd} keyrings get "${keyRingName}"`, cwd);
+test.serial(`should get a key ring`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} keyrings get "${keyRingName}"`,
+ cwd
+ );
t.regex(output, new RegExp(`Name: ${formattedKeyRingName}`));
t.regex(output, new RegExp(`Created: `));
});
// Key ring IAM tests
-test.serial(`should get a key ring's empty IAM policy`, async (t) => {
- const output = await tools.runAsync(`${cmd} keyrings get-policy "${keyRingName}"`, cwd);
+test.serial(`should get a key ring's empty IAM policy`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} keyrings get-policy "${keyRingName}"`,
+ cwd
+ );
t.regex(output, new RegExp(`Policy for key ring ${keyRingName} is empty.`));
});
-test.serial(`should grant access to a key ring`, async (t) => {
- const output = await tools.runAsync(`${cmd} keyrings grant-access "${keyRingName}" ${member} ${role}`, cwd);
- t.regex(output, new RegExp(`${member}/${role} combo added to policy for key ring ${keyRingName}.`));
+test.serial(`should grant access to a key ring`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} keyrings grant-access "${keyRingName}" ${member} ${role}`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(
+ `${member}/${role} combo added to policy for key ring ${keyRingName}.`
+ )
+ );
});
-test.serial(`should get a key ring's updated IAM policy`, async (t) => {
- await tools.tryTest(async () => {
- const output = await tools.runAsync(`${cmd} keyrings get-policy "${keyRingName}"`, cwd);
- t.regex(output, new RegExp(`${role}:`));
- t.regex(output, new RegExp(` ${member}`));
- }).start();
+test.serial(`should get a key ring's updated IAM policy`, async t => {
+ await tools
+ .tryTest(async () => {
+ const output = await tools.runAsync(
+ `${cmd} keyrings get-policy "${keyRingName}"`,
+ cwd
+ );
+ t.regex(output, new RegExp(`${role}:`));
+ t.regex(output, new RegExp(` ${member}`));
+ })
+ .start();
});
-test.serial(`should revoke access to a key ring`, async (t) => {
- const output = await tools.runAsync(`${cmd} keyrings revoke-access "${keyRingName}" ${member} ${role}`, cwd);
- t.regex(output, new RegExp(`${member}/${role} combo removed from policy for key ring ${keyRingName}.`));
+test.serial(`should revoke access to a key ring`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} keyrings revoke-access "${keyRingName}" ${member} ${role}`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(
+ `${member}/${role} combo removed from policy for key ring ${keyRingName}.`
+ )
+ );
});
// Crypto key tests
-test.serial(`should create a key`, async (t) => {
- const output = await tools.runAsync(`${cmd} create "${keyRingName}" "${keyNameOne}"`, cwd);
+test.serial(`should create a key`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} create "${keyRingName}" "${keyNameOne}"`,
+ cwd
+ );
if (!output.includes(`CryptoKey ${formattedKeyName} already exists`)) {
t.regex(output, new RegExp(`Key ${formattedKeyName} created.`));
} else {
@@ -116,90 +151,182 @@ test.serial(`should create a key`, async (t) => {
}
});
-test.serial(`should list keys`, async (t) => {
- await tools.tryTest(async () => {
- const output = await tools.runAsync(`${cmd} list "${keyRingName}"`, cwd);
- t.regex(output, new RegExp(formattedKeyName));
- }).start();
+test.serial(`should list keys`, async t => {
+ await tools
+ .tryTest(async () => {
+ const output = await tools.runAsync(`${cmd} list "${keyRingName}"`, cwd);
+ t.regex(output, new RegExp(formattedKeyName));
+ })
+ .start();
});
-test.serial(`should get a key`, async (t) => {
- const output = await tools.runAsync(`${cmd} get "${keyRingName}" "${keyNameOne}"`, cwd);
+test.serial(`should get a key`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} get "${keyRingName}" "${keyNameOne}"`,
+ cwd
+ );
t.regex(output, new RegExp(`Name: ${formattedKeyName}`));
t.regex(output, new RegExp(`Created: `));
});
-test.serial(`should set a crypto key's primary version`, async (t) => {
- const output = await tools.runAsync(`${cmd} set-primary "${keyRingName}" "${keyNameOne}" 1`, cwd);
- t.regex(output, new RegExp(`Set 1 as primary version for crypto key ${keyNameOne}.\n`));
+test.serial(`should set a crypto key's primary version`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} set-primary "${keyRingName}" "${keyNameOne}" 1`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(`Set 1 as primary version for crypto key ${keyNameOne}.\n`)
+ );
});
-test.serial(`should encrypt a file`, async (t) => {
- const output = await tools.runAsync(`${cmd} encrypt "${keyRingName}" "${keyNameOne}" "${plaintext}" "${ciphertext}"`, cwd);
- t.regex(output, new RegExp(`Encrypted ${plaintext} using ${formattedKeyName}/cryptoKeyVersions/1.`));
+test.serial(`should encrypt a file`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} encrypt "${keyRingName}" "${keyNameOne}" "${plaintext}" "${ciphertext}"`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(
+ `Encrypted ${plaintext} using ${formattedKeyName}/cryptoKeyVersions/1.`
+ )
+ );
t.regex(output, new RegExp(`Result saved to ${ciphertext}.`));
});
-test.serial(`should decrypt a file`, async (t) => {
- const output = await tools.runAsync(`${cmd} decrypt "${keyRingName}" "${keyNameOne}" "${ciphertext}" "${decrypted}"`, cwd);
- t.regex(output, new RegExp(`Decrypted ${ciphertext}, result saved to ${decrypted}.`));
+test.serial(`should decrypt a file`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} decrypt "${keyRingName}" "${keyNameOne}" "${ciphertext}" "${decrypted}"`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(`Decrypted ${ciphertext}, result saved to ${decrypted}.`)
+ );
t.is(fs.readFileSync(plaintext, 'utf8'), fs.readFileSync(decrypted, 'utf8'));
});
-test.serial(`should create a crypto key version`, async (t) => {
- const output = await tools.runAsync(`${cmd} versions create "${keyRingName}" "${keyNameOne}"`, cwd);
- t.regex(output, new RegExp(`Crypto key version ${formattedKeyName}/cryptoKeyVersions/`));
+test.serial(`should create a crypto key version`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} versions create "${keyRingName}" "${keyNameOne}"`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(`Crypto key version ${formattedKeyName}/cryptoKeyVersions/`)
+ );
t.regex(output, new RegExp(` created.`));
});
-test.serial(`should list crypto key versions`, async (t) => {
- await tools.tryTest(async () => {
- const output = await tools.runAsync(`${cmd} versions list "${keyRingName}" "${keyNameOne}"`, cwd);
- t.regex(output, new RegExp(`${formattedKeyName}/cryptoKeyVersions/1`));
- }).start();
+test.serial(`should list crypto key versions`, async t => {
+ await tools
+ .tryTest(async () => {
+ const output = await tools.runAsync(
+ `${cmd} versions list "${keyRingName}" "${keyNameOne}"`,
+ cwd
+ );
+ t.regex(output, new RegExp(`${formattedKeyName}/cryptoKeyVersions/1`));
+ })
+ .start();
});
-test.serial(`should destroy a crypto key version`, async (t) => {
- const output = await tools.runAsync(`${cmd} versions destroy "${keyRingName}" "${keyNameOne}" 2`, cwd);
- t.regex(output, new RegExp(`Crypto key version ${formattedKeyName}/cryptoKeyVersions/2 destroyed.`));
+test.serial(`should destroy a crypto key version`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} versions destroy "${keyRingName}" "${keyNameOne}" 2`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(
+ `Crypto key version ${formattedKeyName}/cryptoKeyVersions/2 destroyed.`
+ )
+ );
});
-test.serial(`should restore a crypto key version`, async (t) => {
- const output = await tools.runAsync(`${cmd} versions restore "${keyRingName}" "${keyNameOne}" 2`, cwd);
- t.regex(output, new RegExp(`Crypto key version ${formattedKeyName}/cryptoKeyVersions/2 restored.`));
+test.serial(`should restore a crypto key version`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} versions restore "${keyRingName}" "${keyNameOne}" 2`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(
+ `Crypto key version ${formattedKeyName}/cryptoKeyVersions/2 restored.`
+ )
+ );
});
-test.serial(`should enable a crypto key version`, async (t) => {
- const output = await tools.runAsync(`${cmd} versions enable "${keyRingName}" "${keyNameOne}" 2`, cwd);
- t.regex(output, new RegExp(`Crypto key version ${formattedKeyName}/cryptoKeyVersions/2 enabled.`));
+test.serial(`should enable a crypto key version`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} versions enable "${keyRingName}" "${keyNameOne}" 2`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(
+ `Crypto key version ${formattedKeyName}/cryptoKeyVersions/2 enabled.`
+ )
+ );
});
-test.serial(`should disable a crypto key version`, async (t) => {
- const output = await tools.runAsync(`${cmd} versions disable "${keyRingName}" "${keyNameOne}" 2`, cwd);
- t.regex(output, new RegExp(`Crypto key version ${formattedKeyName}/cryptoKeyVersions/2 disabled.`));
+test.serial(`should disable a crypto key version`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} versions disable "${keyRingName}" "${keyNameOne}" 2`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(
+ `Crypto key version ${formattedKeyName}/cryptoKeyVersions/2 disabled.`
+ )
+ );
});
// Crypto key IAM tests
-test.serial(`should get a crypto key's empty IAM policy`, async (t) => {
- const output = await tools.runAsync(`${cmd} get-policy "${keyRingName}" "${keyNameOne}"`, cwd);
+test.serial(`should get a crypto key's empty IAM policy`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} get-policy "${keyRingName}" "${keyNameOne}"`,
+ cwd
+ );
t.regex(output, new RegExp(`Policy for crypto key ${keyNameOne} is empty.`));
});
-test.serial(`should grant access to a crypto key`, async (t) => {
- const output = await tools.runAsync(`${cmd} grant-access "${keyRingName}" "${keyNameOne}" ${member} ${role}`, cwd);
- t.regex(output, new RegExp(`${member}/${role} combo added to policy for crypto key ${keyNameOne}.`));
+test.serial(`should grant access to a crypto key`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} grant-access "${keyRingName}" "${keyNameOne}" ${member} ${role}`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(
+ `${member}/${role} combo added to policy for crypto key ${keyNameOne}.`
+ )
+ );
});
-test.serial(`should get a crypto key's updated IAM policy`, async (t) => {
- await tools.tryTest(async () => {
- const output = await tools.runAsync(`${cmd} get-policy "${keyRingName}" "${keyNameOne}"`, cwd);
- t.regex(output, new RegExp(`${role}:`));
- t.regex(output, new RegExp(` ${member}`));
- }).start();
+test.serial(`should get a crypto key's updated IAM policy`, async t => {
+ await tools
+ .tryTest(async () => {
+ const output = await tools.runAsync(
+ `${cmd} get-policy "${keyRingName}" "${keyNameOne}"`,
+ cwd
+ );
+ t.regex(output, new RegExp(`${role}:`));
+ t.regex(output, new RegExp(` ${member}`));
+ })
+ .start();
});
-test.serial(`should revoke access to a crypto key`, async (t) => {
- const output = await tools.runAsync(`${cmd} revoke-access "${keyRingName}" "${keyNameOne}" ${member} ${role}`, cwd);
- t.regex(output, new RegExp(`${member}/${role} combo removed from policy for crypto key ${keyNameOne}.`));
+test.serial(`should revoke access to a crypto key`, async t => {
+ const output = await tools.runAsync(
+ `${cmd} revoke-access "${keyRingName}" "${keyNameOne}" ${member} ${role}`,
+ cwd
+ );
+ t.regex(
+ output,
+ new RegExp(
+ `${member}/${role} combo removed from policy for crypto key ${keyNameOne}.`
+ )
+ );
});
diff --git a/language/slackbot/demo_bot.js b/language/slackbot/demo_bot.js
index 6f8c5242e1..16486f0f9b 100755
--- a/language/slackbot/demo_bot.js
+++ b/language/slackbot/demo_bot.js
@@ -48,7 +48,7 @@ const Language = require('@google-cloud/language');
const path = require('path');
const sqlite3 = require('sqlite3').verbose();
-const controller = Botkit.slackbot({ debug: false });
+const controller = Botkit.slackbot({debug: false});
// create our database if it does not already exist.
const db = new sqlite3.cached.Database(path.join(__dirname, './slackDB.db'));
@@ -79,71 +79,69 @@ const TABLE_SQL = `CREATE TABLE if not exists entities (
ts integer
);`;
-function startController () {
+function startController() {
if (!process.env.SLACK_TOKEN_PATH) {
throw new Error('Please set the SLACK_TOKEN_PATH environment variable!');
}
- let token = fs.readFileSync(process.env.SLACK_TOKEN_PATH, { encoding: 'utf8' });
+ let token = fs.readFileSync(process.env.SLACK_TOKEN_PATH, {encoding: 'utf8'});
token = token.replace(/\s/g, '');
// Create the table that will store entity information if it does not already
// exist.
db.run(TABLE_SQL);
- controller
- .spawn({ token: token })
- .startRTM((err) => {
- if (err) {
- console.error('Failed to start controller!');
- console.error(err);
- process.exit(1);
- }
- });
+ controller.spawn({token: token}).startRTM(err => {
+ if (err) {
+ console.error('Failed to start controller!');
+ console.error(err);
+ throw err;
+ }
+ });
- return controller
- // If the bot gets a DM or mention with 'hello' or 'hi', it will reply. You
- // can use this to sanity-check your app without needing to use the NL API.
- .hears(
- ['hello', 'hi'],
- ['direct_message', 'direct_mention', 'mention'],
- handleSimpleReply
- )
- // If the bot gets a DM or mention including "top entities", it will reply with
- // a list of the top N most frequent entities used in this channel, as derived
- // by the NL API.
- .hears(
- ['top entities'],
- ['direct_message', 'direct_mention', 'mention'],
- handleEntitiesReply
- )
- // For any posted message, the bot will send the text to the NL API for
- // analysis.
- .on('ambient', handleAmbientMessage)
- .on('rtm_close', startBot);
+ return (
+ controller
+ // If the bot gets a DM or mention with 'hello' or 'hi', it will reply. You
+ // can use this to sanity-check your app without needing to use the NL API.
+ .hears(
+ ['hello', 'hi'],
+ ['direct_message', 'direct_mention', 'mention'],
+ handleSimpleReply
+ )
+ // If the bot gets a DM or mention including "top entities", it will reply with
+ // a list of the top N most frequent entities used in this channel, as derived
+ // by the NL API.
+ .hears(
+ ['top entities'],
+ ['direct_message', 'direct_mention', 'mention'],
+ handleEntitiesReply
+ )
+ // For any posted message, the bot will send the text to the NL API for
+ // analysis.
+ .on('ambient', handleAmbientMessage)
+ .on('rtm_close', startBot)
+ );
}
-function startBot (bot, cerr) {
+function startBot(bot) {
console.error('RTM closed');
- let token = fs.readFileSync(process.env.SLACK_TOKEN_PATH, { encoding: 'utf8' });
+ let token = fs.readFileSync(process.env.SLACK_TOKEN_PATH, {encoding: 'utf8'});
token = token.replace(/\s/g, '');
- bot
- .spawn({ token: token })
- .startRTM((err) => {
- if (err) {
- console.error('Failed to start controller!');
- console.error(err);
- process.exit(1);
- }
- });
+ bot.spawn({token: token}).startRTM(err => {
+ if (err) {
+ console.error('Failed to start controller!');
+ console.error(err);
+ throw err;
+ }
+ });
}
-function handleSimpleReply (bot, message) {
+function handleSimpleReply(bot, message) {
bot.reply(message, 'Hello.');
}
-function handleEntitiesReply (bot, message) {
+function handleEntitiesReply(bot, message) {
bot.reply(message, 'Top entities: ');
// Query the database for the top N entities in the past week
@@ -160,15 +158,17 @@ function handleEntitiesReply (bot, message) {
// Uncomment this to see the query results logged to console:
// console.log(topEntities);
- topEntities.forEach((entity) => {
- entityInfo += `entity: *${entity.name}*, type: ${entity.type}, count: ${entity.wc}\n`;
+ topEntities.forEach(entity => {
+ entityInfo += `entity: *${entity.name}*, type: ${entity.type}, count: ${
+ entity.wc
+ }\n`;
});
bot.reply(message, entityInfo);
});
}
-function analyzeEntities (text, ts) {
+function analyzeEntities(text, ts) {
// Instantiates a client
const language = Language();
@@ -177,36 +177,38 @@ function analyzeEntities (text, ts) {
// The document text, e.g. "Hello, world!"
content: text,
// The type of content to analyze
- type: 'PLAIN_TEXT'
+ type: 'PLAIN_TEXT',
};
// Detects entities in the document
- return language.analyzeEntities({ document: document })
- .then((results) => {
- const entities = results[0].entities;
- entities.forEach((entity) => {
- const name = entity.name;
- const type = entity.type;
- const salience = entity.salience;
- let wikiUrl = '';
- if (entity.metadata.wikipedia_url) {
- wikiUrl = entity.metadata.wikipedia_url;
- }
-
- // Uncomment this to see the entity info logged to console:
- // console.log(`${name}, type: ${type}, w url: ${wikiUrl}, salience: ${salience}, ts: ${ts}`);
-
- db.run(
- 'INSERT INTO entities VALUES (?, ?, ?, ?, ?);',
- [name, type, salience, wikiUrl, Math.round(ts)]
- );
- });
-
- return entities;
+ return language.analyzeEntities({document: document}).then(results => {
+ const entities = results[0].entities;
+ entities.forEach(entity => {
+ const name = entity.name;
+ const type = entity.type;
+ const salience = entity.salience;
+ let wikiUrl = '';
+ if (entity.metadata.wikipedia_url) {
+ wikiUrl = entity.metadata.wikipedia_url;
+ }
+
+ // Uncomment this to see the entity info logged to console:
+ // console.log(`${name}, type: ${type}, w url: ${wikiUrl}, salience: ${salience}, ts: ${ts}`);
+
+ db.run('INSERT INTO entities VALUES (?, ?, ?, ?, ?);', [
+ name,
+ type,
+ salience,
+ wikiUrl,
+ Math.round(ts),
+ ]);
});
+
+ return entities;
+ });
}
-function analyzeSentiment (text) {
+function analyzeSentiment(text) {
// Instantiates a client
const language = Language();
@@ -215,33 +217,32 @@ function analyzeSentiment (text) {
// The document text, e.g. "Hello, world!"
content: text,
// The type of content to analyze
- type: 'PLAIN_TEXT'
+ type: 'PLAIN_TEXT',
};
// Detects the 'sentiment' of some text using the NL API
- return language.analyzeSentiment({ document: document })
- .then((results) => {
- const sentiment = results[0];
-
- // Uncomment the following lines to log the sentiment to the console:
- // console.log(`Sentiment: ${sentiment}`)
- // if (sentiment.score >= SENTIMENT_THRESHOLD) {
- // console.log('Sentiment: positive.');
- // } else if (sentiment.score <= -SENTIMENT_THRESHOLD) {
- // console.log('Sentiment: negative.');
- // }
-
- return sentiment;
- });
+ return language.analyzeSentiment({document: document}).then(results => {
+ const sentiment = results[0];
+
+ // Uncomment the following lines to log the sentiment to the console:
+ // console.log(`Sentiment: ${sentiment}`)
+ // if (sentiment.score >= SENTIMENT_THRESHOLD) {
+ // console.log('Sentiment: positive.');
+ // } else if (sentiment.score <= -SENTIMENT_THRESHOLD) {
+ // console.log('Sentiment: negative.');
+ // }
+
+ return sentiment;
+ });
}
-function handleAmbientMessage (bot, message) {
+function handleAmbientMessage(bot, message) {
// Note: for purposes of this example, we're making two separate calls to the
// API, one to extract the entities from the message, and one to analyze the
// 'sentiment' of the message. These could be combined into one call.
return analyzeEntities(message.text, message.ts)
.then(() => analyzeSentiment(message.text))
- .then((sentiment) => {
+ .then(sentiment => {
if (sentiment.score >= SENTIMENT_THRESHOLD) {
// We have a positive sentiment score larger than the threshold.
bot.reply(message, ':thumbsup:');
diff --git a/language/slackbot/package.json b/language/slackbot/package.json
index af58f30b61..a5eaf5d47e 100644
--- a/language/slackbot/package.json
+++ b/language/slackbot/package.json
@@ -15,11 +15,9 @@
},
"main": "demo_bot.js",
"engines": {
- "node": ">=4.3.2"
+ "node": ">=8.0.0"
},
"scripts": {
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"test": "repo-tools test run --cmd ava -- -T 20s --verbose system-test/*.test.js"
},
"dependencies": {
@@ -31,7 +29,6 @@
"@google-cloud/nodejs-repo-tools": "^3.0.0",
"ava": "0.25.0",
"proxyquire": "1.8.0",
- "semistandard": "^12.0.1",
"sinon": "3.2.0"
}
}
diff --git a/language/slackbot/system-test/controller.test.js b/language/slackbot/system-test/controller.test.js
index 8e9008ae96..34ae06ce22 100644
--- a/language/slackbot/system-test/controller.test.js
+++ b/language/slackbot/system-test/controller.test.js
@@ -25,16 +25,16 @@ const SLACK_TOKEN_PATH = path.join(__dirname, `../.token`);
let controllerMock, botkitMock, program, originalToken;
-test.before((t) => {
+test.before(() => {
originalToken = process.env.SLACK_TOKEN_PATH;
controllerMock = {
spawn: sinon.stub().returnsThis(),
startRTM: sinon.stub().returnsThis(),
hears: sinon.stub().returnsThis(),
- on: sinon.stub().returnsThis()
+ on: sinon.stub().returnsThis(),
};
botkitMock = {
- slackbot: sinon.stub().returns(controllerMock)
+ slackbot: sinon.stub().returns(controllerMock),
};
program = proxyquire(`../demo_bot`, {
botkit: botkitMock,
@@ -42,15 +42,15 @@ test.before((t) => {
verbose: sinon.stub().returns({
cached: {
Database: sinon.stub().returns({
- run: sinon.stub()
- })
- }
- })
- }
+ run: sinon.stub(),
+ }),
+ },
+ }),
+ },
});
});
-test.after.always((t) => {
+test.after.always(() => {
process.env.SLACK_TOKEN_PATH = originalToken;
try {
fs.unlinkSync(SLACK_TOKEN_PATH);
@@ -59,18 +59,22 @@ test.after.always((t) => {
}
});
-test(`should check SLACK_TOKEN_PATH`, (t) => {
+test(`should check SLACK_TOKEN_PATH`, t => {
process.env.SLACK_TOKEN_PATH = ``;
- t.throws(() => {
- program.startController();
- }, Error, `Please set the SLACK_TOKEN_PATH environment variable!`);
+ t.throws(
+ () => {
+ program.startController();
+ },
+ Error,
+ `Please set the SLACK_TOKEN_PATH environment variable!`
+ );
});
-test(`should start the controller`, (t) => {
+test(`should start the controller`, t => {
let controller;
- fs.writeFileSync(SLACK_TOKEN_PATH, `test`, { encoding: `utf8` });
+ fs.writeFileSync(SLACK_TOKEN_PATH, `test`, {encoding: `utf8`});
process.env.SLACK_TOKEN_PATH = SLACK_TOKEN_PATH;
controller = program.startController();
diff --git a/language/slackbot/system-test/demo_bot.test.js b/language/slackbot/system-test/demo_bot.test.js
index 2406bcfdca..b7f21f133b 100644
--- a/language/slackbot/system-test/demo_bot.test.js
+++ b/language/slackbot/system-test/demo_bot.test.js
@@ -30,8 +30,8 @@ const text = `President Obama is speaking at the White House. He is announcing a
let db, controllerMock, botkitMock, botMock, program;
test.before(tools.checkCredentials);
-test.before.cb((t) => {
- fs.unlink(DB_PATH, (err) => {
+test.before.cb(t => {
+ fs.unlink(DB_PATH, err => {
if (err && err.code !== `ENOENT`) {
t.end(err);
return;
@@ -42,27 +42,27 @@ test.before.cb((t) => {
spawn: sinon.stub().returnsThis(),
startRTM: sinon.stub().returnsThis(),
hears: sinon.stub().returnsThis(),
- on: sinon.stub().returnsThis()
+ on: sinon.stub().returnsThis(),
};
botkitMock = {
- slackbot: sinon.stub().returns(controllerMock)
+ slackbot: sinon.stub().returns(controllerMock),
};
botMock = {
- reply: sinon.stub()
+ reply: sinon.stub(),
};
program = proxyquire(`../demo_bot`, {
- botkit: botkitMock
+ botkit: botkitMock,
});
db.run(program.TABLE_SQL, t.end);
});
});
-test.after.cb.always((t) => {
- fs.unlink(DB_PATH, (err) => {
+test.after.cb.always(t => {
+ fs.unlink(DB_PATH, err => {
if (err) {
t.end(err);
return;
@@ -76,15 +76,15 @@ test.after.cb.always((t) => {
});
});
-test.serial(`should analyze sentiment in text`, async (t) => {
+test.serial(`should analyze sentiment in text`, async t => {
const results = await program.analyzeSentiment(text);
t.is(results.documentSentiment.score > 0, true);
});
-test.serial(`should analyze entities in text`, async (t) => {
+test.serial(`should analyze entities in text`, async t => {
const entities = await program.analyzeEntities(text, Date.now());
- t.is(entities.some((entity) => entity.name === `Obama`), true);
- t.is(entities.some((entity) => entity.name === `White House`), true);
+ t.is(entities.some(entity => entity.name === `Obama`), true);
+ t.is(entities.some(entity => entity.name === `White House`), true);
await new Promise((resolve, reject) => {
setTimeout(() => {
@@ -93,15 +93,15 @@ test.serial(`should analyze entities in text`, async (t) => {
reject(err);
return;
}
- t.is(entities.some((entity) => entity.name === `Obama`), true);
- t.is(entities.some((entity) => entity.name === `White House`), true);
+ t.is(entities.some(entity => entity.name === `Obama`), true);
+ t.is(entities.some(entity => entity.name === `White House`), true);
resolve();
});
}, 1000);
});
});
-test.serial(`should reply to simple hello message`, (t) => {
+test.serial(`should reply to simple hello message`, t => {
const message = {};
program.handleSimpleReply(botMock, message);
@@ -110,7 +110,7 @@ test.serial(`should reply to simple hello message`, (t) => {
t.deepEqual(botMock.reply.getCall(0).args, [message, `Hello.`]);
});
-test.cb.serial(`should reply to entities message`, (t) => {
+test.cb.serial(`should reply to entities message`, t => {
const message = {};
program.handleEntitiesReply(botMock, message);
@@ -119,7 +119,10 @@ test.cb.serial(`should reply to entities message`, (t) => {
try {
t.is(botMock.reply.callCount, 3);
t.deepEqual(botMock.reply.getCall(1).args, [message, `Top entities: `]);
- t.deepEqual(botMock.reply.getCall(2).args, [message, `entity: *Obama*, type: PERSON, count: 1\nentity: *White House*, type: LOCATION, count: 1\nentity: *cookie recipe*, type: WORK_OF_ART, count: 1\n`]);
+ t.deepEqual(botMock.reply.getCall(2).args, [
+ message,
+ `entity: *Obama*, type: PERSON, count: 1\nentity: *White House*, type: LOCATION, count: 1\nentity: *cookie recipe*, type: WORK_OF_ART, count: 1\n`,
+ ]);
t.end();
} catch (err) {
t.end(err);
diff --git a/memorystore/redis/server.js b/memorystore/redis/server.js
index a3e6df09d7..0aff1a06dc 100644
--- a/memorystore/redis/server.js
+++ b/memorystore/redis/server.js
@@ -21,19 +21,21 @@ const REDISHOST = process.env.REDISHOST || 'localhost';
const REDISPORT = process.env.REDISPORT || 6379;
const client = redis.createClient(REDISPORT, REDISHOST);
-client.on('error', (err) => console.error('ERR:REDIS:', err));
+client.on('error', err => console.error('ERR:REDIS:', err));
// create a server
-http.createServer((req, res) => {
-// increment the visit counter
- client.incr('visits', (err, reply) => {
- if (err) {
- console.log(err);
- res.status(500).send(err.message);
- return;
- }
- res.writeHead(200, { 'Content-Type': 'text/plain' });
- res.end(`Visitor number: ${reply}\n`);
- });
-}).listen(8080);
+http
+ .createServer((req, res) => {
+ // increment the visit counter
+ client.incr('visits', (err, reply) => {
+ if (err) {
+ console.log(err);
+ res.status(500).send(err.message);
+ return;
+ }
+ res.writeHead(200, {'Content-Type': 'text/plain'});
+ res.end(`Visitor number: ${reply}\n`);
+ });
+ })
+ .listen(8080);
// [END memorystore_server_js]
diff --git a/package.json b/package.json
index 2947c02545..b4d4ab548e 100644
--- a/package.json
+++ b/package.json
@@ -13,22 +13,8 @@
},
"main": "src/index.js",
"private": true,
- "semistandard": {
- "globals": [
- "rm",
- "exec",
- "cd",
- "ls"
- ],
- "ignore": [
- "appengine/loopback",
- "appengine/parse-server/cloud/main.js",
- "**/node_modules/**",
- "coverage"
- ]
- },
"scripts": {
- "lint": "semistandard '**/*.js'",
+ "lint": "eslint '**/*.js'",
"pretest": "npm run lint",
"unit-cover": "nyc --cache npm test && nyc report --reporter=html",
"system-cover": "nyc --cache npm run system-test && nyc report --reporter=html",
@@ -39,8 +25,11 @@
"@google-cloud/nodejs-repo-tools": "^2.3.6",
"@google-cloud/storage": "1.7.0",
"ava": "0.25.0",
+ "eslint": "^5.9.0",
+ "eslint-config-prettier": "^3.3.0",
+ "eslint-plugin-node": "^8.0.0",
+ "eslint-plugin-prettier": "^3.0.0",
"nyc": "13.0.1",
- "semistandard": "^12.0.1",
- "shelljs": "0.8.2"
+ "prettier": "^1.15.2"
}
}
diff --git a/storage-transfer/package.json b/storage-transfer/package.json
index 9194ac3258..374e5ffd29 100644
--- a/storage-transfer/package.json
+++ b/storage-transfer/package.json
@@ -9,11 +9,9 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=6"
+ "node": ">=8"
},
"scripts": {
- "lint": "semistandard '**/*.js'",
- "pretest": "npm run lint",
"unit-test": "repo-tools test run --cmd ava -- -T 20s --verbose test/*.test.js",
"system-test": "repo-tools test run --cmd ava -- -T 20s --verbose system-test/*.test.js",
"test": "npm run unit-test && npm run system-test"
@@ -29,7 +27,6 @@
"@google-cloud/nodejs-repo-tools": "^3.0.0",
"ava": "0.25.0",
"proxyquire": "2.0.1",
- "semistandard": "^12.0.1",
"sinon": "4.4.8",
"uuid": "3.3.2"
},
diff --git a/storage-transfer/system-test/transfer.test.js b/storage-transfer/system-test/transfer.test.js
index e27ed04379..e8c0fe14d0 100644
--- a/storage-transfer/system-test/transfer.test.js
+++ b/storage-transfer/system-test/transfer.test.js
@@ -13,6 +13,7 @@
* limitations under the License.
*/
+/* eslint no-empty: 0 */
'use strict';
const storage = require(`@google-cloud/storage`)();
@@ -37,18 +38,16 @@ test.before(async () => {
const bucketOptions = {
entity: 'allUsers',
- role: storage.acl.WRITER_ROLE
+ role: storage.acl.WRITER_ROLE,
};
- await storage.createBucket(firstBucketName)
- .then((data) => {
- const bucket = data[0];
- return bucket.acl.add(bucketOptions);
- });
- await storage.createBucket(secondBucketName)
- .then((data) => {
- const bucket = data[0];
- return bucket.acl.add(bucketOptions);
- });
+ await storage.createBucket(firstBucketName).then(data => {
+ const bucket = data[0];
+ return bucket.acl.add(bucketOptions);
+ });
+ await storage.createBucket(secondBucketName).then(data => {
+ const bucket = data[0];
+ return bucket.acl.add(bucketOptions);
+ });
});
test.after.always(async () => {
@@ -56,32 +55,32 @@ test.after.always(async () => {
const bucketOne = storage.bucket(firstBucketName);
const bucketTwo = storage.bucket(secondBucketName);
try {
- bucketOne.deleteFiles({ force: true });
+ bucketOne.deleteFiles({force: true});
} catch (err) {} // ignore error
try {
- bucketOne.deleteFiles({ force: true });
+ bucketOne.deleteFiles({force: true});
} catch (err) {} // ignore error
try {
bucketOne.delete();
} catch (err) {} // ignore error
try {
- bucketTwo.deleteFiles({ force: true });
+ bucketTwo.deleteFiles({force: true});
} catch (err) {} // ignore error
try {
- bucketTwo.deleteFiles({ force: true });
+ bucketTwo.deleteFiles({force: true});
} catch (err) {} // ignore error
try {
bucketTwo.delete();
} catch (err) {} // ignore error
});
-test.cb.serial(`should create a storage transfer job`, (t) => {
+test.cb.serial(`should create a storage transfer job`, t => {
const options = {
srcBucket: firstBucketName,
destBucket: secondBucketName,
date: date,
time: time,
- description: description
+ description: description,
};
program.createTransferJob(options, (err, transferJob) => {
@@ -90,12 +89,14 @@ test.cb.serial(`should create a storage transfer job`, (t) => {
t.is(transferJob.name.indexOf(`transferJobs/`), 0);
t.is(transferJob.description, description);
t.is(transferJob.status, `ENABLED`);
- t.true(console.log.calledWith(`Created transfer job: %s`, transferJob.name));
+ t.true(
+ console.log.calledWith(`Created transfer job: %s`, transferJob.name)
+ );
setTimeout(t.end, 2000);
});
});
-test.cb.serial(`should get a transferJob`, (t) => {
+test.cb.serial(`should get a transferJob`, t => {
program.getTransferJob(jobName, (err, transferJob) => {
t.ifError(err);
t.is(transferJob.name, jobName);
@@ -106,11 +107,11 @@ test.cb.serial(`should get a transferJob`, (t) => {
});
});
-test.cb.serial(`should update a transferJob`, (t) => {
+test.cb.serial(`should update a transferJob`, t => {
var options = {
job: jobName,
field: `status`,
- value: status
+ value: status,
};
program.updateTransferJob(options, (err, transferJob) => {
@@ -118,23 +119,27 @@ test.cb.serial(`should update a transferJob`, (t) => {
t.is(transferJob.name, jobName);
t.is(transferJob.description, description);
t.is(transferJob.status, status);
- t.true(console.log.calledWith(`Updated transfer job: %s`, transferJob.name));
+ t.true(
+ console.log.calledWith(`Updated transfer job: %s`, transferJob.name)
+ );
setTimeout(t.end, 2000);
});
});
-test.cb.serial(`should list transferJobs`, (t) => {
+test.cb.serial(`should list transferJobs`, t => {
program.listTransferJobs((err, transferJobs) => {
t.ifError(err);
- t.true(transferJobs.some((transferJob) => transferJob.name === jobName));
- t.true(transferJobs.some((transferJob) => transferJob.description === description));
- t.true(transferJobs.some((transferJob) => transferJob.status === status));
+ t.true(transferJobs.some(transferJob => transferJob.name === jobName));
+ t.true(
+ transferJobs.some(transferJob => transferJob.description === description)
+ );
+ t.true(transferJobs.some(transferJob => transferJob.status === status));
t.true(console.log.calledWith(`Found %d jobs!`, transferJobs.length));
setTimeout(t.end, 2000);
});
});
-test.cb.serial(`should list transferJobs`, (t) => {
+test.cb.serial(`should list transferJobs`, t => {
program.listTransferOperations(jobName, (err, operations) => {
t.ifError(err);
t.true(Array.isArray(operations));
diff --git a/storage-transfer/test/transfer.test.js b/storage-transfer/test/transfer.test.js
index d1c9f8fffb..c9118421fc 100644
--- a/storage-transfer/test/transfer.test.js
+++ b/storage-transfer/test/transfer.test.js
@@ -25,62 +25,62 @@ const destBucketName = `bar`;
const jobName = `transferJobs/123456789012345678`;
const transferOperationName = `transferOperations/123456789012345678`;
-function getSample () {
+function getSample() {
const transferJobMock = {
- name: jobName
+ name: jobName,
};
const transferOperationMock = {};
const storagetransferMock = {
transferJobs: {
- create: sinon.stub().yields(null, { data: transferJobMock }),
- get: sinon.stub().yields(null, { data: transferJobMock }),
- patch: sinon.stub().yields(null, { data: transferJobMock }),
+ create: sinon.stub().yields(null, {data: transferJobMock}),
+ get: sinon.stub().yields(null, {data: transferJobMock}),
+ patch: sinon.stub().yields(null, {data: transferJobMock}),
list: sinon.stub().yields(null, {
data: {
- transferJobs: [transferJobMock]
- }
- })
+ transferJobs: [transferJobMock],
+ },
+ }),
},
transferOperations: {
- get: sinon.stub().yields(null, { data: transferOperationMock }),
- pause: sinon.stub().yields(null, { data: transferOperationMock }),
- resume: sinon.stub().yields(null, { data: transferOperationMock }),
+ get: sinon.stub().yields(null, {data: transferOperationMock}),
+ pause: sinon.stub().yields(null, {data: transferOperationMock}),
+ resume: sinon.stub().yields(null, {data: transferOperationMock}),
list: sinon.stub().yields(null, {
data: {
- operations: [transferOperationMock]
- }
- })
- }
+ operations: [transferOperationMock],
+ },
+ }),
+ },
};
const googleMock = {
storagetransfer: sinon.stub().returns(storagetransferMock),
auth: {
- getApplicationDefault: sinon.stub().yields(null, {})
- }
+ getApplicationDefault: sinon.stub().yields(null, {}),
+ },
};
const googleapisMock = {
- google: googleMock
+ google: googleMock,
};
return {
program: proxyquire(`../transfer`, {
googleapis: googleapisMock,
- yargs: proxyquire(`yargs`, {})
+ yargs: proxyquire(`yargs`, {}),
}),
mocks: {
googleapis: googleapisMock,
storagetransfer: storagetransferMock,
transferJob: transferJobMock,
- transferOperation: transferOperationMock
- }
+ transferOperation: transferOperationMock,
+ },
};
}
test.beforeEach(tools.stubConsole);
test.afterEach.always(tools.restoreConsole);
-test.serial(`should create a transfer job`, (t) => {
+test.serial(`should create a transfer job`, t => {
const description = `description`;
const sample = getSample();
const callback = sinon.stub();
@@ -90,30 +90,44 @@ test.serial(`should create a transfer job`, (t) => {
srcBucket: srcBucketName,
destBucket: destBucketName,
date: date,
- time: time
+ time: time,
};
sample.program.createTransferJob(options, callback);
t.true(sample.mocks.storagetransfer.transferJobs.create.calledOnce);
- t.is(sample.mocks.storagetransfer.transferJobs.create.firstCall.args[0].resource.description, undefined);
+ t.is(
+ sample.mocks.storagetransfer.transferJobs.create.firstCall.args[0].resource
+ .description,
+ undefined
+ );
t.true(callback.calledOnce);
t.deepEqual(callback.firstCall.args, [null, sample.mocks.transferJob]);
t.true(console.log.calledOnce);
- t.deepEqual(console.log.firstCall.args, [`Created transfer job: %s`, sample.mocks.transferJob.name]);
+ t.deepEqual(console.log.firstCall.args, [
+ `Created transfer job: %s`,
+ sample.mocks.transferJob.name,
+ ]);
options.description = description;
sample.program.createTransferJob(options, callback);
t.true(sample.mocks.storagetransfer.transferJobs.create.calledTwice);
- t.is(sample.mocks.storagetransfer.transferJobs.create.secondCall.args[0].resource.description, description);
+ t.is(
+ sample.mocks.storagetransfer.transferJobs.create.secondCall.args[0].resource
+ .description,
+ description
+ );
t.true(callback.calledTwice);
t.deepEqual(callback.secondCall.args, [null, sample.mocks.transferJob]);
t.true(console.log.calledTwice);
- t.deepEqual(console.log.secondCall.args, [`Created transfer job: %s`, sample.mocks.transferJob.name]);
+ t.deepEqual(console.log.secondCall.args, [
+ `Created transfer job: %s`,
+ sample.mocks.transferJob.name,
+ ]);
});
-test.serial(`should handle auth error`, (t) => {
+test.serial(`should handle auth error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -125,7 +139,7 @@ test.serial(`should handle auth error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should handle create error`, (t) => {
+test.serial(`should handle create error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -137,25 +151,33 @@ test.serial(`should handle create error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should get a transfer job`, (t) => {
+test.serial(`should get a transfer job`, t => {
const sample = getSample();
const callback = sinon.stub();
sample.program.getTransferJob(jobName, callback);
t.true(sample.mocks.storagetransfer.transferJobs.get.calledOnce);
- t.deepEqual(sample.mocks.storagetransfer.transferJobs.get.firstCall.args.slice(0, -1), [{
- auth: {},
- projectId: process.env.GCLOUD_PROJECT,
- jobName: jobName
- }]);
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferJobs.get.firstCall.args.slice(0, -1),
+ [
+ {
+ auth: {},
+ projectId: process.env.GCLOUD_PROJECT,
+ jobName: jobName,
+ },
+ ]
+ );
t.true(callback.calledOnce);
t.deepEqual(callback.firstCall.args, [null, sample.mocks.transferJob]);
t.true(console.log.calledOnce);
- t.deepEqual(console.log.firstCall.args, [`Found transfer job: %s`, sample.mocks.transferJob.name]);
+ t.deepEqual(console.log.firstCall.args, [
+ `Found transfer job: %s`,
+ sample.mocks.transferJob.name,
+ ]);
});
-test.serial(`should handle auth error`, (t) => {
+test.serial(`should handle auth error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -167,7 +189,7 @@ test.serial(`should handle auth error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should handle get error`, (t) => {
+test.serial(`should handle get error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -179,34 +201,42 @@ test.serial(`should handle get error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should update a transfer job`, (t) => {
+test.serial(`should update a transfer job`, t => {
const sample = getSample();
const callback = sinon.stub();
const options = {
job: jobName,
field: `status`,
- value: `DISABLED`
+ value: `DISABLED`,
};
sample.program.updateTransferJob(options, callback);
t.true(sample.mocks.storagetransfer.transferJobs.patch.calledOnce);
- t.deepEqual(sample.mocks.storagetransfer.transferJobs.patch.firstCall.args.slice(0, -1), [{
- auth: {},
- jobName: jobName,
- resource: {
- projectId: process.env.GCLOUD_PROJECT,
- transferJob: {
- name: jobName,
- status: options.value
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferJobs.patch.firstCall.args.slice(0, -1),
+ [
+ {
+ auth: {},
+ jobName: jobName,
+ resource: {
+ projectId: process.env.GCLOUD_PROJECT,
+ transferJob: {
+ name: jobName,
+ status: options.value,
+ },
+ updateTransferJobFieldMask: options.field,
+ },
},
- updateTransferJobFieldMask: options.field
- }
- }]);
+ ]
+ );
t.true(callback.calledOnce);
t.deepEqual(callback.firstCall.args, [null, sample.mocks.transferJob]);
t.true(console.log.calledOnce);
- t.deepEqual(console.log.firstCall.args, [`Updated transfer job: %s`, jobName]);
+ t.deepEqual(console.log.firstCall.args, [
+ `Updated transfer job: %s`,
+ jobName,
+ ]);
options.field = `description`;
options.value = `description`;
@@ -214,22 +244,33 @@ test.serial(`should update a transfer job`, (t) => {
sample.program.updateTransferJob(options, callback);
t.true(sample.mocks.storagetransfer.transferJobs.patch.calledTwice);
- t.deepEqual(sample.mocks.storagetransfer.transferJobs.patch.secondCall.args.slice(0, -1), [{
- auth: {},
- jobName: jobName,
- resource: {
- projectId: process.env.GCLOUD_PROJECT,
- transferJob: {
- name: jobName,
- description: options.value
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferJobs.patch.secondCall.args.slice(
+ 0,
+ -1
+ ),
+ [
+ {
+ auth: {},
+ jobName: jobName,
+ resource: {
+ projectId: process.env.GCLOUD_PROJECT,
+ transferJob: {
+ name: jobName,
+ description: options.value,
+ },
+ updateTransferJobFieldMask: options.field,
+ },
},
- updateTransferJobFieldMask: options.field
- }
- }]);
+ ]
+ );
t.true(callback.calledTwice);
t.deepEqual(callback.secondCall.args, [null, sample.mocks.transferJob]);
t.true(console.log.calledTwice);
- t.deepEqual(console.log.secondCall.args, [`Updated transfer job: %s`, jobName]);
+ t.deepEqual(console.log.secondCall.args, [
+ `Updated transfer job: %s`,
+ jobName,
+ ]);
options.field = `transferSpec`;
options.value = `{"foo":"bar"}`;
@@ -237,32 +278,40 @@ test.serial(`should update a transfer job`, (t) => {
sample.program.updateTransferJob(options, callback);
t.true(sample.mocks.storagetransfer.transferJobs.patch.calledThrice);
- t.deepEqual(sample.mocks.storagetransfer.transferJobs.patch.thirdCall.args.slice(0, -1), [{
- auth: {},
- jobName: jobName,
- resource: {
- projectId: process.env.GCLOUD_PROJECT,
- transferJob: {
- name: jobName,
- transferSpec: JSON.parse(options.value)
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferJobs.patch.thirdCall.args.slice(0, -1),
+ [
+ {
+ auth: {},
+ jobName: jobName,
+ resource: {
+ projectId: process.env.GCLOUD_PROJECT,
+ transferJob: {
+ name: jobName,
+ transferSpec: JSON.parse(options.value),
+ },
+ updateTransferJobFieldMask: options.field,
+ },
},
- updateTransferJobFieldMask: options.field
- }
- }]);
+ ]
+ );
t.true(callback.calledThrice);
t.deepEqual(callback.thirdCall.args, [null, sample.mocks.transferJob]);
t.true(console.log.calledThrice);
- t.deepEqual(console.log.thirdCall.args, [`Updated transfer job: %s`, jobName]);
+ t.deepEqual(console.log.thirdCall.args, [
+ `Updated transfer job: %s`,
+ jobName,
+ ]);
});
-test.serial(`should handle auth error`, (t) => {
+test.serial(`should handle auth error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
const options = {
job: jobName,
field: `status`,
- value: `DISABLED`
+ value: `DISABLED`,
};
sample.mocks.googleapis.google.auth.getApplicationDefault.yields(error);
@@ -272,14 +321,14 @@ test.serial(`should handle auth error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should handle patch error`, (t) => {
+test.serial(`should handle patch error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
const options = {
job: jobName,
field: `status`,
- value: `DISABLED`
+ value: `DISABLED`,
};
sample.mocks.storagetransfer.transferJobs.patch.yields(error);
@@ -289,17 +338,22 @@ test.serial(`should handle patch error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should list transfer jobs`, (t) => {
+test.serial(`should list transfer jobs`, t => {
const sample = getSample();
const callback = sinon.stub();
sample.program.listTransferJobs(callback);
t.true(sample.mocks.storagetransfer.transferJobs.list.calledOnce);
- t.deepEqual(sample.mocks.storagetransfer.transferJobs.list.firstCall.args.slice(0, -1), [{
- auth: {},
- filter: JSON.stringify({ project_id: process.env.GCLOUD_PROJECT })
- }]);
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferJobs.list.firstCall.args.slice(0, -1),
+ [
+ {
+ auth: {},
+ filter: JSON.stringify({project_id: process.env.GCLOUD_PROJECT}),
+ },
+ ]
+ );
t.true(callback.calledOnce);
t.deepEqual(callback.firstCall.args, [null, [sample.mocks.transferJob]]);
t.true(console.log.calledOnce);
@@ -309,16 +363,21 @@ test.serial(`should list transfer jobs`, (t) => {
sample.program.listTransferJobs(callback);
t.true(sample.mocks.storagetransfer.transferJobs.list.calledTwice);
- t.deepEqual(sample.mocks.storagetransfer.transferJobs.list.secondCall.args.slice(0, -1), [{
- auth: {},
- filter: JSON.stringify({ project_id: process.env.GCLOUD_PROJECT })
- }]);
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferJobs.list.secondCall.args.slice(0, -1),
+ [
+ {
+ auth: {},
+ filter: JSON.stringify({project_id: process.env.GCLOUD_PROJECT}),
+ },
+ ]
+ );
t.true(callback.calledTwice);
t.deepEqual(callback.secondCall.args, [null, []]);
t.true(console.log.calledOnce);
});
-test.serial(`should handle auth error`, (t) => {
+test.serial(`should handle auth error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -330,7 +389,7 @@ test.serial(`should handle auth error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should handle list error`, (t) => {
+test.serial(`should handle list error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -342,7 +401,7 @@ test.serial(`should handle list error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should list transfer operations`, (t) => {
+test.serial(`should list transfer operations`, t => {
const sample = getSample();
const callback = sinon.stub();
@@ -350,13 +409,24 @@ test.serial(`should list transfer operations`, (t) => {
sample.program.listTransferOperations(undefined, callback);
t.true(sample.mocks.storagetransfer.transferOperations.list.calledOnce);
- t.deepEqual(sample.mocks.storagetransfer.transferOperations.list.firstCall.args.slice(0, -1), [{
- name: `transferOperations`,
- auth: {},
- filter: JSON.stringify({ project_id: process.env.GCLOUD_PROJECT })
- }]);
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferOperations.list.firstCall.args.slice(
+ 0,
+ -1
+ ),
+ [
+ {
+ name: `transferOperations`,
+ auth: {},
+ filter: JSON.stringify({project_id: process.env.GCLOUD_PROJECT}),
+ },
+ ]
+ );
t.true(callback.calledOnce);
- t.deepEqual(callback.firstCall.args, [null, [sample.mocks.transferOperation]]);
+ t.deepEqual(callback.firstCall.args, [
+ null,
+ [sample.mocks.transferOperation],
+ ]);
t.true(console.log.calledOnce);
t.deepEqual(console.log.firstCall.args, [`Found %d operations!`, 1]);
@@ -364,13 +434,27 @@ test.serial(`should list transfer operations`, (t) => {
sample.program.listTransferOperations(jobName, callback);
t.true(sample.mocks.storagetransfer.transferOperations.list.calledTwice);
- t.deepEqual(sample.mocks.storagetransfer.transferOperations.list.secondCall.args.slice(0, -1), [{
- name: `transferOperations`,
- auth: {},
- filter: JSON.stringify({ project_id: process.env.GCLOUD_PROJECT, job_names: [jobName] })
- }]);
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferOperations.list.secondCall.args.slice(
+ 0,
+ -1
+ ),
+ [
+ {
+ name: `transferOperations`,
+ auth: {},
+ filter: JSON.stringify({
+ project_id: process.env.GCLOUD_PROJECT,
+ job_names: [jobName],
+ }),
+ },
+ ]
+ );
t.true(callback.calledTwice);
- t.deepEqual(callback.secondCall.args, [null, [sample.mocks.transferOperation]]);
+ t.deepEqual(callback.secondCall.args, [
+ null,
+ [sample.mocks.transferOperation],
+ ]);
t.true(console.log.calledTwice);
t.deepEqual(console.log.secondCall.args, [`Found %d operations!`, 1]);
@@ -379,17 +463,28 @@ test.serial(`should list transfer operations`, (t) => {
sample.program.listTransferOperations(jobName, callback);
t.true(sample.mocks.storagetransfer.transferOperations.list.calledThrice);
- t.deepEqual(sample.mocks.storagetransfer.transferOperations.list.thirdCall.args.slice(0, -1), [{
- name: `transferOperations`,
- auth: {},
- filter: JSON.stringify({ project_id: process.env.GCLOUD_PROJECT, job_names: [jobName] })
- }]);
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferOperations.list.thirdCall.args.slice(
+ 0,
+ -1
+ ),
+ [
+ {
+ name: `transferOperations`,
+ auth: {},
+ filter: JSON.stringify({
+ project_id: process.env.GCLOUD_PROJECT,
+ job_names: [jobName],
+ }),
+ },
+ ]
+ );
t.true(callback.calledThrice);
t.deepEqual(callback.thirdCall.args, [null, []]);
t.true(console.log.calledTwice);
});
-test.serial(`should handle auth error`, (t) => {
+test.serial(`should handle auth error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -401,7 +496,7 @@ test.serial(`should handle auth error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should handle list error`, (t) => {
+test.serial(`should handle list error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -413,7 +508,7 @@ test.serial(`should handle list error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should get a transfer operation`, (t) => {
+test.serial(`should get a transfer operation`, t => {
const sample = getSample();
const callback = sinon.stub();
@@ -425,17 +520,28 @@ test.serial(`should get a transfer operation`, (t) => {
t.true(callback.firstCall.args[1] === sample.mocks.transferOperation);
t.true(sample.mocks.storagetransfer.transferOperations.get.calledOnce);
- t.deepEqual(sample.mocks.storagetransfer.transferOperations.get.firstCall.args.slice(0, -1), [{
- name: transferOperationName,
- auth: {}
- }]);
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferOperations.get.firstCall.args.slice(
+ 0,
+ -1
+ ),
+ [
+ {
+ name: transferOperationName,
+ auth: {},
+ },
+ ]
+ );
t.true(callback.calledOnce);
t.deepEqual(callback.firstCall.args, [null, sample.mocks.transferOperation]);
t.true(console.log.calledOnce);
- t.deepEqual(console.log.firstCall.args, [`Found transfer operation: %s`, sample.mocks.transferOperation]);
+ t.deepEqual(console.log.firstCall.args, [
+ `Found transfer operation: %s`,
+ sample.mocks.transferOperation,
+ ]);
});
-test.serial(`should handle auth error`, (t) => {
+test.serial(`should handle auth error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -447,7 +553,7 @@ test.serial(`should handle auth error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should handle get error`, (t) => {
+test.serial(`should handle get error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -459,7 +565,7 @@ test.serial(`should handle get error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should pause a transfer operation`, (t) => {
+test.serial(`should pause a transfer operation`, t => {
const sample = getSample();
const callback = sinon.stub();
@@ -470,17 +576,28 @@ test.serial(`should pause a transfer operation`, (t) => {
t.ifError(callback.firstCall.args[0], `callback did not receive error`);
t.true(sample.mocks.storagetransfer.transferOperations.pause.calledOnce);
- t.deepEqual(sample.mocks.storagetransfer.transferOperations.pause.firstCall.args.slice(0, -1), [{
- name: transferOperationName,
- auth: {}
- }]);
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferOperations.pause.firstCall.args.slice(
+ 0,
+ -1
+ ),
+ [
+ {
+ name: transferOperationName,
+ auth: {},
+ },
+ ]
+ );
t.true(callback.calledOnce);
t.deepEqual(callback.firstCall.args, [null]);
t.true(console.log.calledOnce);
- t.deepEqual(console.log.firstCall.args, [`Paused transfer operation: %s`, transferOperationName]);
+ t.deepEqual(console.log.firstCall.args, [
+ `Paused transfer operation: %s`,
+ transferOperationName,
+ ]);
});
-test.serial(`should handle auth error`, (t) => {
+test.serial(`should handle auth error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -492,7 +609,7 @@ test.serial(`should handle auth error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should handle pause error`, (t) => {
+test.serial(`should handle pause error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -504,7 +621,7 @@ test.serial(`should handle pause error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should resume a transfer operation`, (t) => {
+test.serial(`should resume a transfer operation`, t => {
const sample = getSample();
const callback = sinon.stub();
@@ -515,17 +632,28 @@ test.serial(`should resume a transfer operation`, (t) => {
t.ifError(callback.firstCall.args[0], `callback did not receive error`);
t.true(sample.mocks.storagetransfer.transferOperations.resume.calledOnce);
- t.deepEqual(sample.mocks.storagetransfer.transferOperations.resume.firstCall.args.slice(0, -1), [{
- name: transferOperationName,
- auth: {}
- }]);
+ t.deepEqual(
+ sample.mocks.storagetransfer.transferOperations.resume.firstCall.args.slice(
+ 0,
+ -1
+ ),
+ [
+ {
+ name: transferOperationName,
+ auth: {},
+ },
+ ]
+ );
t.true(callback.calledOnce);
t.deepEqual(callback.firstCall.args, [null]);
t.true(console.log.calledOnce);
- t.deepEqual(console.log.firstCall.args, [`Resumed transfer operation: %s`, transferOperationName]);
+ t.deepEqual(console.log.firstCall.args, [
+ `Resumed transfer operation: %s`,
+ transferOperationName,
+ ]);
});
-test.serial(`should handle auth error`, (t) => {
+test.serial(`should handle auth error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -537,7 +665,7 @@ test.serial(`should handle auth error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should handle resume error`, (t) => {
+test.serial(`should handle resume error`, t => {
const error = new Error(`error`);
const sample = getSample();
const callback = sinon.stub();
@@ -549,22 +677,31 @@ test.serial(`should handle resume error`, (t) => {
t.deepEqual(callback.firstCall.args, [error]);
});
-test.serial(`should call createTransferJob`, (t) => {
+test.serial(`should call createTransferJob`, t => {
const program = getSample().program;
sinon.stub(program, `createTransferJob`);
- program.main([`jobs`, `create`, srcBucketName, destBucketName, `time`, `date`]);
+ program.main([
+ `jobs`,
+ `create`,
+ srcBucketName,
+ destBucketName,
+ `time`,
+ `date`,
+ ]);
t.true(program.createTransferJob.calledOnce);
- t.deepEqual(program.createTransferJob.firstCall.args.slice(0, -1), [{
- srcBucket: srcBucketName,
- destBucket: destBucketName,
- time: `time`,
- date: `date`,
- description: undefined
- }]);
+ t.deepEqual(program.createTransferJob.firstCall.args.slice(0, -1), [
+ {
+ srcBucket: srcBucketName,
+ destBucket: destBucketName,
+ time: `time`,
+ date: `date`,
+ description: undefined,
+ },
+ ]);
});
-test.serial(`should call getTransferJob`, (t) => {
+test.serial(`should call getTransferJob`, t => {
const program = getSample().program;
sinon.stub(program, `getTransferJob`);
@@ -573,7 +710,7 @@ test.serial(`should call getTransferJob`, (t) => {
t.deepEqual(program.getTransferJob.firstCall.args.slice(0, -1), [jobName]);
});
-test.serial(`should call listTransferJobs`, (t) => {
+test.serial(`should call listTransferJobs`, t => {
const program = getSample().program;
sinon.stub(program, `listTransferJobs`);
@@ -582,60 +719,72 @@ test.serial(`should call listTransferJobs`, (t) => {
t.deepEqual(program.listTransferJobs.firstCall.args.slice(0, -1), []);
});
-test.serial(`should call updateTransferJob`, (t) => {
+test.serial(`should call updateTransferJob`, t => {
const program = getSample().program;
sinon.stub(program, `updateTransferJob`);
program.main([`jobs`, `set`, jobName, `status`, `DISABLED`]);
t.true(program.updateTransferJob.calledOnce);
- t.deepEqual(program.updateTransferJob.firstCall.args.slice(0, -1), [{
- job: jobName,
- field: `status`,
- value: `DISABLED`
- }]);
+ t.deepEqual(program.updateTransferJob.firstCall.args.slice(0, -1), [
+ {
+ job: jobName,
+ field: `status`,
+ value: `DISABLED`,
+ },
+ ]);
});
-test.serial(`should call listTransferOperations`, (t) => {
+test.serial(`should call listTransferOperations`, t => {
const program = getSample().program;
sinon.stub(program, `listTransferOperations`);
program.main([`operations`, `list`]);
t.true(program.listTransferOperations.calledOnce);
- t.deepEqual(program.listTransferOperations.firstCall.args.slice(0, -1), [undefined]);
+ t.deepEqual(program.listTransferOperations.firstCall.args.slice(0, -1), [
+ undefined,
+ ]);
});
-test.serial(`should call listTransferOperations and filter`, (t) => {
+test.serial(`should call listTransferOperations and filter`, t => {
const program = getSample().program;
sinon.stub(program, `listTransferOperations`);
program.main([`operations`, `list`, jobName]);
t.true(program.listTransferOperations.calledOnce);
- t.deepEqual(program.listTransferOperations.firstCall.args.slice(0, -1), [jobName]);
+ t.deepEqual(program.listTransferOperations.firstCall.args.slice(0, -1), [
+ jobName,
+ ]);
});
-test.serial(`should call getTransferOperation`, (t) => {
+test.serial(`should call getTransferOperation`, t => {
const program = getSample().program;
sinon.stub(program, `getTransferOperation`);
program.main([`operations`, `get`, transferOperationName]);
t.true(program.getTransferOperation.calledOnce);
- t.deepEqual(program.getTransferOperation.firstCall.args.slice(0, -1), [transferOperationName]);
+ t.deepEqual(program.getTransferOperation.firstCall.args.slice(0, -1), [
+ transferOperationName,
+ ]);
});
-test.serial(`should call pauseTransferOperation`, (t) => {
+test.serial(`should call pauseTransferOperation`, t => {
const program = getSample().program;
sinon.stub(program, `pauseTransferOperation`);
program.main([`operations`, `pause`, transferOperationName]);
t.true(program.pauseTransferOperation.calledOnce);
- t.deepEqual(program.pauseTransferOperation.firstCall.args.slice(0, -1), [transferOperationName]);
+ t.deepEqual(program.pauseTransferOperation.firstCall.args.slice(0, -1), [
+ transferOperationName,
+ ]);
});
-test.serial(`should call resumeTransferOperation`, (t) => {
+test.serial(`should call resumeTransferOperation`, t => {
const program = getSample().program;
sinon.stub(program, `resumeTransferOperation`);
program.main([`operations`, `resume`, transferOperationName]);
t.true(program.resumeTransferOperation.calledOnce);
- t.deepEqual(program.resumeTransferOperation.firstCall.args.slice(0, -1), [transferOperationName]);
+ t.deepEqual(program.resumeTransferOperation.firstCall.args.slice(0, -1), [
+ transferOperationName,
+ ]);
});
diff --git a/storage-transfer/transfer.js b/storage-transfer/transfer.js
index 894fc8a476..059c2a730b 100644
--- a/storage-transfer/transfer.js
+++ b/storage-transfer/transfer.js
@@ -25,8 +25,8 @@ var storagetransfer = google.storagetransfer('v1');
// [END setup]
// [START auth]
-function auth (callback) {
- google.auth.getApplicationDefault(function (err, authClient) {
+function auth(callback) {
+ google.auth.getApplicationDefault(function(err, authClient) {
if (err) {
return callback(err);
}
@@ -41,7 +41,7 @@ function auth (callback) {
// Scopes can be specified either as an array or as a single,
// space-delimited string.
authClient = authClient.createScoped([
- 'https://www.googleapis.com/auth/cloud-platform'
+ 'https://www.googleapis.com/auth/cloud-platform',
]);
}
callback(null, authClient);
@@ -61,11 +61,11 @@ function auth (callback) {
* @param {string} [options.description] Optional. Description for the new transfer job.
* @param {function} callback The callback function.
*/
-function createTransferJob (options, callback) {
+function createTransferJob(options, callback) {
var startDate = moment(options.date, 'YYYY/MM/DD');
var transferTime = moment(options.time, 'HH:mm');
- auth(function (err, authClient) {
+ auth(function(err, authClient) {
if (err) {
return callback(err);
}
@@ -75,44 +75,47 @@ function createTransferJob (options, callback) {
status: 'ENABLED',
transferSpec: {
gcsDataSource: {
- bucketName: options.srcBucket
+ bucketName: options.srcBucket,
},
gcsDataSink: {
- bucketName: options.destBucket
+ bucketName: options.destBucket,
},
transferOptions: {
- deleteObjectsFromSourceAfterTransfer: false
- }
+ deleteObjectsFromSourceAfterTransfer: false,
+ },
},
schedule: {
scheduleStartDate: {
year: startDate.year(),
month: startDate.month() + 1,
- day: startDate.date()
+ day: startDate.date(),
},
startTimeOfDay: {
hours: transferTime.hours(),
- minutes: transferTime.minutes()
- }
- }
+ minutes: transferTime.minutes(),
+ },
+ },
};
if (options.description) {
transferJob.description = options.description;
}
- storagetransfer.transferJobs.create({
- auth: authClient,
- resource: transferJob
- }, function (err, response) {
- if (err) {
- return callback(err);
- }
+ storagetransfer.transferJobs.create(
+ {
+ auth: authClient,
+ resource: transferJob,
+ },
+ function(err, response) {
+ if (err) {
+ return callback(err);
+ }
- const transferJob = response.data;
- console.log('Created transfer job: %s', transferJob.name);
- return callback(null, transferJob);
- });
+ const transferJob = response.data;
+ console.log('Created transfer job: %s', transferJob.name);
+ return callback(null, transferJob);
+ }
+ );
});
}
// [END create_transfer_job]
@@ -124,25 +127,28 @@ function createTransferJob (options, callback) {
* @param {string} jobName The name of the transfer job to get.
* @param {function} callback The callback function.
*/
-function getTransferJob (jobName, callback) {
- auth(function (err, authClient) {
+function getTransferJob(jobName, callback) {
+ auth(function(err, authClient) {
if (err) {
return callback(err);
}
- storagetransfer.transferJobs.get({
- auth: authClient,
- projectId: process.env.GCLOUD_PROJECT,
- jobName: jobName
- }, function (err, response) {
- if (err) {
- return callback(err);
- }
+ storagetransfer.transferJobs.get(
+ {
+ auth: authClient,
+ projectId: process.env.GCLOUD_PROJECT,
+ jobName: jobName,
+ },
+ function(err, response) {
+ if (err) {
+ return callback(err);
+ }
- const transferJob = response.data;
- console.log('Found transfer job: %s', transferJob.name);
- return callback(null, transferJob);
- });
+ const transferJob = response.data;
+ console.log('Found transfer job: %s', transferJob.name);
+ return callback(null, transferJob);
+ }
+ );
});
}
// [END get_transfer_job]
@@ -157,8 +163,8 @@ function getTransferJob (jobName, callback) {
* @param {string} options.value The new value for the field.
* @param {function} callback The callback function.
*/
-function updateTransferJob (options, callback) {
- auth(function (err, authClient) {
+function updateTransferJob(options, callback) {
+ auth(function(err, authClient) {
if (err) {
return callback(err);
}
@@ -166,9 +172,9 @@ function updateTransferJob (options, callback) {
var patchRequest = {
projectId: process.env.GCLOUD_PROJECT,
transferJob: {
- name: options.job
+ name: options.job,
},
- updateTransferJobFieldMask: options.field
+ updateTransferJobFieldMask: options.field,
};
if (options.field === 'description') {
@@ -179,19 +185,22 @@ function updateTransferJob (options, callback) {
patchRequest.transferJob.transferSpec = JSON.parse(options.value);
}
- storagetransfer.transferJobs.patch({
- auth: authClient,
- jobName: options.job,
- resource: patchRequest
- }, function (err, response) {
- if (err) {
- return callback(err);
- }
+ storagetransfer.transferJobs.patch(
+ {
+ auth: authClient,
+ jobName: options.job,
+ resource: patchRequest,
+ },
+ function(err, response) {
+ if (err) {
+ return callback(err);
+ }
- const transferJob = response.data;
- console.log('Updated transfer job: %s', transferJob.name);
- return callback(null, transferJob);
- });
+ const transferJob = response.data;
+ console.log('Updated transfer job: %s', transferJob.name);
+ return callback(null, transferJob);
+ }
+ );
});
}
// [END update_transfer_job]
@@ -202,25 +211,28 @@ function updateTransferJob (options, callback) {
*
* @param {function} callback The callback function.
*/
-function listTransferJobs (callback) {
- auth(function (err, authClient) {
+function listTransferJobs(callback) {
+ auth(function(err, authClient) {
if (err) {
return callback(err);
}
- storagetransfer.transferJobs.list({
- auth: authClient,
- filter: JSON.stringify({ project_id: process.env.GCLOUD_PROJECT })
- }, function (err, response) {
- if (err) {
- return callback(err);
- } else if (!response.data || !response.data.transferJobs) {
- return callback(null, []);
- }
+ storagetransfer.transferJobs.list(
+ {
+ auth: authClient,
+ filter: JSON.stringify({project_id: process.env.GCLOUD_PROJECT}),
+ },
+ function(err, response) {
+ if (err) {
+ return callback(err);
+ } else if (!response.data || !response.data.transferJobs) {
+ return callback(null, []);
+ }
- console.log('Found %d jobs!', response.data.transferJobs.length);
- return callback(null, response.data.transferJobs);
- });
+ console.log('Found %d jobs!', response.data.transferJobs.length);
+ return callback(null, response.data.transferJobs);
+ }
+ );
});
}
// [END list_transfer_jobs]
@@ -232,34 +244,37 @@ function listTransferJobs (callback) {
* @param {string} [jobName] An optional job name by which to filter results.
* @param {function} callback The callback function.
*/
-function listTransferOperations (jobName, callback) {
- auth(function (err, authClient) {
+function listTransferOperations(jobName, callback) {
+ auth(function(err, authClient) {
if (err) {
return callback(err);
}
var filter = {
- project_id: process.env.GCLOUD_PROJECT
+ project_id: process.env.GCLOUD_PROJECT,
};
if (jobName) {
filter.job_names = [jobName];
}
- storagetransfer.transferOperations.list({
- name: 'transferOperations',
- filter: JSON.stringify(filter),
- auth: authClient
- }, function (err, response, apiResponse) {
- if (err) {
- return callback(err);
- } else if (!response.data || !response.data.operations) {
- return callback(null, []);
- }
+ storagetransfer.transferOperations.list(
+ {
+ name: 'transferOperations',
+ filter: JSON.stringify(filter),
+ auth: authClient,
+ },
+ function(err, response) {
+ if (err) {
+ return callback(err);
+ } else if (!response.data || !response.data.operations) {
+ return callback(null, []);
+ }
- console.log('Found %d operations!', response.data.operations.length);
- return callback(null, response.data.operations);
- });
+ console.log('Found %d operations!', response.data.operations.length);
+ return callback(null, response.data.operations);
+ }
+ );
});
}
// [END list_transfer_operations]
@@ -271,24 +286,27 @@ function listTransferOperations (jobName, callback) {
* @param {string} transferOperationName The name of the transfer operation.
* @param {function} callback The callback function.
*/
-function getTransferOperation (transferOperationName, callback) {
- auth(function (err, authClient) {
+function getTransferOperation(transferOperationName, callback) {
+ auth(function(err, authClient) {
if (err) {
return callback(err);
}
- storagetransfer.transferOperations.get({
- name: transferOperationName,
- auth: authClient
- }, function (err, response) {
- if (err) {
- return callback(err);
- }
+ storagetransfer.transferOperations.get(
+ {
+ name: transferOperationName,
+ auth: authClient,
+ },
+ function(err, response) {
+ if (err) {
+ return callback(err);
+ }
- const transferOperation = response.data;
- console.log('Found transfer operation: %s', transferOperation);
- return callback(null, transferOperation);
- });
+ const transferOperation = response.data;
+ console.log('Found transfer operation: %s', transferOperation);
+ return callback(null, transferOperation);
+ }
+ );
});
}
// [END get_transfer_operation]
@@ -300,23 +318,26 @@ function getTransferOperation (transferOperationName, callback) {
* @param {string} transferOperationName The name of the transfer operation.
* @param {function} callback The callback function.
*/
-function pauseTransferOperation (transferOperationName, callback) {
- auth(function (err, authClient) {
+function pauseTransferOperation(transferOperationName, callback) {
+ auth(function(err, authClient) {
if (err) {
return callback(err);
}
- storagetransfer.transferOperations.pause({
- name: transferOperationName,
- auth: authClient
- }, function (err) {
- if (err) {
- return callback(err);
- }
+ storagetransfer.transferOperations.pause(
+ {
+ name: transferOperationName,
+ auth: authClient,
+ },
+ function(err) {
+ if (err) {
+ return callback(err);
+ }
- console.log('Paused transfer operation: %s', transferOperationName);
- return callback(null);
- });
+ console.log('Paused transfer operation: %s', transferOperationName);
+ return callback(null);
+ }
+ );
});
}
// [END pause_transfer_operation]
@@ -328,23 +349,26 @@ function pauseTransferOperation (transferOperationName, callback) {
* @param {string} transferOperationName The name of the transfer operation.
* @param {function} callback The callback function.
*/
-function resumeTransferOperation (transferOperationName, callback) {
- auth(function (err, authClient) {
+function resumeTransferOperation(transferOperationName, callback) {
+ auth(function(err, authClient) {
if (err) {
return callback(err);
}
- storagetransfer.transferOperations.resume({
- name: transferOperationName,
- auth: authClient
- }, function (err) {
- if (err) {
- return callback(err);
- }
+ storagetransfer.transferOperations.resume(
+ {
+ name: transferOperationName,
+ auth: authClient,
+ },
+ function(err) {
+ if (err) {
+ return callback(err);
+ }
- console.log('Resumed transfer operation: %s', transferOperationName);
- return callback(null);
- });
+ console.log('Resumed transfer operation: %s', transferOperationName);
+ return callback(null);
+ }
+ );
});
}
// [END resume_transfer_operation]
@@ -353,7 +377,7 @@ function resumeTransferOperation (transferOperationName, callback) {
// The command-line program
var cli = require('yargs');
-var program = module.exports = {
+var program = (module.exports = {
createTransferJob: createTransferJob,
getTransferJob: getTransferJob,
listTransferJobs: listTransferJobs,
@@ -362,73 +386,140 @@ var program = module.exports = {
getTransferOperation: getTransferOperation,
pauseTransferOperation: pauseTransferOperation,
resumeTransferOperation: resumeTransferOperation,
- main: function (args) {
+ main: function(args) {
// Run the command-line program
cli.help().strict().parse(args).argv; // eslint-disable-line
- }
-};
+ },
+});
cli
.demand(1)
- .command('jobs [args]', 'Run a job command.', (yargs) => {
- yargs
- .demand(2)
- .command('create