Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Video update #476

Merged
merged 13 commits into from
Sep 21, 2017
14 changes: 7 additions & 7 deletions video/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ The [Cloud Video Intelligence API](https://cloud.google.com/video-intelligence)

yarn install

[prereq]: ../README.md#prerequisities
[prereq]: ../README.md#prerequisites
[run]: ../README.md#how-to-run-a-sample

## Samples
Expand All @@ -44,17 +44,17 @@ Commands:
Intelligence API.
labels-gcs <gcsUri> Labels objects in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.
labels-file <gcsUri> Labels objects in a video stored locally using the Cloud Video Intelligence API.
safe-search <gcsUri> Detects adult content in a video stored in Google Cloud Storage.
safe-search <gcsUri> Detects explicit content in a video stored in Google Cloud Storage.

Options:
--help Show help [boolean]

Examples:
node analyze.js faces gs://demomaker/volleyball_court.mp4
node analyze.js shots gs://demomaker/volleyball_court.mp4
node analyze.js labels-gcs gs://demomaker/volleyball_court.mp4
node analyze.js labels-file cat.mp4
node analyze.js safe-search gs://demomaker/volleyball_court.mp4
node analyze.js faces gs://demomaker/larry_sergey_ice_bucket_short.mp4
node analyze.js shots gs://demomaker/sushi.mp4
node analyze.js labels-gcs gs://demomaker/tomatoes.mp4
node analyze.js labels-file resources/cat.mp4
node analyze.js safe-search gs://demomaker/tomatoes.mp4

For more information, see https://cloud.google.com/video-intelligence/docs
```
Expand Down
168 changes: 114 additions & 54 deletions video/analyze.js
Original file line number Diff line number Diff line change
Expand Up @@ -41,14 +41,30 @@ function analyzeFaces (gcsUri) {
.then((results) => {
// Gets faces
const faces = results[0].annotationResults[0].faceAnnotations;
console.log('Faces:');
faces.forEach((face, faceIdx) => {
console.log('Thumbnail size:', face.thumbnail.length);
console.log(`Face #${faceIdx}`);
console.log(`\tThumbnail size: ${face.thumbnail.length}`);
face.segments.forEach((segment, segmentIdx) => {
console.log(`Face #${faceIdx}, appearance #${segmentIdx}:`);
console.log(`\tStart: ${segment.startTimeOffset / 1e6}s`);
console.log(`\tEnd: ${segment.endTimeOffset / 1e6}s`);
segment = segment.segment;
if (segment.startTimeOffset.seconds === undefined) {
segment.startTimeOffset.seconds = 0;
}
if (segment.startTimeOffset.nanos === undefined) {
segment.startTimeOffset.nanos = 0;
}
if (segment.endTimeOffset.seconds === undefined) {
segment.endTimeOffset.seconds = 0;
}
if (segment.endTimeOffset.nanos === undefined) {
segment.endTimeOffset.nanos = 0;
}
console.log(`\tAppearance #${segmentIdx}:`);
console.log(`\t\tStart: ${segment.startTimeOffset.seconds}` +
`.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`);
console.log(`\t\tEnd: ${segment.endTimeOffset.seconds}.` +
`${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s`);
});
console.log(`\tLocations:`);
});
})
.catch((err) => {
Expand All @@ -63,7 +79,9 @@ function analyzeLabelsGCS (gcsUri) {
const Video = require('@google-cloud/video-intelligence');

// Instantiates a client
const video = Video();
const video = Video({
servicePath: `videointelligence.googleapis.com`
});

// The GCS filepath of the video to analyze
// const gcsUri = 'gs://my-bucket/my-video.mp4';
Expand All @@ -81,24 +99,32 @@ function analyzeLabelsGCS (gcsUri) {
return operation.promise();
})
.then((results) => {
// Gets labels
const labels = results[0].annotationResults[0].labelAnnotations;
console.log('Labels:');
// Gets annotations for video
const annotations = results[0].annotationResults[0];

const labels = annotations.segmentLabelAnnotations;
labels.forEach((label) => {
console.log(`Label ${label.description} occurs at:`);
const isEntireVideo = label.locations.some((location) =>
location.segment.startTimeOffset.toNumber() === -1 &&
location.segment.endTimeOffset.toNumber() === -1
);

if (isEntireVideo) {
console.log(`\tEntire video`);
} else {
label.locations.forEach((location) => {
console.log(`\tStart: ${location.segment.startTimeOffset / 1e6}s`);
console.log(`\tEnd: ${location.segment.endTimeOffset / 1e6}s`);
});
}
console.log(`Label ${label.entity.description} occurs at:`);
label.segments.forEach((segment) => {
let time = segment.segment;
if (time.startTimeOffset.seconds === undefined) {
time.startTimeOffset.seconds = 0;
}
if (time.startTimeOffset.nanos === undefined) {
time.startTimeOffset.nanos = 0;
}
if (time.endTimeOffset.seconds === undefined) {
time.endTimeOffset.seconds = 0;
}
if (time.endTimeOffset.nanos === undefined) {
time.endTimeOffset.nanos = 0;
}
console.log(`\tStart: ${time.startTimeOffset.seconds}` +
`.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`);
console.log(`\tEnd: ${time.endTimeOffset.seconds}.` +
`${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`);
console.log(`\tConfidence: ${segment.confidence}`);
});
});
})
.catch((err) => {
Expand Down Expand Up @@ -137,24 +163,32 @@ function analyzeLabelsLocal (path) {
return operation.promise();
})
.then((results) => {
// Gets labels for first video
const labels = results[0].annotationResults[0].labelAnnotations;
console.log('Labels:');
// Gets annotations for video
const annotations = results[0].annotationResults[0];

const labels = annotations.segmentLabelAnnotations;
labels.forEach((label) => {
console.log(`Label ${label.description} occurs at:`);
const isEntireVideo = label.locations.some((location) =>
location.segment.startTimeOffset.toNumber() === -1 &&
location.segment.endTimeOffset.toNumber() === -1
);

if (isEntireVideo) {
console.log(`\tEntire video`);
} else {
label.locations.forEach((location) => {
console.log(`\tStart: ${location.segment.startTimeOffset / 1e6}s`);
console.log(`\tEnd: ${location.segment.endTimeOffset / 1e6}s`);
});
}
console.log(`Label ${label.entity.description} occurs at:`);
label.segments.forEach((segment) => {
let time = segment.segment;
if (time.startTimeOffset.seconds === undefined) {
time.startTimeOffset.seconds = 0;
}
if (time.startTimeOffset.nanos === undefined) {
time.startTimeOffset.nanos = 0;
}
if (time.endTimeOffset.seconds === undefined) {
time.endTimeOffset.seconds = 0;
}
if (time.endTimeOffset.nanos === undefined) {
time.endTimeOffset.nanos = 0;
}
console.log(`\tStart: ${time.startTimeOffset.seconds}` +
`.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`);
console.log(`\tEnd: ${time.endTimeOffset.seconds}.` +
`${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`);
console.log(`\tConfidence: ${segment.confidence}`);
});
});
})
.catch((err) => {
Expand Down Expand Up @@ -195,9 +229,29 @@ function analyzeShots (gcsUri) {
console.log(`The entire video is one shot.`);
} else {
shotChanges.forEach((shot, shotIdx) => {
console.log(`Shot ${shotIdx} occurs from:`);
console.log(`\tStart: ${shot.startTimeOffset / 1e6}s`);
console.log(`\tEnd: ${shot.endTimeOffset / 1e6}s`);
console.log(`Scene ${shotIdx} occurs from:`);
if (shot.startTimeOffset === undefined) {
shot.startTimeOffset = {};
}
if (shot.endTimeOffset === undefined) {
shot.endTimeOffset = {};
}
if (shot.startTimeOffset.seconds === undefined) {
shot.startTimeOffset.seconds = 0;
}
if (shot.startTimeOffset.nanos === undefined) {
shot.startTimeOffset.nanos = 0;
}
if (shot.endTimeOffset.seconds === undefined) {
shot.endTimeOffset.seconds = 0;
}
if (shot.endTimeOffset.nanos === undefined) {
shot.endTimeOffset.nanos = 0;
}
console.log(`\tStart: ${shot.startTimeOffset.seconds}` +
`.${(shot.startTimeOffset.nanos / 1e6).toFixed(0)}s`);
console.log(`\tEnd: ${shot.endTimeOffset.seconds}.` +
`${(shot.endTimeOffset.nanos / 1e6).toFixed(0)}s`);
});
}
})
Expand All @@ -220,7 +274,7 @@ function analyzeSafeSearch (gcsUri) {

const request = {
inputUri: gcsUri,
features: ['SAFE_SEARCH_DETECTION']
features: ['EXPLICIT_CONTENT_DETECTION']
};

// Human-readable likelihoods
Expand All @@ -235,15 +289,21 @@ function analyzeSafeSearch (gcsUri) {
})
.then((results) => {
// Gets unsafe content
const safeSearchResults = results[0].annotationResults[0].safeSearchAnnotations;
console.log('Safe search results:');
safeSearchResults.forEach((result) => {
console.log(`Time: ${result.timeOffset / 1e6}s`);
console.log(`\tAdult: ${likelihoods[result.adult]}`);
console.log(`\tSpoof: ${likelihoods[result.spoof]}`);
console.log(`\tMedical: ${likelihoods[result.medical]}`);
console.log(`\tViolent: ${likelihoods[result.violent]}`);
console.log(`\tRacy: ${likelihoods[result.racy]}`);
const explicitContentResults = results[0].annotationResults[0].explicitAnnotation;
console.log('Explicit annotation results:');
explicitContentResults.frames.forEach((result) => {
if (result.timeOffset === undefined) {
result.timeOffset = {};
}
if (result.timeOffset.seconds === undefined) {
result.timeOffset.seconds = 0;
}
if (result.timeOffset.nanos === undefined) {
result.timeOffset.nanos = 0;
}
console.log(`\tTime: ${result.timeOffset.seconds}` +
`.${(result.timeOffset.nanos / 1e6).toFixed(0)}s`);
console.log(`\t\tPornography likelihood: ${likelihoods[result.pornographyLikelihood]}`);
});
})
.catch((err) => {
Expand Down Expand Up @@ -280,7 +340,7 @@ require(`yargs`) // eslint-disable-line
)
.command(
`safe-search <gcsUri>`,
`Detects adult content in a video stored in Google Cloud Storage.`,
`Detects explicit content in a video stored in Google Cloud Storage.`,
{},
(opts) => analyzeSafeSearch(opts.gcsUri)
)
Expand Down
9 changes: 4 additions & 5 deletions video/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,14 @@
"test": "samples test run --cmd ava -- -T 5m --verbose system-test/*.test.js"
},
"dependencies": {
"@google-cloud/video-intelligence": "0.1.0",
"googleapis": "19.0.0",
"@google-cloud/video-intelligence": "^0.3.2",
"long": "^3.2.0",
"safe-buffer": "5.1.0",
"safe-buffer": "5.1.1",
"yargs": "8.0.2"
},
"devDependencies": {
"@google-cloud/nodejs-repo-tools": "1.4.15",
"ava": "0.19.1",
"@google-cloud/nodejs-repo-tools": "1.4.17",
"ava": "0.22.0",
"proxyquire": "1.8.0"
},
"cloud-repo-tools": {
Expand Down
67 changes: 24 additions & 43 deletions video/quickstart.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,17 +20,15 @@
const Video = require('@google-cloud/video-intelligence');

// Instantiates a client
const video = Video({
projectId: process.env.GCLOUD_PROJECT // Replace with your Google Cloud project ID
});
const video = Video();

// The GCS filepath of the video to analyze
const gcsUri = 'gs://demomaker/tomatoes.mp4';
const gcsUri = 'gs://nodejs-docs-samples-video/quickstart_short.mp4';

// Construct request
const request = {
inputUri: gcsUri,
features: ['FACE_DETECTION', 'LABEL_DETECTION', 'SHOT_CHANGE_DETECTION']
features: ['LABEL_DETECTION']
};

// Execute request
Expand All @@ -44,47 +42,30 @@ video.annotateVideo(request)
// Gets annotations for video
const annotations = results[0].annotationResults[0];

// Gets faces for video from its annotations
const faces = annotations.faceAnnotations;
faces.forEach((face, faceIdx) => {
console.log('Thumbnail size:', face.thumbnail.length);
face.segments.forEach((segment, segmentIdx) => {
console.log(`Face #${faceIdx}, appearance #${segmentIdx}:`);
console.log(`\tStart: ${segment.startTimeOffset / 1e6}s`);
console.log(`\tEnd: ${segment.endTimeOffset / 1e6}s`);
});
});

// Gets labels for video from its annotations
const labels = annotations.labelAnnotations;
const labels = annotations.segmentLabelAnnotations;
labels.forEach((label) => {
console.log(`Label ${label.description} occurs at:`);
const isEntireVideo = label.locations.some((location) =>
location.segment.startTimeOffset.toNumber() === -1 &&
location.segment.endTimeOffset.toNumber() === -1
);

if (isEntireVideo) {
console.log(`\tEntire video`);
} else {
label.locations.forEach((location) => {
console.log(`\tStart: ${location.segment.startTimeOffset / 1e6}s`);
console.log(`\tEnd: ${location.segment.endTimeOffset / 1e6}s`);
});
}
});

// Gets shot changes for video from its annotations
const shotChanges = annotations.shotAnnotations;
if (shotChanges.length === 1) {
console.log(`The entire video is one scene.`);
} else {
shotChanges.forEach((shot, shotIdx) => {
console.log(`Scene ${shotIdx} occurs from:`);
console.log(`\tStart: ${shot.startTimeOffset / 1e6}s`);
console.log(`\tEnd: ${shot.endTimeOffset / 1e6}s`);
console.log(`Label ${label.entity.description} occurs at:`);
label.segments.forEach((segment) => {
segment = segment.segment;
if (segment.startTimeOffset.seconds === undefined) {
segment.startTimeOffset.seconds = 0;
}
if (segment.startTimeOffset.nanos === undefined) {
segment.startTimeOffset.nanos = 0;
}
if (segment.endTimeOffset.seconds === undefined) {
segment.endTimeOffset.seconds = 0;
}
if (segment.endTimeOffset.nanos === undefined) {
segment.endTimeOffset.nanos = 0;
}
console.log(`\tStart: ${segment.startTimeOffset.seconds}` +
`.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`);
console.log(`\tEnd: ${segment.endTimeOffset.seconds}.` +
`${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s`);
});
}
});
})
.catch((err) => {
console.error('ERROR:', err);
Expand Down
Loading