From 0df0619eb69c22c28df048a816353f8daf220433 Mon Sep 17 00:00:00 2001 From: Ace Nassri Date: Thu, 24 Aug 2017 16:33:37 -0700 Subject: [PATCH 1/8] Speed up video tests --- video/quickstart.js | 2 +- video/system-test/analyze.test.js | 25 ++++++++++++++----------- video/system-test/quickstart.test.js | 2 +- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/video/quickstart.js b/video/quickstart.js index 3aeec4334c..5a2cb74a6a 100644 --- a/video/quickstart.js +++ b/video/quickstart.js @@ -25,7 +25,7 @@ const video = Video({ }); // The GCS filepath of the video to analyze -const gcsUri = 'gs://demomaker/tomatoes.mp4'; +const gcsUri = 'gs://nodejs-docs-samples-video/quickstart_short.mp4'; // Construct request const request = { diff --git a/video/system-test/analyze.test.js b/video/system-test/analyze.test.js index 7ac7d1d7af..ec305443cb 100644 --- a/video/system-test/analyze.test.js +++ b/video/system-test/analyze.test.js @@ -24,9 +24,13 @@ const tools = require(`@google-cloud/nodejs-repo-tools`); const cmd = `node analyze.js`; const cwd = path.join(__dirname, `..`); +const url = `gs://nodejs-docs-samples-video/quickstart.mp4`; +const shortUrl = `gs://nodejs-docs-samples-video/quickstart_short.mp4`; +const file = `resources/cat.mp4`; + // analyze_faces test(`should analyze faces in a GCS file`, async (t) => { - const output = await tools.runAsync(`${cmd} faces gs://demomaker/larry_sergey_ice_bucket_short.mp4`, cwd); + const output = await tools.runAsync(`${cmd} faces ${shortUrl}`, cwd); t.regex(output, /Thumbnail size: \d+/); t.regex(output, /Start: \d+\.\d+s/); t.regex(output, /End: \d+\.\d+s/); @@ -34,40 +38,39 @@ test(`should analyze faces in a GCS file`, async (t) => { // analyze_labels_gcs (one scene) test(`should analyze labels in a GCS file with one scene`, async (t) => { - const output = await tools.runAsync(`${cmd} labels-gcs gs://demomaker/tomatoes.mp4`, cwd); - t.regex(output, /Label Tomato occurs at:/); + const output = await tools.runAsync(`${cmd} labels-gcs ${shortUrl}`, cwd); + t.regex(output, /Label Shirt occurs at:/); t.regex(output, /Entire video/); }); // analyze_labels_gcs (multiple scenes) test(`should analyze labels in a GCS file with multiple scenes`, async (t) => { - const output = await tools.runAsync(`${cmd} labels-gcs gs://demomaker/sushi.mp4`, cwd); - t.regex(output, /Label Food occurs at:/); - t.regex(output, /Start: \d+\.\d+s/); - t.regex(output, /End: \d+\.\d+s/); + const output = await tools.runAsync(`${cmd} labels-gcs ${url}`, cwd); + t.regex(output, /Label Shirt occurs at:/); + t.regex(output, /Entire video/); }); // analyze_labels_local test(`should analyze labels in a local file`, async (t) => { - const output = await tools.runAsync(`${cmd} labels-file resources/cat.mp4`, cwd); + const output = await tools.runAsync(`${cmd} labels-file ${file}`, cwd); t.regex(output, /Label Whiskers occurs at:/); t.regex(output, /Entire video/); }); // analyze_shots (multiple shots) test(`should analyze shots in a GCS file with multiple shots`, async (t) => { - const output = await tools.runAsync(`${cmd} shots gs://demomaker/sushi.mp4`, cwd); + const output = await tools.runAsync(`${cmd} shots ${url}`, cwd); t.regex(output, /Shot 0 occurs from:/); }); // analyze_shots (one shot) test(`should analyze shots in a GCS file with one shot`, async (t) => { - const output = await tools.runAsync(`${cmd} shots gs://demomaker/tomatoes.mp4`, cwd); + const output = await tools.runAsync(`${cmd} shots ${shortUrl}`, cwd); t.regex(output, /The entire video is one shot./); }); // 
analyze_safe_search test(`should analyze safe search results in a GCS file`, async (t) => { - const output = await tools.runAsync(`${cmd} safe-search gs://demomaker/tomatoes.mp4`, cwd); + const output = await tools.runAsync(`${cmd} safe-search ${url}`, cwd); t.regex(output, /Spoof:/); }); diff --git a/video/system-test/quickstart.test.js b/video/system-test/quickstart.test.js index bff0f3c49a..20eaf51e99 100644 --- a/video/system-test/quickstart.test.js +++ b/video/system-test/quickstart.test.js @@ -24,7 +24,7 @@ const cwd = path.join(__dirname, `..`); test(`should analyze a hardcoded video`, async (t) => { const output = await tools.runAsync(cmd, cwd); - t.regex(output, /Label Tomato occurs at:/); + t.regex(output, /Label Standing occurs at:/); t.regex(output, /Entire video/); t.regex(output, /The entire video is one scene./); }); From 7287b870092a4edfc5d372eaaabd215ee0595b07 Mon Sep 17 00:00:00 2001 From: Ace Nassri Date: Thu, 31 Aug 2017 14:58:53 -0700 Subject: [PATCH 2/8] Update video libraries WIP --- video/README.md | 12 ++-- video/analyze.js | 98 ++++++++++++++++++---------- video/package.json | 10 +-- video/quickstart.js | 37 ++++++++--- video/system-test/analyze.test.js | 13 ++-- video/system-test/quickstart.test.js | 3 +- 6 files changed, 111 insertions(+), 62 deletions(-) diff --git a/video/README.md b/video/README.md index fca30dac83..8642c51e61 100644 --- a/video/README.md +++ b/video/README.md @@ -26,7 +26,7 @@ The [Cloud Video Intelligence API](https://cloud.google.com/video-intelligence) yarn install -[prereq]: ../README.md#prerequisities +[prereq]: ../README.md#prerequisites [run]: ../README.md#how-to-run-a-sample ## Samples @@ -44,17 +44,17 @@ Commands: Intelligence API. labels-gcs Labels objects in a video stored in Google Cloud Storage using the Cloud Video Intelligence API. labels-file Labels objects in a video stored locally using the Cloud Video Intelligence API. - safe-search Detects adult content in a video stored in Google Cloud Storage. + safe-search Detects explicit content in a video stored in Google Cloud Storage. 
Options: --help Show help [boolean] Examples: - node analyze.js faces gs://demomaker/volleyball_court.mp4 - node analyze.js shots gs://demomaker/volleyball_court.mp4 - node analyze.js labels-gcs gs://demomaker/volleyball_court.mp4 + node analyze.js faces gs://demomaker/larry_sergey_ice_bucket_short.mp4 + node analyze.js shots gs://demomaker/sushi.mp4 + node analyze.js labels-gcs gs://demomaker/tomatoes.mp4 node analyze.js labels-file cat.mp4 - node analyze.js safe-search gs://demomaker/volleyball_court.mp4 + node analyze.js safe-search gs://demomaker/tomatoes.mp4 For more information, see https://cloud.google.com/video-intelligence/docs ``` diff --git a/video/analyze.js b/video/analyze.js index 5ed2312cbd..eeec5425e3 100644 --- a/video/analyze.js +++ b/video/analyze.js @@ -21,7 +21,9 @@ function analyzeFaces (gcsUri) { const Video = require('@google-cloud/video-intelligence'); // Instantiates a client - const video = Video(); + const video = Video({ + servicePath: `alpha-videointelligence.googleapis.com` + }); // The GCS filepath of the video to analyze // const gcsUri = 'gs://my-bucket/my-video.mp4'; @@ -41,13 +43,18 @@ function analyzeFaces (gcsUri) { .then((results) => { // Gets faces const faces = results[0].annotationResults[0].faceAnnotations; - console.log('Faces:'); faces.forEach((face, faceIdx) => { - console.log('Thumbnail size:', face.thumbnail.length); + console.log(`Face #${faceIdx}`); + console.log(`\tThumbnail size: ${face.thumbnail.length}`); face.segments.forEach((segment, segmentIdx) => { - console.log(`Face #${faceIdx}, appearance #${segmentIdx}:`); - console.log(`\tStart: ${segment.startTimeOffset / 1e6}s`); - console.log(`\tEnd: ${segment.endTimeOffset / 1e6}s`); + console.log(`\tAppearance #${segmentIdx}:`); + console.log(`\t\tStart: ${segment.startTimeOffset / 1e6}s`); + console.log(`\t\tEnd: ${segment.endTimeOffset / 1e6}s`); + }); + console.log(`\tLocations:`); + face.locations.forEach((location, locationIdx) => { + const box = location.boundingBox; + console.log(`\t\tTime ${location.timeOffset / 1e6}s: (${box.top}, ${box.left}) - (${box.bottom}, ${box.right})`); }); }); }) @@ -57,13 +64,16 @@ function analyzeFaces (gcsUri) { // [END analyze_faces] } +// TODO finish this method function analyzeLabelsGCS (gcsUri) { // [START analyze_labels_gcs] // Imports the Google Cloud Video Intelligence library const Video = require('@google-cloud/video-intelligence'); // Instantiates a client - const video = Video(); + const video = Video({ + servicePath: `alpha-videointelligence.googleapis.com` + }); // The GCS filepath of the video to analyze // const gcsUri = 'gs://my-bucket/my-video.mp4'; @@ -83,22 +93,27 @@ function analyzeLabelsGCS (gcsUri) { .then((results) => { // Gets labels const labels = results[0].annotationResults[0].labelAnnotations; + // TODO What does "level" mean? + // TODO Why are there no segment-level annotations? 
+ console.log('Labels:'); labels.forEach((label) => { console.log(`Label ${label.description} occurs at:`); - const isEntireVideo = label.locations.some((location) => - location.segment.startTimeOffset.toNumber() === -1 && - location.segment.endTimeOffset.toNumber() === -1 - ); - - if (isEntireVideo) { - console.log(`\tEntire video`); - } else { - label.locations.forEach((location) => { + label.locations.forEach((location) => { + const isEntireVideo = label.locations.some((location) => + location.segment.startTimeOffset.toNumber() === -1 && + location.segment.endTimeOffset.toNumber() === -1 + ); + + if (isEntireVideo) { + console.log(`\tEntire video`); + } else { console.log(`\tStart: ${location.segment.startTimeOffset / 1e6}s`); console.log(`\tEnd: ${location.segment.endTimeOffset / 1e6}s`); - }); - } + } + + console.log(`\tConfidence: ${location.confidence}`); + }); }); }) .catch((err) => { @@ -114,7 +129,9 @@ function analyzeLabelsLocal (path) { const fs = require('fs'); // Instantiates a client - const video = Video(); + const video = Video({ + servicePath: `alpha-videointelligence.googleapis.com` + }); // The local filepath of the video to analyze // const path = 'my-file.mp4'; @@ -139,22 +156,26 @@ function analyzeLabelsLocal (path) { .then((results) => { // Gets labels for first video const labels = results[0].annotationResults[0].labelAnnotations; + // TODO What does "level" mean? + // TODO Why are there no segment-level annotations? + console.log('Labels:'); labels.forEach((label) => { console.log(`Label ${label.description} occurs at:`); - const isEntireVideo = label.locations.some((location) => - location.segment.startTimeOffset.toNumber() === -1 && - location.segment.endTimeOffset.toNumber() === -1 - ); - - if (isEntireVideo) { - console.log(`\tEntire video`); - } else { - label.locations.forEach((location) => { + + label.locations.forEach((location) => { + const isEntireVideo = + location.segment.startTimeOffset.toNumber() === -1 && + location.segment.endTimeOffset.toNumber() === -1; + + if (isEntireVideo) { + console.log(`\tEntire video`); + } else { console.log(`\tStart: ${location.segment.startTimeOffset / 1e6}s`); console.log(`\tEnd: ${location.segment.endTimeOffset / 1e6}s`); - }); - } + } + console.log(`\tConfidence: ${location.confidence}`); + }); }); }) .catch((err) => { @@ -169,7 +190,9 @@ function analyzeShots (gcsUri) { const Video = require('@google-cloud/video-intelligence'); // Instantiates a client - const video = Video(); + const video = Video({ + servicePath: `alpha-videointelligence.googleapis.com` + }); // The GCS filepath of the video to analyze // const gcsUri = 'gs://my-bucket/my-video.mp4'; @@ -207,13 +230,16 @@ function analyzeShots (gcsUri) { // [END analyze_shots] } +// TODO upgrade this method for v1beta2 function analyzeSafeSearch (gcsUri) { // [START analyze_safe_search] // Imports the Google Cloud Video Intelligence library const Video = require('@google-cloud/video-intelligence'); // Instantiates a client - const video = Video(); + const video = Video({ + servicePath: `alpha-videointelligence.googleapis.com` + }); // The GCS filepath of the video to analyze // const gcsUri = 'gs://my-bucket/my-video.mp4'; @@ -235,9 +261,9 @@ function analyzeSafeSearch (gcsUri) { }) .then((results) => { // Gets unsafe content - const safeSearchResults = results[0].annotationResults[0].safeSearchAnnotations; - console.log('Safe search results:'); - safeSearchResults.forEach((result) => { + const explicitContentResults = 
results[0].annotationResults[0].safeSearchAnnotations; + console.log('Explicit content results:'); + explicitContentResults.forEach((result) => { console.log(`Time: ${result.timeOffset / 1e6}s`); console.log(`\tAdult: ${likelihoods[result.adult]}`); console.log(`\tSpoof: ${likelihoods[result.spoof]}`); @@ -280,7 +306,7 @@ require(`yargs`) // eslint-disable-line ) .command( `safe-search `, - `Detects adult content in a video stored in Google Cloud Storage.`, + `Detects explicit content in a video stored in Google Cloud Storage.`, {}, (opts) => analyzeSafeSearch(opts.gcsUri) ) diff --git a/video/package.json b/video/package.json index 6a4ff41ca5..b7263db43b 100644 --- a/video/package.json +++ b/video/package.json @@ -17,15 +17,15 @@ "test": "samples test run --cmd ava -- -T 5m --verbose system-test/*.test.js" }, "dependencies": { - "@google-cloud/video-intelligence": "0.1.0", - "googleapis": "19.0.0", + "@google-cloud/video-intelligence": "0.2.0", + "googleapis": "21.3.0", "long": "^3.2.0", - "safe-buffer": "5.1.0", + "safe-buffer": "5.1.1", "yargs": "8.0.2" }, "devDependencies": { - "@google-cloud/nodejs-repo-tools": "1.4.15", - "ava": "0.19.1", + "@google-cloud/nodejs-repo-tools": "1.4.17", + "ava": "0.22.0", "proxyquire": "1.8.0" }, "cloud-repo-tools": { diff --git a/video/quickstart.js b/video/quickstart.js index 5a2cb74a6a..af6092ce90 100644 --- a/video/quickstart.js +++ b/video/quickstart.js @@ -21,12 +21,16 @@ const Video = require('@google-cloud/video-intelligence'); // Instantiates a client const video = Video({ + servicePath: 'alpha-videointelligence.googleapis.com', projectId: process.env.GCLOUD_PROJECT // Replace with your Google Cloud project ID }); // The GCS filepath of the video to analyze const gcsUri = 'gs://nodejs-docs-samples-video/quickstart_short.mp4'; +// Human-readable likelihoods +const likelihoods = ['UNKNOWN', 'VERY_UNLIKELY', 'UNLIKELY', 'POSSIBLE', 'LIKELY', 'VERY_LIKELY']; + // Construct request const request = { inputUri: gcsUri, @@ -45,6 +49,7 @@ video.annotateVideo(request) const annotations = results[0].annotationResults[0]; // Gets faces for video from its annotations + // TODO upgrade const faces = annotations.faceAnnotations; faces.forEach((face, faceIdx) => { console.log('Thumbnail size:', face.thumbnail.length); @@ -59,19 +64,19 @@ video.annotateVideo(request) const labels = annotations.labelAnnotations; labels.forEach((label) => { console.log(`Label ${label.description} occurs at:`); - const isEntireVideo = label.locations.some((location) => - location.segment.startTimeOffset.toNumber() === -1 && - location.segment.endTimeOffset.toNumber() === -1 - ); + label.locations.forEach((location) => { + const isEntireVideo = + location.segment.startTimeOffset.toNumber() === -1 && + location.segment.endTimeOffset.toNumber() === -1; - if (isEntireVideo) { - console.log(`\tEntire video`); - } else { - label.locations.forEach((location) => { + if (isEntireVideo) { + console.log(`\tEntire video`); + } else { console.log(`\tStart: ${location.segment.startTimeOffset / 1e6}s`); console.log(`\tEnd: ${location.segment.endTimeOffset / 1e6}s`); - }); - } + } + console.log(`\tConfidence: ${location.confidence}`); + }); }); // Gets shot changes for video from its annotations @@ -85,6 +90,18 @@ video.annotateVideo(request) console.log(`\tEnd: ${shot.endTimeOffset / 1e6}s`); }); } + + // Gets explicit content data for video from its annotations + const explicitContentResults = annotations.safeSearchAnnotations; + console.log('Explicit content results:'); + 
explicitContentResults.forEach((result) => { + console.log(`Time: ${result.timeOffset / 1e6}s`); + console.log(`\tAdult: ${likelihoods[result.adult]}`); + console.log(`\tSpoof: ${likelihoods[result.spoof]}`); + console.log(`\tMedical: ${likelihoods[result.medical]}`); + console.log(`\tViolent: ${likelihoods[result.violent]}`); + console.log(`\tRacy: ${likelihoods[result.racy]}`); + }); }) .catch((err) => { console.error('ERROR:', err); diff --git a/video/system-test/analyze.test.js b/video/system-test/analyze.test.js index ec305443cb..1a290cd474 100644 --- a/video/system-test/analyze.test.js +++ b/video/system-test/analyze.test.js @@ -30,31 +30,35 @@ const file = `resources/cat.mp4`; // analyze_faces test(`should analyze faces in a GCS file`, async (t) => { - const output = await tools.runAsync(`${cmd} faces ${shortUrl}`, cwd); + const output = await tools.runAsync(`${cmd} faces ${url}`, cwd); t.regex(output, /Thumbnail size: \d+/); t.regex(output, /Start: \d+\.\d+s/); t.regex(output, /End: \d+\.\d+s/); + t.regex(output, /Time \d+\.\d+s: \(\d+, \d+\) - \(\d+, \d+\)/); }); // analyze_labels_gcs (one scene) test(`should analyze labels in a GCS file with one scene`, async (t) => { const output = await tools.runAsync(`${cmd} labels-gcs ${shortUrl}`, cwd); - t.regex(output, /Label Shirt occurs at:/); + t.regex(output, /Label shirt occurs at:/); t.regex(output, /Entire video/); + t.regex(output, /Confidence: \d+\.\d+/); }); // analyze_labels_gcs (multiple scenes) test(`should analyze labels in a GCS file with multiple scenes`, async (t) => { const output = await tools.runAsync(`${cmd} labels-gcs ${url}`, cwd); - t.regex(output, /Label Shirt occurs at:/); + t.regex(output, /Label shirt occurs at:/); t.regex(output, /Entire video/); + t.regex(output, /Confidence: \d+\.\d+/); }); // analyze_labels_local test(`should analyze labels in a local file`, async (t) => { const output = await tools.runAsync(`${cmd} labels-file ${file}`, cwd); - t.regex(output, /Label Whiskers occurs at:/); + t.regex(output, /Label whiskers occurs at:/); t.regex(output, /Entire video/); + t.regex(output, /Confidence: \d+\.\d+/); }); // analyze_shots (multiple shots) @@ -72,5 +76,6 @@ test(`should analyze shots in a GCS file with one shot`, async (t) => { // analyze_safe_search test(`should analyze safe search results in a GCS file`, async (t) => { const output = await tools.runAsync(`${cmd} safe-search ${url}`, cwd); + t.regex(output, /Time: \d+\.\d+s/); t.regex(output, /Spoof:/); }); diff --git a/video/system-test/quickstart.test.js b/video/system-test/quickstart.test.js index 20eaf51e99..bafa586035 100644 --- a/video/system-test/quickstart.test.js +++ b/video/system-test/quickstart.test.js @@ -24,7 +24,8 @@ const cwd = path.join(__dirname, `..`); test(`should analyze a hardcoded video`, async (t) => { const output = await tools.runAsync(cmd, cwd); - t.regex(output, /Label Standing occurs at:/); + t.regex(output, /Label standing occurs at:/); t.regex(output, /Entire video/); + t.regex(output, /Confidence: \d+\.\d+/); t.regex(output, /The entire video is one scene./); }); From 2fadc9e1f9a05371a6bfcaf1cb2fcf9792c90bb3 Mon Sep 17 00:00:00 2001 From: Ace Nassri Date: Thu, 31 Aug 2017 16:37:24 -0700 Subject: [PATCH 3/8] Make tests serial to reduce flakiness --- video/system-test/analyze.test.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/video/system-test/analyze.test.js b/video/system-test/analyze.test.js index 1a290cd474..82c4a409ec 100644 --- a/video/system-test/analyze.test.js +++ 
b/video/system-test/analyze.test.js @@ -29,7 +29,7 @@ const shortUrl = `gs://nodejs-docs-samples-video/quickstart_short.mp4`; const file = `resources/cat.mp4`; // analyze_faces -test(`should analyze faces in a GCS file`, async (t) => { +test.serial(`should analyze faces in a GCS file`, async (t) => { const output = await tools.runAsync(`${cmd} faces ${url}`, cwd); t.regex(output, /Thumbnail size: \d+/); t.regex(output, /Start: \d+\.\d+s/); @@ -38,7 +38,7 @@ test(`should analyze faces in a GCS file`, async (t) => { }); // analyze_labels_gcs (one scene) -test(`should analyze labels in a GCS file with one scene`, async (t) => { +test.serial(`should analyze labels in a GCS file with one scene`, async (t) => { const output = await tools.runAsync(`${cmd} labels-gcs ${shortUrl}`, cwd); t.regex(output, /Label shirt occurs at:/); t.regex(output, /Entire video/); @@ -46,7 +46,7 @@ test(`should analyze labels in a GCS file with one scene`, async (t) => { }); // analyze_labels_gcs (multiple scenes) -test(`should analyze labels in a GCS file with multiple scenes`, async (t) => { +test.serial(`should analyze labels in a GCS file with multiple scenes`, async (t) => { const output = await tools.runAsync(`${cmd} labels-gcs ${url}`, cwd); t.regex(output, /Label shirt occurs at:/); t.regex(output, /Entire video/); @@ -54,7 +54,7 @@ test(`should analyze labels in a GCS file with multiple scenes`, async (t) => { }); // analyze_labels_local -test(`should analyze labels in a local file`, async (t) => { +test.serial(`should analyze labels in a local file`, async (t) => { const output = await tools.runAsync(`${cmd} labels-file ${file}`, cwd); t.regex(output, /Label whiskers occurs at:/); t.regex(output, /Entire video/); @@ -62,19 +62,19 @@ test(`should analyze labels in a local file`, async (t) => { }); // analyze_shots (multiple shots) -test(`should analyze shots in a GCS file with multiple shots`, async (t) => { +test.serial(`should analyze shots in a GCS file with multiple shots`, async (t) => { const output = await tools.runAsync(`${cmd} shots ${url}`, cwd); t.regex(output, /Shot 0 occurs from:/); }); // analyze_shots (one shot) -test(`should analyze shots in a GCS file with one shot`, async (t) => { +test.serial(`should analyze shots in a GCS file with one shot`, async (t) => { const output = await tools.runAsync(`${cmd} shots ${shortUrl}`, cwd); t.regex(output, /The entire video is one shot./); }); // analyze_safe_search -test(`should analyze safe search results in a GCS file`, async (t) => { +test.serial(`should analyze safe search results in a GCS file`, async (t) => { const output = await tools.runAsync(`${cmd} safe-search ${url}`, cwd); t.regex(output, /Time: \d+\.\d+s/); t.regex(output, /Spoof:/); From 0ffcffc4ca7a5a08b4db8e59ff351bbf691fa945 Mon Sep 17 00:00:00 2001 From: Ace Nassri Date: Thu, 31 Aug 2017 16:42:57 -0700 Subject: [PATCH 4/8] Remove old TODOs --- video/analyze.js | 2 -- video/quickstart.js | 1 - 2 files changed, 3 deletions(-) diff --git a/video/analyze.js b/video/analyze.js index eeec5425e3..e724ec34cd 100644 --- a/video/analyze.js +++ b/video/analyze.js @@ -64,7 +64,6 @@ function analyzeFaces (gcsUri) { // [END analyze_faces] } -// TODO finish this method function analyzeLabelsGCS (gcsUri) { // [START analyze_labels_gcs] // Imports the Google Cloud Video Intelligence library @@ -230,7 +229,6 @@ function analyzeShots (gcsUri) { // [END analyze_shots] } -// TODO upgrade this method for v1beta2 function analyzeSafeSearch (gcsUri) { // [START analyze_safe_search] // Imports the Google 
Cloud Video Intelligence library diff --git a/video/quickstart.js b/video/quickstart.js index af6092ce90..b450aba5cd 100644 --- a/video/quickstart.js +++ b/video/quickstart.js @@ -49,7 +49,6 @@ video.annotateVideo(request) const annotations = results[0].annotationResults[0]; // Gets faces for video from its annotations - // TODO upgrade const faces = annotations.faceAnnotations; faces.forEach((face, faceIdx) => { console.log('Thumbnail size:', face.thumbnail.length); From 7bfa28be6bda4384ad24d26cb1a073a74ae6bb51 Mon Sep 17 00:00:00 2001 From: Gus Class Date: Thu, 14 Sep 2017 13:10:43 -0700 Subject: [PATCH 5/8] Fixes endpoint for faces --- video/analyze.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/video/analyze.js b/video/analyze.js index e724ec34cd..05ef6b5db0 100644 --- a/video/analyze.js +++ b/video/analyze.js @@ -22,7 +22,7 @@ function analyzeFaces (gcsUri) { // Instantiates a client const video = Video({ - servicePath: `alpha-videointelligence.googleapis.com` + servicePath: `videointelligence.googleapis.com` }); // The GCS filepath of the video to analyze From bc851fe25065029e5a43fff5d9cd53a4f77349a7 Mon Sep 17 00:00:00 2001 From: Gus Class Date: Mon, 18 Sep 2017 17:58:44 -0700 Subject: [PATCH 6/8] Updates to new client library version --- video/package.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/video/package.json b/video/package.json index b7263db43b..4e6765ed18 100644 --- a/video/package.json +++ b/video/package.json @@ -17,8 +17,7 @@ "test": "samples test run --cmd ava -- -T 5m --verbose system-test/*.test.js" }, "dependencies": { - "@google-cloud/video-intelligence": "0.2.0", - "googleapis": "21.3.0", + "@google-cloud/video-intelligence": "^0.3.1", "long": "^3.2.0", "safe-buffer": "5.1.1", "yargs": "8.0.2" From 7cd9b484e3e4a6bdb00fd638455f982c5d8b0c61 Mon Sep 17 00:00:00 2001 From: Gus Class Date: Tue, 19 Sep 2017 16:55:23 -0700 Subject: [PATCH 7/8] update endpoint. 
--- video/analyze.js | 18 +++++------------- video/quickstart.js | 2 +- 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/video/analyze.js b/video/analyze.js index 05ef6b5db0..899b4761b9 100644 --- a/video/analyze.js +++ b/video/analyze.js @@ -21,9 +21,7 @@ function analyzeFaces (gcsUri) { const Video = require('@google-cloud/video-intelligence'); // Instantiates a client - const video = Video({ - servicePath: `videointelligence.googleapis.com` - }); + const video = Video(); // The GCS filepath of the video to analyze // const gcsUri = 'gs://my-bucket/my-video.mp4'; @@ -71,7 +69,7 @@ function analyzeLabelsGCS (gcsUri) { // Instantiates a client const video = Video({ - servicePath: `alpha-videointelligence.googleapis.com` + servicePath: `videointelligence.googleapis.com` }); // The GCS filepath of the video to analyze @@ -128,9 +126,7 @@ function analyzeLabelsLocal (path) { const fs = require('fs'); // Instantiates a client - const video = Video({ - servicePath: `alpha-videointelligence.googleapis.com` - }); + const video = Video(); // The local filepath of the video to analyze // const path = 'my-file.mp4'; @@ -189,9 +185,7 @@ function analyzeShots (gcsUri) { const Video = require('@google-cloud/video-intelligence'); // Instantiates a client - const video = Video({ - servicePath: `alpha-videointelligence.googleapis.com` - }); + const video = Video(); // The GCS filepath of the video to analyze // const gcsUri = 'gs://my-bucket/my-video.mp4'; @@ -235,9 +229,7 @@ function analyzeSafeSearch (gcsUri) { const Video = require('@google-cloud/video-intelligence'); // Instantiates a client - const video = Video({ - servicePath: `alpha-videointelligence.googleapis.com` - }); + const video = Video(); // The GCS filepath of the video to analyze // const gcsUri = 'gs://my-bucket/my-video.mp4'; diff --git a/video/quickstart.js b/video/quickstart.js index b450aba5cd..3b8e938c00 100644 --- a/video/quickstart.js +++ b/video/quickstart.js @@ -21,7 +21,7 @@ const Video = require('@google-cloud/video-intelligence'); // Instantiates a client const video = Video({ - servicePath: 'alpha-videointelligence.googleapis.com', + servicePath: 'videointelligence.googleapis.com', projectId: process.env.GCLOUD_PROJECT // Replace with your Google Cloud project ID }); From 8f5dd568a9d912ebb2549bf55f4fe9a2e279974d Mon Sep 17 00:00:00 2001 From: Gus Class Date: Thu, 21 Sep 2017 15:12:25 -0700 Subject: [PATCH 8/8] Upgrades to new client library version --- video/README.md | 2 +- video/analyze.js | 156 +++++++++++++++++---------- video/package.json | 2 +- video/quickstart.js | 77 ++++--------- video/system-test/analyze.test.js | 12 +-- video/system-test/quickstart.test.js | 3 - 6 files changed, 127 insertions(+), 125 deletions(-) diff --git a/video/README.md b/video/README.md index 8642c51e61..05da66feb4 100644 --- a/video/README.md +++ b/video/README.md @@ -53,7 +53,7 @@ Examples: node analyze.js faces gs://demomaker/larry_sergey_ice_bucket_short.mp4 node analyze.js shots gs://demomaker/sushi.mp4 node analyze.js labels-gcs gs://demomaker/tomatoes.mp4 - node analyze.js labels-file cat.mp4 + node analyze.js labels-file resources/cat.mp4 node analyze.js safe-search gs://demomaker/tomatoes.mp4 For more information, see https://cloud.google.com/video-intelligence/docs diff --git a/video/analyze.js b/video/analyze.js index 899b4761b9..f82c904a8c 100644 --- a/video/analyze.js +++ b/video/analyze.js @@ -45,15 +45,26 @@ function analyzeFaces (gcsUri) { console.log(`Face #${faceIdx}`); console.log(`\tThumbnail size: 
${face.thumbnail.length}`); face.segments.forEach((segment, segmentIdx) => { + segment = segment.segment; + if (segment.startTimeOffset.seconds === undefined) { + segment.startTimeOffset.seconds = 0; + } + if (segment.startTimeOffset.nanos === undefined) { + segment.startTimeOffset.nanos = 0; + } + if (segment.endTimeOffset.seconds === undefined) { + segment.endTimeOffset.seconds = 0; + } + if (segment.endTimeOffset.nanos === undefined) { + segment.endTimeOffset.nanos = 0; + } console.log(`\tAppearance #${segmentIdx}:`); - console.log(`\t\tStart: ${segment.startTimeOffset / 1e6}s`); - console.log(`\t\tEnd: ${segment.endTimeOffset / 1e6}s`); + console.log(`\t\tStart: ${segment.startTimeOffset.seconds}` + + `.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`); + console.log(`\t\tEnd: ${segment.endTimeOffset.seconds}.` + + `${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s`); }); console.log(`\tLocations:`); - face.locations.forEach((location, locationIdx) => { - const box = location.boundingBox; - console.log(`\t\tTime ${location.timeOffset / 1e6}s: (${box.top}, ${box.left}) - (${box.bottom}, ${box.right})`); - }); }); }) .catch((err) => { @@ -88,28 +99,31 @@ function analyzeLabelsGCS (gcsUri) { return operation.promise(); }) .then((results) => { - // Gets labels - const labels = results[0].annotationResults[0].labelAnnotations; - // TODO What does "level" mean? - // TODO Why are there no segment-level annotations? + // Gets annotations for video + const annotations = results[0].annotationResults[0]; - console.log('Labels:'); + const labels = annotations.segmentLabelAnnotations; labels.forEach((label) => { - console.log(`Label ${label.description} occurs at:`); - label.locations.forEach((location) => { - const isEntireVideo = label.locations.some((location) => - location.segment.startTimeOffset.toNumber() === -1 && - location.segment.endTimeOffset.toNumber() === -1 - ); - - if (isEntireVideo) { - console.log(`\tEntire video`); - } else { - console.log(`\tStart: ${location.segment.startTimeOffset / 1e6}s`); - console.log(`\tEnd: ${location.segment.endTimeOffset / 1e6}s`); + console.log(`Label ${label.entity.description} occurs at:`); + label.segments.forEach((segment) => { + let time = segment.segment; + if (time.startTimeOffset.seconds === undefined) { + time.startTimeOffset.seconds = 0; } - - console.log(`\tConfidence: ${location.confidence}`); + if (time.startTimeOffset.nanos === undefined) { + time.startTimeOffset.nanos = 0; + } + if (time.endTimeOffset.seconds === undefined) { + time.endTimeOffset.seconds = 0; + } + if (time.endTimeOffset.nanos === undefined) { + time.endTimeOffset.nanos = 0; + } + console.log(`\tStart: ${time.startTimeOffset.seconds}` + + `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`); + console.log(`\tEnd: ${time.endTimeOffset.seconds}.` + + `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`); + console.log(`\tConfidence: ${segment.confidence}`); }); }); }) @@ -149,27 +163,31 @@ function analyzeLabelsLocal (path) { return operation.promise(); }) .then((results) => { - // Gets labels for first video - const labels = results[0].annotationResults[0].labelAnnotations; - // TODO What does "level" mean? - // TODO Why are there no segment-level annotations? 
+ // Gets annotations for video + const annotations = results[0].annotationResults[0]; - console.log('Labels:'); + const labels = annotations.segmentLabelAnnotations; labels.forEach((label) => { - console.log(`Label ${label.description} occurs at:`); - - label.locations.forEach((location) => { - const isEntireVideo = - location.segment.startTimeOffset.toNumber() === -1 && - location.segment.endTimeOffset.toNumber() === -1; - - if (isEntireVideo) { - console.log(`\tEntire video`); - } else { - console.log(`\tStart: ${location.segment.startTimeOffset / 1e6}s`); - console.log(`\tEnd: ${location.segment.endTimeOffset / 1e6}s`); + console.log(`Label ${label.entity.description} occurs at:`); + label.segments.forEach((segment) => { + let time = segment.segment; + if (time.startTimeOffset.seconds === undefined) { + time.startTimeOffset.seconds = 0; } - console.log(`\tConfidence: ${location.confidence}`); + if (time.startTimeOffset.nanos === undefined) { + time.startTimeOffset.nanos = 0; + } + if (time.endTimeOffset.seconds === undefined) { + time.endTimeOffset.seconds = 0; + } + if (time.endTimeOffset.nanos === undefined) { + time.endTimeOffset.nanos = 0; + } + console.log(`\tStart: ${time.startTimeOffset.seconds}` + + `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`); + console.log(`\tEnd: ${time.endTimeOffset.seconds}.` + + `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`); + console.log(`\tConfidence: ${segment.confidence}`); }); }); }) @@ -211,9 +229,29 @@ function analyzeShots (gcsUri) { console.log(`The entire video is one shot.`); } else { shotChanges.forEach((shot, shotIdx) => { - console.log(`Shot ${shotIdx} occurs from:`); - console.log(`\tStart: ${shot.startTimeOffset / 1e6}s`); - console.log(`\tEnd: ${shot.endTimeOffset / 1e6}s`); + console.log(`Scene ${shotIdx} occurs from:`); + if (shot.startTimeOffset === undefined) { + shot.startTimeOffset = {}; + } + if (shot.endTimeOffset === undefined) { + shot.endTimeOffset = {}; + } + if (shot.startTimeOffset.seconds === undefined) { + shot.startTimeOffset.seconds = 0; + } + if (shot.startTimeOffset.nanos === undefined) { + shot.startTimeOffset.nanos = 0; + } + if (shot.endTimeOffset.seconds === undefined) { + shot.endTimeOffset.seconds = 0; + } + if (shot.endTimeOffset.nanos === undefined) { + shot.endTimeOffset.nanos = 0; + } + console.log(`\tStart: ${shot.startTimeOffset.seconds}` + + `.${(shot.startTimeOffset.nanos / 1e6).toFixed(0)}s`); + console.log(`\tEnd: ${shot.endTimeOffset.seconds}.` + + `${(shot.endTimeOffset.nanos / 1e6).toFixed(0)}s`); }); } }) @@ -236,7 +274,7 @@ function analyzeSafeSearch (gcsUri) { const request = { inputUri: gcsUri, - features: ['SAFE_SEARCH_DETECTION'] + features: ['EXPLICIT_CONTENT_DETECTION'] }; // Human-readable likelihoods @@ -251,15 +289,21 @@ function analyzeSafeSearch (gcsUri) { }) .then((results) => { // Gets unsafe content - const explicitContentResults = results[0].annotationResults[0].safeSearchAnnotations; - console.log('Explicit content results:'); - explicitContentResults.forEach((result) => { - console.log(`Time: ${result.timeOffset / 1e6}s`); - console.log(`\tAdult: ${likelihoods[result.adult]}`); - console.log(`\tSpoof: ${likelihoods[result.spoof]}`); - console.log(`\tMedical: ${likelihoods[result.medical]}`); - console.log(`\tViolent: ${likelihoods[result.violent]}`); - console.log(`\tRacy: ${likelihoods[result.racy]}`); + const explicitContentResults = results[0].annotationResults[0].explicitAnnotation; + console.log('Explicit annotation results:'); + 
explicitContentResults.frames.forEach((result) => { + if (result.timeOffset === undefined) { + result.timeOffset = {}; + } + if (result.timeOffset.seconds === undefined) { + result.timeOffset.seconds = 0; + } + if (result.timeOffset.nanos === undefined) { + result.timeOffset.nanos = 0; + } + console.log(`\tTime: ${result.timeOffset.seconds}` + + `.${(result.timeOffset.nanos / 1e6).toFixed(0)}s`); + console.log(`\t\tPornography liklihood: ${likelihoods[result.pornographyLikelihood]}`); }); }) .catch((err) => { diff --git a/video/package.json b/video/package.json index 4e6765ed18..e17c092aa4 100644 --- a/video/package.json +++ b/video/package.json @@ -17,7 +17,7 @@ "test": "samples test run --cmd ava -- -T 5m --verbose system-test/*.test.js" }, "dependencies": { - "@google-cloud/video-intelligence": "^0.3.1", + "@google-cloud/video-intelligence": "^0.3.2", "long": "^3.2.0", "safe-buffer": "5.1.1", "yargs": "8.0.2" diff --git a/video/quickstart.js b/video/quickstart.js index 3b8e938c00..bb40eafe9f 100644 --- a/video/quickstart.js +++ b/video/quickstart.js @@ -20,21 +20,15 @@ const Video = require('@google-cloud/video-intelligence'); // Instantiates a client -const video = Video({ - servicePath: 'videointelligence.googleapis.com', - projectId: process.env.GCLOUD_PROJECT // Replace with your Google Cloud project ID -}); +const video = Video(); // The GCS filepath of the video to analyze const gcsUri = 'gs://nodejs-docs-samples-video/quickstart_short.mp4'; -// Human-readable likelihoods -const likelihoods = ['UNKNOWN', 'VERY_UNLIKELY', 'UNLIKELY', 'POSSIBLE', 'LIKELY', 'VERY_LIKELY']; - // Construct request const request = { inputUri: gcsUri, - features: ['FACE_DETECTION', 'LABEL_DETECTION', 'SHOT_CHANGE_DETECTION'] + features: ['LABEL_DETECTION'] }; // Execute request @@ -48,58 +42,29 @@ video.annotateVideo(request) // Gets annotations for video const annotations = results[0].annotationResults[0]; - // Gets faces for video from its annotations - const faces = annotations.faceAnnotations; - faces.forEach((face, faceIdx) => { - console.log('Thumbnail size:', face.thumbnail.length); - face.segments.forEach((segment, segmentIdx) => { - console.log(`Face #${faceIdx}, appearance #${segmentIdx}:`); - console.log(`\tStart: ${segment.startTimeOffset / 1e6}s`); - console.log(`\tEnd: ${segment.endTimeOffset / 1e6}s`); - }); - }); - // Gets labels for video from its annotations - const labels = annotations.labelAnnotations; + const labels = annotations.segmentLabelAnnotations; labels.forEach((label) => { - console.log(`Label ${label.description} occurs at:`); - label.locations.forEach((location) => { - const isEntireVideo = - location.segment.startTimeOffset.toNumber() === -1 && - location.segment.endTimeOffset.toNumber() === -1; - - if (isEntireVideo) { - console.log(`\tEntire video`); - } else { - console.log(`\tStart: ${location.segment.startTimeOffset / 1e6}s`); - console.log(`\tEnd: ${location.segment.endTimeOffset / 1e6}s`); + console.log(`Label ${label.entity.description} occurs at:`); + label.segments.forEach((segment) => { + segment = segment.segment; + if (segment.startTimeOffset.seconds === undefined) { + segment.startTimeOffset.seconds = 0; } - console.log(`\tConfidence: ${location.confidence}`); - }); - }); - - // Gets shot changes for video from its annotations - const shotChanges = annotations.shotAnnotations; - if (shotChanges.length === 1) { - console.log(`The entire video is one scene.`); - } else { - shotChanges.forEach((shot, shotIdx) => { - console.log(`Scene ${shotIdx} occurs from:`); 
- console.log(`\tStart: ${shot.startTimeOffset / 1e6}s`); - console.log(`\tEnd: ${shot.endTimeOffset / 1e6}s`); + if (segment.startTimeOffset.nanos === undefined) { + segment.startTimeOffset.nanos = 0; + } + if (segment.endTimeOffset.seconds === undefined) { + segment.endTimeOffset.seconds = 0; + } + if (segment.endTimeOffset.nanos === undefined) { + segment.endTimeOffset.nanos = 0; + } + console.log(`\tStart: ${segment.startTimeOffset.seconds}` + + `.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`); + console.log(`\tEnd: ${segment.endTimeOffset.seconds}.` + + `${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s`); }); - } - - // Gets explicit content data for video from its annotations - const explicitContentResults = annotations.safeSearchAnnotations; - console.log('Explicit content results:'); - explicitContentResults.forEach((result) => { - console.log(`Time: ${result.timeOffset / 1e6}s`); - console.log(`\tAdult: ${likelihoods[result.adult]}`); - console.log(`\tSpoof: ${likelihoods[result.spoof]}`); - console.log(`\tMedical: ${likelihoods[result.medical]}`); - console.log(`\tViolent: ${likelihoods[result.violent]}`); - console.log(`\tRacy: ${likelihoods[result.racy]}`); }); }) .catch((err) => { diff --git a/video/system-test/analyze.test.js b/video/system-test/analyze.test.js index 82c4a409ec..ce54904743 100644 --- a/video/system-test/analyze.test.js +++ b/video/system-test/analyze.test.js @@ -32,16 +32,14 @@ const file = `resources/cat.mp4`; test.serial(`should analyze faces in a GCS file`, async (t) => { const output = await tools.runAsync(`${cmd} faces ${url}`, cwd); t.regex(output, /Thumbnail size: \d+/); - t.regex(output, /Start: \d+\.\d+s/); - t.regex(output, /End: \d+\.\d+s/); - t.regex(output, /Time \d+\.\d+s: \(\d+, \d+\) - \(\d+, \d+\)/); + t.regex(output, /Start:.*\d+\.\d+s/); + t.regex(output, /End:.*\d+\.\d+s/); }); // analyze_labels_gcs (one scene) test.serial(`should analyze labels in a GCS file with one scene`, async (t) => { const output = await tools.runAsync(`${cmd} labels-gcs ${shortUrl}`, cwd); t.regex(output, /Label shirt occurs at:/); - t.regex(output, /Entire video/); t.regex(output, /Confidence: \d+\.\d+/); }); @@ -49,7 +47,6 @@ test.serial(`should analyze labels in a GCS file with one scene`, async (t) => { test.serial(`should analyze labels in a GCS file with multiple scenes`, async (t) => { const output = await tools.runAsync(`${cmd} labels-gcs ${url}`, cwd); t.regex(output, /Label shirt occurs at:/); - t.regex(output, /Entire video/); t.regex(output, /Confidence: \d+\.\d+/); }); @@ -57,14 +54,13 @@ test.serial(`should analyze labels in a GCS file with multiple scenes`, async (t test.serial(`should analyze labels in a local file`, async (t) => { const output = await tools.runAsync(`${cmd} labels-file ${file}`, cwd); t.regex(output, /Label whiskers occurs at:/); - t.regex(output, /Entire video/); t.regex(output, /Confidence: \d+\.\d+/); }); // analyze_shots (multiple shots) test.serial(`should analyze shots in a GCS file with multiple shots`, async (t) => { const output = await tools.runAsync(`${cmd} shots ${url}`, cwd); - t.regex(output, /Shot 0 occurs from:/); + t.regex(output, /Scene 0 occurs from:/); }); // analyze_shots (one shot) @@ -77,5 +73,5 @@ test.serial(`should analyze shots in a GCS file with one shot`, async (t) => { test.serial(`should analyze safe search results in a GCS file`, async (t) => { const output = await tools.runAsync(`${cmd} safe-search ${url}`, cwd); t.regex(output, /Time: \d+\.\d+s/); - t.regex(output, /Spoof:/); + 
t.regex(output, /Explicit annotation results:/); }); diff --git a/video/system-test/quickstart.test.js b/video/system-test/quickstart.test.js index bafa586035..20965be0c6 100644 --- a/video/system-test/quickstart.test.js +++ b/video/system-test/quickstart.test.js @@ -25,7 +25,4 @@ const cwd = path.join(__dirname, `..`); test(`should analyze a hardcoded video`, async (t) => { const output = await tools.runAsync(cmd, cwd); t.regex(output, /Label standing occurs at:/); - t.regex(output, /Entire video/); - t.regex(output, /Confidence: \d+\.\d+/); - t.regex(output, /The entire video is one scene./); });
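Note on the repeated time-offset handling added in PATCH 8/8: the hunks above guard every `startTimeOffset`/`endTimeOffset`/`timeOffset` by defaulting missing `seconds` and `nanos` fields to 0 before printing, presumably because the v0.3.x responses can omit zero-valued fields. Below is a minimal sketch of how those checks could be consolidated; it is not part of the patch series, and the `formatTimeOffset` helper name is illustrative only.

```js
// Illustrative only — not from the commits above. Assumes the Duration-like
// { seconds, nanos } offset objects shown in the PATCH 8/8 hunks, where either
// field may be absent when its value is zero.
function formatTimeOffset (timeOffset) {
  const seconds = (timeOffset && timeOffset.seconds) || 0;
  const nanos = (timeOffset && timeOffset.nanos) || 0;
  // Matches the "<seconds>.<milliseconds>s" formatting used in the patches.
  return `${seconds}.${(nanos / 1e6).toFixed(0)}s`;
}

// Example usage, mirroring the segment-label loop added to quickstart.js:
// label.segments.forEach((segment) => {
//   const time = segment.segment;
//   console.log(`\tStart: ${formatTimeOffset(time.startTimeOffset)}`);
//   console.log(`\tEnd: ${formatTimeOffset(time.endTimeOffset)}`);
//   console.log(`\tConfidence: ${segment.confidence}`);
// });
```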