diff --git a/samples/analyze.js b/samples/analyze.js index f9072451..237bce64 100644 --- a/samples/analyze.js +++ b/samples/analyze.js @@ -15,7 +15,7 @@ 'use strict'; -function analyzeLabelsGCS(gcsUri) { +async function analyzeLabelsGCS(gcsUri) { // [START video_analyze_labels_gcs] // Imports the Google Cloud Video Intelligence library const video = require('@google-cloud/video-intelligence').v1; @@ -34,57 +34,50 @@ function analyzeLabelsGCS(gcsUri) { }; // Detects labels in a video - client - .annotateVideo(request) - .then(results => { - const operation = results[0]; - console.log('Waiting for operation to complete...'); - return operation.promise(); - }) - .then(results => { - // Gets annotations for video - const annotations = results[0].annotationResults[0]; - - const labels = annotations.segmentLabelAnnotations; - labels.forEach(label => { - console.log(`Label ${label.entity.description} occurs at:`); - label.segments.forEach(segment => { - const time = segment.segment; - if (time.startTimeOffset.seconds === undefined) { - time.startTimeOffset.seconds = 0; - } - if (time.startTimeOffset.nanos === undefined) { - time.startTimeOffset.nanos = 0; - } - if (time.endTimeOffset.seconds === undefined) { - time.endTimeOffset.seconds = 0; - } - if (time.endTimeOffset.nanos === undefined) { - time.endTimeOffset.nanos = 0; - } - console.log( - `\tStart: ${time.startTimeOffset.seconds}` + - `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s` - ); - console.log( - `\tEnd: ${time.endTimeOffset.seconds}.` + - `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s` - ); - console.log(`\tConfidence: ${segment.confidence}`); - }); - }); - }) - .catch(err => { - console.error('ERROR:', err); + const [operation] = await client.annotateVideo(request); + console.log('Waiting for operation to complete...'); + const [operationResult] = await operation.promise(); + + // Gets annotations for video + const annotations = operationResult.annotationResults[0]; + + const labels = annotations.segmentLabelAnnotations; + labels.forEach(label => { + console.log(`Label ${label.entity.description} occurs at:`); + label.segments.forEach(segment => { + const time = segment.segment; + if (time.startTimeOffset.seconds === undefined) { + time.startTimeOffset.seconds = 0; + } + if (time.startTimeOffset.nanos === undefined) { + time.startTimeOffset.nanos = 0; + } + if (time.endTimeOffset.seconds === undefined) { + time.endTimeOffset.seconds = 0; + } + if (time.endTimeOffset.nanos === undefined) { + time.endTimeOffset.nanos = 0; + } + console.log( + `\tStart: ${time.startTimeOffset.seconds}` + + `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s` + ); + console.log( + `\tEnd: ${time.endTimeOffset.seconds}.` + + `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s` + ); + console.log(`\tConfidence: ${segment.confidence}`); }); + }); // [END video_analyze_labels_gcs] } -function analyzeLabelsLocal(path) { +async function analyzeLabelsLocal(path) { // [START video_analyze_labels] // Imports the Google Cloud Video Intelligence library + Node's fs library const video = require('@google-cloud/video-intelligence').v1; const fs = require('fs'); + const util = require('util'); // Creates a client const client = new video.VideoIntelligenceServiceClient(); @@ -95,7 +88,8 @@ function analyzeLabelsLocal(path) { // const path = 'Local file to analyze, e.g. 
./my-file.mp4'; // Reads a local video file and converts it to base64 - const file = fs.readFileSync(path); + const readFile = util.promisify(fs.readFile); + const file = await readFile(path); const inputContent = file.toString('base64'); // Constructs request @@ -105,53 +99,45 @@ function analyzeLabelsLocal(path) { }; // Detects labels in a video - client - .annotateVideo(request) - .then(results => { - const operation = results[0]; - console.log('Waiting for operation to complete...'); - return operation.promise(); - }) - .then(results => { - // Gets annotations for video - const annotations = results[0].annotationResults[0]; - - const labels = annotations.segmentLabelAnnotations; - labels.forEach(label => { - console.log(`Label ${label.entity.description} occurs at:`); - label.segments.forEach(segment => { - const time = segment.segment; - if (time.startTimeOffset.seconds === undefined) { - time.startTimeOffset.seconds = 0; - } - if (time.startTimeOffset.nanos === undefined) { - time.startTimeOffset.nanos = 0; - } - if (time.endTimeOffset.seconds === undefined) { - time.endTimeOffset.seconds = 0; - } - if (time.endTimeOffset.nanos === undefined) { - time.endTimeOffset.nanos = 0; - } - console.log( - `\tStart: ${time.startTimeOffset.seconds}` + - `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s` - ); - console.log( - `\tEnd: ${time.endTimeOffset.seconds}.` + - `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s` - ); - console.log(`\tConfidence: ${segment.confidence}`); - }); - }); - }) - .catch(err => { - console.error('ERROR:', err); + const [operation] = await client.annotateVideo(request); + console.log('Waiting for operation to complete...'); + const [operationResult] = await operation.promise(); + // Gets annotations for video + const annotations = operationResult.annotationResults[0]; + + const labels = annotations.segmentLabelAnnotations; + labels.forEach(label => { + console.log(`Label ${label.entity.description} occurs at:`); + label.segments.forEach(segment => { + const time = segment.segment; + if (time.startTimeOffset.seconds === undefined) { + time.startTimeOffset.seconds = 0; + } + if (time.startTimeOffset.nanos === undefined) { + time.startTimeOffset.nanos = 0; + } + if (time.endTimeOffset.seconds === undefined) { + time.endTimeOffset.seconds = 0; + } + if (time.endTimeOffset.nanos === undefined) { + time.endTimeOffset.nanos = 0; + } + console.log( + `\tStart: ${time.startTimeOffset.seconds}` + + `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s` + ); + console.log( + `\tEnd: ${time.endTimeOffset.seconds}.` + + `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s` + ); + console.log(`\tConfidence: ${segment.confidence}`); }); + }); + // [END video_analyze_labels] } -function analyzeShots(gcsUri) { +async function analyzeShots(gcsUri) { // [START video_analyze_shots] // Imports the Google Cloud Video Intelligence library const video = require('@google-cloud/video-intelligence').v1; @@ -170,59 +156,51 @@ function analyzeShots(gcsUri) { }; // Detects camera shot changes - client - .annotateVideo(request) - .then(results => { - const operation = results[0]; - console.log('Waiting for operation to complete...'); - return operation.promise(); - }) - .then(results => { - // Gets shot changes - const shotChanges = results[0].annotationResults[0].shotAnnotations; - console.log('Shot changes:'); - - if (shotChanges.length === 1) { - console.log(`The entire video is one shot.`); - } else { - shotChanges.forEach((shot, shotIdx) => { - console.log(`Scene ${shotIdx} occurs from:`); - if 
(shot.startTimeOffset === undefined) {
-            shot.startTimeOffset = {};
-          }
-          if (shot.endTimeOffset === undefined) {
-            shot.endTimeOffset = {};
-          }
-          if (shot.startTimeOffset.seconds === undefined) {
-            shot.startTimeOffset.seconds = 0;
-          }
-          if (shot.startTimeOffset.nanos === undefined) {
-            shot.startTimeOffset.nanos = 0;
-          }
-          if (shot.endTimeOffset.seconds === undefined) {
-            shot.endTimeOffset.seconds = 0;
-          }
-          if (shot.endTimeOffset.nanos === undefined) {
-            shot.endTimeOffset.nanos = 0;
-          }
-          console.log(
-            `\tStart: ${shot.startTimeOffset.seconds}` +
-              `.${(shot.startTimeOffset.nanos / 1e6).toFixed(0)}s`
-          );
-          console.log(
-            `\tEnd: ${shot.endTimeOffset.seconds}.` +
-              `${(shot.endTimeOffset.nanos / 1e6).toFixed(0)}s`
-          );
-        });
+  const [operation] = await client.annotateVideo(request);
+  console.log('Waiting for operation to complete...');
+  const [operationResult] = await operation.promise();
+  // Gets shot changes
+  const shotChanges = operationResult.annotationResults[0].shotAnnotations;
+  console.log('Shot changes:');
+
+  if (shotChanges.length === 1) {
+    console.log(`The entire video is one shot.`);
+  } else {
+    shotChanges.forEach((shot, shotIdx) => {
+      console.log(`Scene ${shotIdx} occurs from:`);
+      if (shot.startTimeOffset === undefined) {
+        shot.startTimeOffset = {};
+      }
+      if (shot.endTimeOffset === undefined) {
+        shot.endTimeOffset = {};
       }
-    })
-    .catch(err => {
-      console.error('ERROR:', err);
+      if (shot.startTimeOffset.seconds === undefined) {
+        shot.startTimeOffset.seconds = 0;
+      }
+      if (shot.startTimeOffset.nanos === undefined) {
+        shot.startTimeOffset.nanos = 0;
+      }
+      if (shot.endTimeOffset.seconds === undefined) {
+        shot.endTimeOffset.seconds = 0;
+      }
+      if (shot.endTimeOffset.nanos === undefined) {
+        shot.endTimeOffset.nanos = 0;
+      }
+      console.log(
+        `\tStart: ${shot.startTimeOffset.seconds}` +
+          `.${(shot.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
+      console.log(
+        `\tEnd: ${shot.endTimeOffset.seconds}.` +
+          `${(shot.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
     });
+  }
+
   // [END video_analyze_shots]
 }
 
-function analyzeSafeSearch(gcsUri) {
+async function analyzeSafeSearch(gcsUri) {
   // [START video_analyze_explicit_content]
   // Imports the Google Cloud Video Intelligence library
   const video = require('@google-cloud/video-intelligence').v1;
@@ -251,46 +229,35 @@ function analyzeSafeSearch(gcsUri) {
   ];
 
   // Detects unsafe content
-  client
-    .annotateVideo(request)
-    .then(results => {
-      const operation = results[0];
-      console.log('Waiting for operation to complete...');
-      return operation.promise();
-    })
-    .then(results => {
-      // Gets unsafe content
-      const explicitContentResults =
-        results[0].annotationResults[0].explicitAnnotation;
-      console.log('Explicit annotation results:');
-      explicitContentResults.frames.forEach(result => {
-        if (result.timeOffset === undefined) {
-          result.timeOffset = {};
-        }
-        if (result.timeOffset.seconds === undefined) {
-          result.timeOffset.seconds = 0;
-        }
-        if (result.timeOffset.nanos === undefined) {
-          result.timeOffset.nanos = 0;
-        }
-        console.log(
-          `\tTime: ${result.timeOffset.seconds}` +
-            `.${(result.timeOffset.nanos / 1e6).toFixed(0)}s`
-        );
-        console.log(
-          `\t\tPornography liklihood: ${
-            likelihoods[result.pornographyLikelihood]
-          }`
-        );
-      });
-    })
-    .catch(err => {
-      console.error('ERROR:', err);
-    });
+  const [operation] = await client.annotateVideo(request);
+  console.log('Waiting for operation to complete...');
+  const [operationResult] = await operation.promise();
+  // Gets unsafe content
+  const explicitContentResults =
+    operationResult.annotationResults[0].explicitAnnotation;
+  console.log('Explicit annotation results:');
+  explicitContentResults.frames.forEach(result => {
+    if (result.timeOffset === undefined) {
+      result.timeOffset = {};
+    }
+    if (result.timeOffset.seconds === undefined) {
+      result.timeOffset.seconds = 0;
+    }
+    if (result.timeOffset.nanos === undefined) {
+      result.timeOffset.nanos = 0;
+    }
+    console.log(
+      `\tTime: ${result.timeOffset.seconds}` +
+        `.${(result.timeOffset.nanos / 1e6).toFixed(0)}s`
+    );
+    console.log(
+      `\t\tPornography likelihood: ${likelihoods[result.pornographyLikelihood]}`
+    );
+  });
   // [END video_analyze_explicit_content]
 }
 
-function analyzeVideoTranscription(gcsUri) {
+async function analyzeVideoTranscription(gcsUri) {
   // [START video_speech_transcription_gcs_beta]
   // Imports the Google Cloud Video Intelligence library
   const videoIntelligence = require('@google-cloud/video-intelligence')
@@ -317,75 +284,69 @@ function analyzeVideoTranscription(gcsUri) {
     videoContext: videoContext,
   };
 
-  client
-    .annotateVideo(request)
-    .then(results => {
-      const operation = results[0];
-      console.log('Waiting for operation to complete...');
-      return operation.promise();
-    })
-    .then(results => {
-      console.log('Word level information:');
-      const alternative =
-        results[0].annotationResults[0].speechTranscriptions[0].alternatives[0];
-      alternative.words.forEach(wordInfo => {
-        const start_time =
-          wordInfo.startTime.seconds + wordInfo.startTime.nanos * 1e-9;
-        const end_time =
-          wordInfo.endTime.seconds + wordInfo.endTime.nanos * 1e-9;
-        console.log(
-          '\t' + start_time + 's - ' + end_time + 's: ' + wordInfo.word
-        );
-      });
-      console.log('Transcription: ' + alternative.transcript);
-    })
-    .catch(err => {
-      console.error('ERROR:', err);
-    });
+  const [operation] = await client.annotateVideo(request);
+  console.log('Waiting for operation to complete...');
+  const [operationResult] = await operation.promise();
+  console.log('Word level information:');
+  const alternative =
+    operationResult.annotationResults[0].speechTranscriptions[0]
+      .alternatives[0];
+  alternative.words.forEach(wordInfo => {
+    const start_time =
+      wordInfo.startTime.seconds + wordInfo.startTime.nanos * 1e-9;
+    const end_time = wordInfo.endTime.seconds + wordInfo.endTime.nanos * 1e-9;
+    console.log('\t' + start_time + 's - ' + end_time + 's: ' + wordInfo.word);
+  });
+  console.log('Transcription: ' + alternative.transcript);
+
   // [END video_speech_transcription_gcs_beta]
 }
 
-require(`yargs`)
-  .demand(1)
-  .command(
-    `shots <gcsUri>`,
-    `Analyzes shot angles in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
-    {},
-    opts => analyzeShots(opts.gcsUri)
-  )
-  .command(
-    `labels-gcs <gcsUri>`,
-    `Labels objects in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
-    {},
-    opts => analyzeLabelsGCS(opts.gcsUri)
-  )
-  .command(
-    `labels-file <filePath>`,
-    `Labels objects in a video stored locally using the Cloud Video Intelligence API.`,
-    {},
-    opts => analyzeLabelsLocal(opts.filePath)
-  )
-  .command(
-    `safe-search <gcsUri>`,
-    `Detects explicit content in a video stored in Google Cloud Storage.`,
-    {},
-    opts => analyzeSafeSearch(opts.gcsUri)
-  )
-  .command(
-    `transcription <gcsUri>`,
-    `Extract the video transcription using the Cloud Video Intelligence API.`,
-    {},
-    opts => analyzeVideoTranscription(opts.gcsUri)
-  )
-  .example(`node $0 shots gs://demomaker/sushi.mp4`)
-  .example(`node $0 labels-gcs gs://demomaker/tomatoes.mp4`)
-  .example(`node $0 labels-file cat.mp4`)
-  .example(`node $0 safe-search gs://demomaker/tomatoes.mp4`)
-  .example(`node $0 transcription gs://demomaker/tomatoes.mp4`)
-  .wrap(120)
-  .recommendCommands()
-  .epilogue(
-    `For more information, see https://cloud.google.com/video-intelligence/docs`
-  )
-  .help()
-  .strict().argv;
+async function main() {
+  require(`yargs`)
+    .demand(1)
+    .command(
+      `shots <gcsUri>`,
+      `Analyzes shot angles in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzeShots(opts.gcsUri)
+    )
+    .command(
+      `labels-gcs <gcsUri>`,
+      `Labels objects in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzeLabelsGCS(opts.gcsUri)
+    )
+    .command(
+      `labels-file <filePath>`,
+      `Labels objects in a video stored locally using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzeLabelsLocal(opts.filePath)
+    )
+    .command(
+      `safe-search <gcsUri>`,
+      `Detects explicit content in a video stored in Google Cloud Storage.`,
+      {},
+      opts => analyzeSafeSearch(opts.gcsUri)
+    )
+    .command(
+      `transcription <gcsUri>`,
+      `Extract the video transcription using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzeVideoTranscription(opts.gcsUri)
+    )
+    .example(`node $0 shots gs://demomaker/sushi.mp4`)
+    .example(`node $0 labels-gcs gs://demomaker/tomatoes.mp4`)
+    .example(`node $0 labels-file cat.mp4`)
+    .example(`node $0 safe-search gs://demomaker/tomatoes.mp4`)
+    .example(`node $0 transcription gs://demomaker/tomatoes.mp4`)
+    .wrap(120)
+    .recommendCommands()
+    .epilogue(
+      `For more information, see https://cloud.google.com/video-intelligence/docs`
+    )
+    .help()
+    .strict().argv;
+}
+
+main().catch(console.error);
diff --git a/samples/analyze.v1p2beta1.js b/samples/analyze.v1p2beta1.js
index a502d66f..1e70051a 100644
--- a/samples/analyze.v1p2beta1.js
+++ b/samples/analyze.v1p2beta1.js
@@ -304,42 +304,47 @@ async function analyzeObjectTracking(path) {
   });
   // [END video_object_tracking_beta]
 }
-require(`yargs`)
-  .demand(1)
-  .command(
-    `video-text-gcs <gcsUri>`,
-    `Analyzes text in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
-    {},
-    opts => analyzeTextGCS(opts.gcsUri)
-  )
-  .command(
-    `track-objects-gcs <gcsUri>`,
-    `Analyzes objects in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
-    {},
-    opts => analyzeObjectTrackingGCS(opts.gcsUri)
-  )
-  .command(
-    `video-text <path>`,
-    `Analyzes text in a video stored in a local file using the Cloud Video Intelligence API.`,
-    {},
-    opts => analyzeText(opts.path)
-  )
-  .command(
-    `track-objects <path>`,
-    `Analyzes objects in a video stored in a local file using the Cloud Video Intelligence API.`,
-    {},
-    opts => analyzeObjectTracking(opts.path)
-  )
-  .example(`node $0 video-text ./resources/googlework_short.mp4`)
-  .example(
-    `node $0 video-text-gcs gs://nodejs-docs-samples/videos/googlework_short.mp4`
-  )
-  .example(`node $0 track-objects ./resources/cat.mp4`)
-  .example(`node $0 track-objects-gcs gs://nodejs-docs-samples/video/cat.mp4`)
-  .wrap(120)
-  .recommendCommands()
-  .epilogue(
-    `For more information, see https://cloud.google.com/video-intelligence/docs`
-  )
-  .help()
-  .strict().argv;
+
+async function main() {
+  require(`yargs`)
+    .demand(1)
+    .command(
+      `video-text-gcs <gcsUri>`,
+      `Analyzes text in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzeTextGCS(opts.gcsUri)
+    )
+    .command(
+      `track-objects-gcs <gcsUri>`,
+      `Analyzes objects in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzeObjectTrackingGCS(opts.gcsUri)
+    )
+    .command(
+      `video-text <path>`,
+      `Analyzes text in a video stored in a local file using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzeText(opts.path)
+    )
+    .command(
+      `track-objects <path>`,
+      `Analyzes objects in a video stored in a local file using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzeObjectTracking(opts.path)
+    )
+    .example(`node $0 video-text ./resources/googlework_short.mp4`)
+    .example(
+      `node $0 video-text-gcs gs://nodejs-docs-samples/videos/googlework_short.mp4`
+    )
+    .example(`node $0 track-objects ./resources/cat.mp4`)
+    .example(`node $0 track-objects-gcs gs://nodejs-docs-samples/video/cat.mp4`)
+    .wrap(120)
+    .recommendCommands()
+    .epilogue(
+      `For more information, see https://cloud.google.com/video-intelligence/docs`
+    )
+    .help()
+    .strict().argv;
+}
+
+main().catch(console.error);
diff --git a/samples/quickstart.js b/samples/quickstart.js
index d09c139c..5adc54b7 100644
--- a/samples/quickstart.js
+++ b/samples/quickstart.js
@@ -14,67 +14,64 @@
  */
 
 'use strict';
+async function main() {
+  // [START video_quickstart]
+  // Imports the Google Cloud Video Intelligence library
+  const videoIntelligence = require('@google-cloud/video-intelligence');
 
-// [START video_quickstart]
-// Imports the Google Cloud Video Intelligence library
-const videoIntelligence = require('@google-cloud/video-intelligence');
+  // Creates a client
+  const client = new videoIntelligence.VideoIntelligenceServiceClient();
 
-// Creates a client
-const client = new videoIntelligence.VideoIntelligenceServiceClient();
+  // The GCS uri of the video to analyze
+  const gcsUri = 'gs://nodejs-docs-samples-video/quickstart_short.mp4';
 
-// The GCS uri of the video to analyze
-const gcsUri = 'gs://nodejs-docs-samples-video/quickstart_short.mp4';
+  // Construct request
+  const request = {
+    inputUri: gcsUri,
+    features: ['LABEL_DETECTION'],
+  };
 
-// Construct request
-const request = {
-  inputUri: gcsUri,
-  features: ['LABEL_DETECTION'],
-};
+  // Execute request
+  const [operation] = await client.annotateVideo(request);
 
-// Execute request
-client
-  .annotateVideo(request)
-  .then(results => {
-    const operation = results[0];
-    console.log(
-      'Waiting for operation to complete... (this may take a few minutes)'
-    );
-    return operation.promise();
-  })
-  .then(results => {
-    // Gets annotations for video
-    const annotations = results[0].annotationResults[0];
+  console.log(
+    'Waiting for operation to complete... 
(this may take a few minutes)' + ); - // Gets labels for video from its annotations - const labels = annotations.segmentLabelAnnotations; - labels.forEach(label => { - console.log(`Label ${label.entity.description} occurs at:`); - label.segments.forEach(segment => { - segment = segment.segment; - if (segment.startTimeOffset.seconds === undefined) { - segment.startTimeOffset.seconds = 0; - } - if (segment.startTimeOffset.nanos === undefined) { - segment.startTimeOffset.nanos = 0; - } - if (segment.endTimeOffset.seconds === undefined) { - segment.endTimeOffset.seconds = 0; - } - if (segment.endTimeOffset.nanos === undefined) { - segment.endTimeOffset.nanos = 0; - } - console.log( - `\tStart: ${segment.startTimeOffset.seconds}` + - `.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s` - ); - console.log( - `\tEnd: ${segment.endTimeOffset.seconds}.` + - `${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s` - ); - }); + const [operationResult] = await operation.promise(); + + // Gets annotations for video + const annotations = operationResult.annotationResults[0]; + + // Gets labels for video from its annotations + const labels = annotations.segmentLabelAnnotations; + labels.forEach(label => { + console.log(`Label ${label.entity.description} occurs at:`); + label.segments.forEach(segment => { + segment = segment.segment; + if (segment.startTimeOffset.seconds === undefined) { + segment.startTimeOffset.seconds = 0; + } + if (segment.startTimeOffset.nanos === undefined) { + segment.startTimeOffset.nanos = 0; + } + if (segment.endTimeOffset.seconds === undefined) { + segment.endTimeOffset.seconds = 0; + } + if (segment.endTimeOffset.nanos === undefined) { + segment.endTimeOffset.nanos = 0; + } + console.log( + `\tStart: ${segment.startTimeOffset.seconds}` + + `.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s` + ); + console.log( + `\tEnd: ${segment.endTimeOffset.seconds}.` + + `${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s` + ); }); - }) - .catch(err => { - console.error('ERROR:', err); }); -// [END video_quickstart] + // [END video_quickstart] +} + +main().catch(console.error);