Commit e168b1d (parent 28f6863): 14 changed files with 954 additions and 0 deletions.
@@ -0,0 +1,138 @@
/**
 * Copyright 2018, Google, LLC.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

'use strict';

const {assert} = require('chai');
const cp = require('child_process');

const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});

const cmdDataset = 'node automlTranslationDataset.js';
const cmdModel = 'node automlTranslationModel.js';
const cmdPredict = 'node automlTranslationPredict.js';

const testDataSetName = 'testDataSet';
const dummyDataSet = 'dummyDataSet';
const testModelName = 'dummyModel';
const sampleText = './resources/testInput.txt';
const donotdeleteModelId = 'TRL188026453969732486';

describe.skip('automl sample tests', () => {
  it(`should create, list, and delete a dataset`, async () => {
    // Check to see that this dataset does not yet exist
    let output = execSync(`${cmdDataset} list-datasets`);
    assert.notMatch(output, new RegExp(testDataSetName));

    // Create dataset
    output = execSync(`${cmdDataset} create-dataset -n "${testDataSetName}"`);
    const dataSetId = output
      .split(`\n`)[1]
      .split(`:`)[1]
      .trim();
    assert.match(
      output,
      new RegExp(`Dataset display name: ${testDataSetName}`)
    );

    // Delete dataset
    output = execSync(`${cmdDataset} delete-dataset -i "${dataSetId}"`);
    assert.match(output, /Dataset deleted./);
  });

  // Running this test creates two models; see the hard-coded workaround below
  it(`should create a dataset, import data, and start making a model`, async () => {
    // Check to see that this dataset does not yet exist
    let output = execSync(`${cmdDataset} list-datasets`);
    assert.notMatch(output, new RegExp(dummyDataSet));

    // Create dataset
    output = execSync(`${cmdDataset} create-dataset -n "${dummyDataSet}"`);
    const dataSetId = output
      .split(`\n`)[1]
      .split(`:`)[1]
      .trim();
    assert.match(output, new RegExp(`Dataset display name: ${dummyDataSet}`));

    // Import data
    output = execSync(
      `${cmdDataset} import-data -i "${dataSetId}" -p "gs://nodejs-docs-samples-vcm/flowerTraindata20lines.csv"`
    );
    assert.match(output, /Data imported./);

    // Check to make sure the model doesn't already exist
    output = execSync(`${cmdModel} list-models`);
    assert.notMatch(output, new RegExp(testModelName));

    // Begin training the dataset, getting the operation ID for the next step
    output = execSync(
      `${cmdModel} create-model -i "${dataSetId}" -m "${testModelName}" -t "2"`
    );
    const operationName = output
      .split(`\n`)[0]
      .split(`:`)[1]
      .trim();
    assert.match(output, /Training started/);

    // Poll operation status, here confirming that the operation is not complete yet
    output = execSync(
      `${cmdModel} get-operation-status -i "${dataSetId}" -o "${operationName}"`
    );
    assert.match(output, /done: false/);
  });

  it(`should run get model (from a preexisting model)`, async () => {
    // Confirm the dataset exists
    let output = execSync(`${cmdDataset} list-datasets`);
    assert.match(output, /me_do_not_delete/);

    // List model evaluations, confirming the model exists
    output = execSync(
      `${cmdModel} list-model-evaluations -a "${donotdeleteModelId}"`
    );
    assert.match(output, /translationEvaluationMetrics:/);

    // Get the model
    output = execSync(`${cmdModel} get-model -a "${donotdeleteModelId}"`);
    assert.match(output, /Model deployment state: DEPLOYED/);
  });

  it(`should run prediction from a preexisting model`, async () => {
    // Confirm the dataset exists
    let output = execSync(`${cmdDataset} list-datasets`);
    assert.match(output, /me_do_not_delete/);

    // List model evaluations, confirming the model exists
    output = execSync(
      `${cmdModel} list-model-evaluations -a "${donotdeleteModelId}"`
    );
    assert.match(output, /translationEvaluationMetrics:/);

    // Run prediction on the sample text file in the resources folder
    output = execSync(
      `${cmdPredict} predict -i "${donotdeleteModelId}" -f "${sampleText}" -t "False"`
    );
    assert.match(
      output,
      /Translated Content: {2}これがどのように終わるか教えて/
    );
  });

  // List datasets
  it(`should list datasets`, async () => {
    const output = execSync(`${cmdDataset} list-datasets`);
    assert.match(output, /List of datasets:/);
  });
});
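The tests above recover the dataset ID by splitting the second line of the create-dataset output on ':'. A small helper sketch of that parsing, under the assumption that the second output line has the form 'Dataset id: TRL1234567890':

// Sketch: extract the dataset ID from the create-dataset console output.
// Assumes the second line looks like 'Dataset id: TRL1234567890'.
function parseDatasetId(output) {
  return output
    .split('\n')[1]
    .split(':')[1]
    .trim();
}

// Example:
//   parseDatasetId('Dataset name: projects/.../datasets/TRL123\nDataset id: TRL123\n')
//   => 'TRL123'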
@@ -0,0 +1,71 @@
/**
 * Copyright 2019, Google LLC
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

'use strict';

async function main(projectId = 'YOUR_PROJECT_ID') {
  // [START automl_translation_create_dataset]
  const automl = require(`@google-cloud/automl`);

  const client = new automl.AutoMlClient();
  const computeRegion = 'us-central1';
  const datasetName = 'myDataset';
  const source = 'en';
  const target = 'ja';

  // A resource that represents a Google Cloud Platform location.
  const projectLocation = client.locationPath(projectId, computeRegion);

  // Specify the source and target languages.
  const datasetSpec = {
    sourceLanguageCode: source,
    targetLanguageCode: target,
  };

  // Set the dataset name and dataset specification.
  const datasetInfo = {
    displayName: datasetName,
    translationDatasetMetadata: datasetSpec,
  };

  // Create a dataset with the dataset specification in the region.
  const [dataset] = await client.createDataset({
    parent: projectLocation,
    dataset: datasetInfo,
  });

  // Display the dataset information.
  console.log(`Dataset name: ${dataset.name}`);
  console.log(`Dataset id: ${dataset.name.split(`/`).pop()}`);
  console.log(`Dataset display name: ${dataset.displayName}`);
  console.log(`Dataset example count: ${dataset.exampleCount}`);
  console.log(`Translation dataset specification:`);
  console.log(
    `\tSource language code: ${
      dataset.translationDatasetMetadata.sourceLanguageCode
    }`
  );
  console.log(
    `\tTarget language code: ${
      dataset.translationDatasetMetadata.targetLanguageCode
    }`
  );
  console.log(`Dataset create time:`);
  console.log(`\tseconds: ${dataset.createTime.seconds}`);
  console.log(`\tnanos: ${dataset.createTime.nanos}`);
  // [END automl_translation_create_dataset]
}

main(...process.argv.slice(2)).catch(err => console.error(err));
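The other samples in this commit take the short dataset ID rather than the full resource name, and the ID printed above is just the last path segment of dataset.name. A minimal sketch of that extraction, assuming resource names of the form projects/&lt;project&gt;/locations/&lt;region&gt;/datasets/&lt;id&gt;:

// Sketch: derive the short dataset ID from the full resource name.
// Assumes names like 'projects/123/locations/us-central1/datasets/TRL456'.
function datasetIdFromName(name) {
  return name.split('/').pop();
}

// Example: datasetIdFromName('projects/123/locations/us-central1/datasets/TRL456') => 'TRL456'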
@@ -0,0 +1,46 @@
/**
 * Copyright 2019, Google LLC
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

'use strict';

async function main(
  projectId = 'YOUR_PROJECT_ID',
  computeRegion = 'YOUR_REGION',
  datasetId = 'YOUR_DATASET'
) {
  // [START automl_translation_delete_dataset]
  const automl = require(`@google-cloud/automl`);
  const client = new automl.AutoMlClient();

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const projectId = `The GCLOUD_PROJECT string, e.g. "my-gcloud-project"`;
  // const computeRegion = `region-name, e.g. "us-central1"`;
  // const datasetId = `Id of the dataset`;

  // Get the full path of the dataset.
  const datasetFullId = client.datasetPath(projectId, computeRegion, datasetId);

  // Delete the dataset.
  const [operations] = await client.deleteDataset({name: datasetFullId});
  const operationResponses = await operations.promise();
  // The final result of the operation.
  if (operationResponses[2].done === true) console.log(`Dataset deleted.`);

  // [END automl_translation_delete_dataset]
}

main(...process.argv.slice(2)).catch(err => console.error(err));
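deleteDataset returns a long-running operation, and the sample blocks on operations.promise() before printing. A compact sketch of the same wait wrapped in a helper, assuming the client and full dataset path are built as above:

// Sketch: delete a dataset and wait for the long-running operation to finish.
// `client` is an AutoMlClient; `datasetFullId` is a full resource path from client.datasetPath().
async function deleteDatasetAndWait(client, datasetFullId) {
  const [operation] = await client.deleteDataset({name: datasetFullId});
  // promise() resolves once the backend marks the delete operation done.
  await operation.promise();
  console.log(`Dataset deleted.`);
}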
@@ -0,0 +1,63 @@
/**
 * Copyright 2019, Google LLC
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

'use strict';

async function main(
  projectId = 'YOUR_PROJECT_ID',
  computeRegion = 'YOUR_REGION',
  datasetId = 'YOUR_DATASET'
) {
  // [START automl_translation_get_dataset]
  const automl = require(`@google-cloud/automl`);
  const client = new automl.AutoMlClient();

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const projectId = `The GCLOUD_PROJECT string, e.g. "my-gcloud-project"`;
  // const computeRegion = `region-name, e.g. "us-central1"`;
  // const datasetId = `Id of the dataset`;

  // Get the full path of the dataset.
  const datasetFullId = client.datasetPath(projectId, computeRegion, datasetId);

  // Get complete details of the dataset.
  const [dataset] = await client.getDataset({name: datasetFullId});

  // Display the dataset information.
  console.log(`Dataset name: ${dataset.name}`);
  console.log(`Dataset id: ${dataset.name.split(`/`).pop()}`);
  console.log(`Dataset display name: ${dataset.displayName}`);
  console.log(`Dataset example count: ${dataset.exampleCount}`);
  console.log(`Translation dataset specification:`);
  console.log(
    `\tSource language code: ${
      dataset.translationDatasetMetadata.sourceLanguageCode
    }`
  );
  console.log(
    `\tTarget language code: ${
      dataset.translationDatasetMetadata.targetLanguageCode
    }`
  );
  console.log(`Dataset create time:`);
  console.log(`\tseconds: ${dataset.createTime.seconds}`);
  console.log(`\tnanos: ${dataset.createTime.nanos}`);

  // [END automl_translation_get_dataset]
}

main(...process.argv.slice(2)).catch(err => console.error(err));
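The tests earlier in this commit also call a list-datasets command that is not part of this excerpt. A hedged sketch of what that listing could look like with the same client, assuming listDatasets accepts the project location as its parent:

// Sketch (assumed shape of the list-datasets command the tests rely on).
const automl = require('@google-cloud/automl');
const client = new automl.AutoMlClient();

async function listDatasets(projectId, computeRegion) {
  const projectLocation = client.locationPath(projectId, computeRegion);
  const [datasets] = await client.listDatasets({parent: projectLocation});
  console.log(`List of datasets:`);
  for (const dataset of datasets) {
    console.log(`\t${dataset.displayName} (${dataset.name.split('/').pop()})`);
  }
}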
@@ -0,0 +1,63 @@
/**
 * Copyright 2019, Google LLC
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

'use strict';

async function main(
  projectId = 'YOUR_PROJECT_ID',
  computeRegion = 'YOUR_REGION',
  datasetId = 'YOUR_DATASET',
  path = 'YOUR_PATH'
) {
  // [START automl_translation_import_data]
  const automl = require(`@google-cloud/automl`);

  const client = new automl.AutoMlClient();

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const projectId = `The GCLOUD_PROJECT string, e.g. "my-gcloud-project"`;
  // const computeRegion = `region-name, e.g. "us-central1"`;
  // const datasetId = `Id of the dataset`;
  // const path = `string or array of .csv paths in AutoML Translation CSV format, e.g. "gs://myproject/mytraindata.csv"`;

  // Get the full path of the dataset.
  const datasetFullId = client.datasetPath(projectId, computeRegion, datasetId);

  // Get the multiple Google Cloud Storage URIs.
  const inputUris = path.split(`,`);
  const inputConfig = {
    gcsSource: {
      inputUris: inputUris,
    },
  };

  // Import data from the input URIs.
  const [operation] = await client.importData({
    name: datasetFullId,
    inputConfig: inputConfig,
  });
  console.log(`Processing import...`);
  const operationResponses = await operation.promise();
  // The final result of the operation.
  if (operationResponses[2].done === true) {
    console.log(`Data imported.`);
  }

  // [END automl_translation_import_data]
}

main(...process.argv.slice(2)).catch(err => console.error(err));
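importData takes a gcsSource with one or more Cloud Storage URIs, which the sample builds by splitting a comma-separated path argument. A small sketch of the same request built from an array instead; the bucket and file names below are placeholders:

// Sketch: import one or more CSV files into an existing dataset.
// `client` is an AutoMlClient, `datasetFullId` a full dataset resource path,
// and the URIs below are placeholders.
async function importCsvs(client, datasetFullId) {
  const inputUris = [
    'gs://my-bucket/translation-train.csv',
    'gs://my-bucket/translation-train-extra.csv',
  ];
  const [operation] = await client.importData({
    name: datasetFullId,
    inputConfig: {gcsSource: {inputUris}},
  });
  await operation.promise(); // resolves when the import finishes
  console.log(`Data imported.`);
}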