diff --git a/demo-ng/app/app.css b/demo-ng/app/app.css index 98b2ddc4..53f5821b 100644 --- a/demo-ng/app/app.css +++ b/demo-ng/app/app.css @@ -75,15 +75,6 @@ button { background-color: #1832d5; } -.mlKitCamera { - /*border-radius: 20;*/ - /*border-width: 8;*/ - /*border-color: #ddd;*/ - width: 280; - height: 280; - margin-top: 16; -} - Label.mlkit-result { margin: 5; text-align: left; diff --git a/demo-ng/app/tabs/mlkit/barcodescanning/barcodescanning.component.html b/demo-ng/app/tabs/mlkit/barcodescanning/barcodescanning.component.html index 126ef96b..a05bae6b 100644 --- a/demo-ng/app/tabs/mlkit/barcodescanning/barcodescanning.component.html +++ b/demo-ng/app/tabs/mlkit/barcodescanning/barcodescanning.component.html @@ -8,7 +8,7 @@ ; onBarcodeScanResult(event): void { - const result: MLKitScanBarcodesResult = event.value; + const result: MLKitScanBarcodesOnDeviceResult = event.value; this.barcodes = result.barcodes; - console.log(">>> onBarcodeScanResult, this.barcodes: " + JSON.stringify(this.barcodes)); } } diff --git a/demo-ng/app/tabs/mlkit/facedetection/facedetection.component.html b/demo-ng/app/tabs/mlkit/facedetection/facedetection.component.html index f8562271..3f1e7f3e 100644 --- a/demo-ng/app/tabs/mlkit/facedetection/facedetection.component.html +++ b/demo-ng/app/tabs/mlkit/facedetection/facedetection.component.html @@ -8,7 +8,7 @@ @@ -25,9 +25,9 @@ - - - + + + diff --git a/demo-ng/app/tabs/mlkit/facedetection/facedetection.component.ts b/demo-ng/app/tabs/mlkit/facedetection/facedetection.component.ts index 5f7fb428..31b9fcc7 100644 --- a/demo-ng/app/tabs/mlkit/facedetection/facedetection.component.ts +++ b/demo-ng/app/tabs/mlkit/facedetection/facedetection.component.ts @@ -1,6 +1,5 @@ import { Component } from "@angular/core"; -import { MLKitDetectFacesResult } from "nativescript-plugin-firebase/mlkit/facedetection"; -import { ImageSource } from "tns-core-modules/image-source"; +import { MLKitDetectFacesOnDeviceResult } from 
"nativescript-plugin-firebase/mlkit/facedetection"; @Component({ selector: "mlkit-facedetection", @@ -18,7 +17,7 @@ export class FaceDetectionComponent { mlKitAllOK: string; onFaceDetectionResult(scanResult: any): any { - const value: MLKitDetectFacesResult = scanResult.value; + const value: MLKitDetectFacesOnDeviceResult = scanResult.value; if (value.faces.length > 0) { this.faces = value.faces; diff --git a/demo-ng/app/tabs/mlkit/imagelabeling/imagelabeling.component.html b/demo-ng/app/tabs/mlkit/imagelabeling/imagelabeling.component.html index 2244a91c..48edefb3 100644 --- a/demo-ng/app/tabs/mlkit/imagelabeling/imagelabeling.component.html +++ b/demo-ng/app/tabs/mlkit/imagelabeling/imagelabeling.component.html @@ -6,7 +6,7 @@ diff --git a/demo-ng/app/tabs/mlkit/imagelabeling/imagelabeling.component.ts b/demo-ng/app/tabs/mlkit/imagelabeling/imagelabeling.component.ts index 8e1d508c..e441ee12 100644 --- a/demo-ng/app/tabs/mlkit/imagelabeling/imagelabeling.component.ts +++ b/demo-ng/app/tabs/mlkit/imagelabeling/imagelabeling.component.ts @@ -1,5 +1,5 @@ import { Component } from "@angular/core"; -import { MLKitImageLabelingResult } from "nativescript-plugin-firebase/mlkit/imagelabeling"; +import { MLKitImageLabelingOnDeviceResult } from "nativescript-plugin-firebase/mlkit/imagelabeling"; @Component({ selector: "mlkit-imagelabeling", @@ -13,7 +13,7 @@ export class ImageLabelingComponent { }>; onImageLabeledResult(scanResult: any): void { - const value: MLKitImageLabelingResult = scanResult.value; + const value: MLKitImageLabelingOnDeviceResult = scanResult.value; this.labels = value.labels; } } diff --git a/demo-ng/app/tabs/mlkit/mlkit.component.html b/demo-ng/app/tabs/mlkit/mlkit.component.html index 6415a831..d83dba9d 100644 --- a/demo-ng/app/tabs/mlkit/mlkit.component.html +++ b/demo-ng/app/tabs/mlkit/mlkit.component.html @@ -4,7 +4,10 @@ - + + + + diff --git a/demo-ng/app/tabs/mlkit/mlkit.component.ts b/demo-ng/app/tabs/mlkit/mlkit.component.ts index 
436fca54..40f608ea 100644 --- a/demo-ng/app/tabs/mlkit/mlkit.component.ts +++ b/demo-ng/app/tabs/mlkit/mlkit.component.ts @@ -1,17 +1,22 @@ import { Component, NgZone } from "@angular/core"; +import { RouterExtensions } from "nativescript-angular"; import { ImageSource } from "tns-core-modules/image-source"; - -import { BarcodeFormat, MLKitScanBarcodesResult } from "nativescript-plugin-firebase/mlkit/barcodescanning"; -import { MLKitRecognizeTextLocalResult, MLKitRecognizeTextCloudResult } from "nativescript-plugin-firebase/mlkit/textrecognition"; -import { MLKitLandmarkRecognitionCloudResult } from "nativescript-plugin-firebase/mlkit/landmarkrecognition"; -import { MLKitDetectFacesResult } from "nativescript-plugin-firebase/mlkit/facedetection"; import { action } from "tns-core-modules/ui/dialogs"; import { ImageAsset } from "tns-core-modules/image-asset"; +import { isIOS } from "tns-core-modules/platform"; import * as ImagePicker from "nativescript-imagepicker"; import * as Camera from "nativescript-camera"; -import { RouterExtensions } from "nativescript-angular"; -import { isIOS } from "tns-core-modules/platform"; -import { MLKitImageLabelingResult } from "nativescript-plugin-firebase/mlkit/imagelabeling"; +import { BarcodeFormat, MLKitScanBarcodesOnDeviceResult } from "nativescript-plugin-firebase/mlkit/barcodescanning"; +import { MLKitLandmarkRecognitionCloudResult } from "nativescript-plugin-firebase/mlkit/landmarkrecognition"; +import { MLKitDetectFacesOnDeviceResult } from "nativescript-plugin-firebase/mlkit/facedetection"; +import { + MLKitRecognizeTextCloudResult, + MLKitRecognizeTextOnDeviceResult +} from "nativescript-plugin-firebase/mlkit/textrecognition"; +import { + MLKitImageLabelingCloudResult, + MLKitImageLabelingOnDeviceResult +} from "nativescript-plugin-firebase/mlkit/imagelabeling"; const firebase = require("nativescript-plugin-firebase"); @@ -25,15 +30,16 @@ export class MLKitComponent { pickedImage: ImageSource; private mlkitFeatures: Array 
= [ - "Text recognition (local)", + "Text recognition (on device)", "Text recognition (cloud)", - "Barcode scanning", - "Face detection", - "Image labeling", + "Barcode scanning (on device)", + "Face detection (on device)", + "Image labeling (on device)", + "Image labeling (cloud)", "Landmark recognition (cloud)" ]; - private mlkitLocalFeatures: Array = [ + private mlkitOnDeviceFeatures: Array = [ "Text recognition", "Barcode scanning", "Face detection", @@ -46,9 +52,9 @@ export class MLKitComponent { fromCameraFeed(): void { action( - "Test which ML Kit feature? No cloud processing will be used.", + "Test which on-device ML Kit feature?", "Cancel", - this.mlkitLocalFeatures + this.mlkitOnDeviceFeatures ).then((pickedItem: string) => { let to; if (pickedItem === "Text recognition") { @@ -83,7 +89,7 @@ export class MLKitComponent { width: 800, height: 800, keepAspectRatio: true, - saveToGallery: true, + saveToGallery: false, cameraFacing: "rear" }).then(imageAsset => { new ImageSource().fromAsset(imageAsset).then(imageSource => { @@ -147,34 +153,34 @@ export class MLKitComponent { this.mlkitFeatures ).then((pickedItem: string) => { let pickedItemIndex = this.mlkitFeatures.indexOf(pickedItem); - if (pickedItem === "Text recognition (local)") { - this.recognizeTextLocal(imageSource); + if (pickedItem === "Text recognition (on device)") { + this.recognizeTextOnDevice(imageSource); } else if (pickedItem === "Text recognition (cloud)") { this.recognizeTextCloud(imageSource); - } else if (pickedItem === "Barcode scanning") { - this.scanBarcode(imageSource); - } else if (pickedItem === "Face detection") { - this.detectFaces(imageSource); - } else if (pickedItem === "Image labeling") { - this.labelImage(imageSource); + } else if (pickedItem === "Barcode scanning (on device)") { + this.scanBarcodeOnDevice(imageSource); + } else if (pickedItem === "Face detection (on device)") { + this.detectFacesOnDevice(imageSource); + } else if (pickedItem === "Image labeling (on device)") 
{ + this.labelImageOnDevice(imageSource); + } else if (pickedItem === "Image labeling (cloud)") { + this.labelImageCloud(imageSource); } else if (pickedItem === "Landmark recognition (cloud)") { this.recognizeLandmarkCloud(imageSource); } }); } - private recognizeTextLocal(imageSource: ImageSource): void { - firebase.mlkit.textrecognition.recognizeTextLocal({ + private recognizeTextOnDevice(imageSource: ImageSource): void { + firebase.mlkit.textrecognition.recognizeTextOnDevice({ image: imageSource - }).then( - (result: MLKitRecognizeTextLocalResult) => { - alert({ - title: `Result`, - message: result.features.map(feature => feature.text).join(""), - okButtonText: "OK" - }); - }) - .catch(errorMessage => console.log("ML Kit error: " + errorMessage)); + }).then((result: MLKitRecognizeTextOnDeviceResult) => { + alert({ + title: `Result`, + message: result.features.map(feature => feature.text).join(""), + okButtonText: "OK" + }); + }).catch(errorMessage => console.log("ML Kit error: " + errorMessage)); } private recognizeTextCloud(imageSource: ImageSource): void { @@ -207,12 +213,13 @@ export class MLKitComponent { .catch(errorMessage => console.log("ML Kit error: " + errorMessage)); } - private scanBarcode(imageSource: ImageSource): void { - firebase.mlkit.barcodescanning.scanBarcodes({ + private scanBarcodeOnDevice(imageSource: ImageSource): void { + console.log(">>> imageSource.rotationAngle: " + imageSource.rotationAngle); + firebase.mlkit.barcodescanning.scanBarcodesOnDevice({ image: imageSource, formats: [BarcodeFormat.QR_CODE, BarcodeFormat.EAN_13] }).then( - (result: MLKitScanBarcodesResult) => { + (result: MLKitScanBarcodesOnDeviceResult) => { alert({ title: `Result`, message: JSON.stringify(result.barcodes), @@ -222,11 +229,11 @@ export class MLKitComponent { .catch(errorMessage => console.log("ML Kit error: " + errorMessage)); } - private detectFaces(imageSource: ImageSource): void { - firebase.mlkit.facedetection.detectFaces({ + private 
detectFacesOnDevice(imageSource: ImageSource): void { + firebase.mlkit.facedetection.detectFacesOnDevice({ image: imageSource }).then( - (result: MLKitDetectFacesResult) => { + (result: MLKitDetectFacesOnDeviceResult) => { alert({ title: `Result`, message: JSON.stringify(result.faces), @@ -236,12 +243,27 @@ export class MLKitComponent { .catch(errorMessage => console.log("ML Kit error: " + errorMessage)); } - private labelImage(imageSource: ImageSource): void { - firebase.mlkit.imagelabeling.labelImage({ + private labelImageOnDevice(imageSource: ImageSource): void { + firebase.mlkit.imagelabeling.labelImageOnDevice({ + image: imageSource, + confidenceThreshold: 0.3 + }).then( + (result: MLKitImageLabelingOnDeviceResult) => { + alert({ + title: `Result`, + message: JSON.stringify(result.labels), + okButtonText: "OK" + }); + }) + .catch(errorMessage => console.log("ML Kit error: " + errorMessage)); + } + + private labelImageCloud(imageSource: ImageSource): void { + firebase.mlkit.imagelabeling.labelImageCloud({ image: imageSource, confidenceThreshold: 0.3 }).then( - (result: MLKitImageLabelingResult) => { + (result: MLKitImageLabelingCloudResult) => { alert({ title: `Result`, message: JSON.stringify(result.labels), diff --git a/demo-ng/app/tabs/mlkit/textrecognition/textrecognition.component.html b/demo-ng/app/tabs/mlkit/textrecognition/textrecognition.component.html index bc7ec233..df4d241f 100644 --- a/demo-ng/app/tabs/mlkit/textrecognition/textrecognition.component.html +++ b/demo-ng/app/tabs/mlkit/textrecognition/textrecognition.component.html @@ -6,7 +6,7 @@ diff --git a/demo-ng/app/tabs/mlkit/textrecognition/textrecognition.component.ts b/demo-ng/app/tabs/mlkit/textrecognition/textrecognition.component.ts index c42bd308..ab96a5f9 100644 --- a/demo-ng/app/tabs/mlkit/textrecognition/textrecognition.component.ts +++ b/demo-ng/app/tabs/mlkit/textrecognition/textrecognition.component.ts @@ -1,5 +1,5 @@ import { Component } from "@angular/core"; -import { 
MLKitRecognizeTextLocalResult } from "nativescript-plugin-firebase/mlkit/textrecognition"; +import { MLKitRecognizeTextOnDeviceResult } from "nativescript-plugin-firebase/mlkit/textrecognition"; @Component({ selector: "mlkit-textrecognition", @@ -12,7 +12,7 @@ export class TextRecognitionComponent { }>; onTextRecognitionResult(scanResult: any): void { - const value: MLKitRecognizeTextLocalResult = scanResult.value; + const value: MLKitRecognizeTextOnDeviceResult = scanResult.value; this.features = value.features; } } diff --git a/docs/ML_KIT.md b/docs/ML_KIT.md index f2cc7b8f..65ce38c2 100644 --- a/docs/ML_KIT.md +++ b/docs/ML_KIT.md @@ -3,5 +3,102 @@ Just dumping a few things I should not forget to add to the doc: - see step 3 @ https://firebase.google.com/docs/ml-kit/android/recognize-text - For Cloud processing, enable the Vision API and upgrade your Firebase project to "Blaze" (pay as you go) -Known issues: -- Detecting faces from still images doesn't currently work on iOS (detection from the camera stream works though) + +ML Kit + +Make sure to check out [this demo app](https://github.com/EddyVerbruggen/nativescript-plugin-firebase/tree/master/demo-ng) because it has almost all ML Kit features this plugin currently supports! Steps: + +```bash +git clone https://github.com/EddyVerbruggen/nativescript-plugin-firebase +cd nativescript-plugin-firebase/src +npm i +npm run demo-ng.ios (or .android) +``` + +## Enabling ML Kit +During plugin installation you'll be asked whether or not you use ML Kit and which of its features. + +In case you're upgrading and you have the `firebase.nativescript.json` file in your project root, it's safest to rename it (so you can see what your old configuration was), +then clean your platforms folder (`rm -rf platforms`) and build your app again. You will be prompted which Firebase features you'll want to use. + +## ML Kit Features +There are two ways of using ML Kit: On-device or in the cloud. Depending on the feature, one or both modes are available: + +- *On-device*. 
These features have been enhanced to not only interpret still images, but you can also run ML against a live camera feed. Why? Because it's fr***ing cool! +- *Cloud*. The cloud has much larger and always up to date models, so results will be more accurate. Since this is a remote service recognition speed depends heavily on the size of the images you send to the cloud. + + +### Cloud configuration +To be able to use Cloud features you need to do two things: + +1) Enable the Cloud Vision API: + +- Open the [Cloud Vision API](https://console.cloud.google.com/apis/library/vision.googleapis.com/) in the Cloud Console API library. +- Ensure that your Firebase project is selected in the menu at the top of the page. +- If the API is not already enabled, click _Enable_. + +2) Upgrade to a Blaze plan. + +- Open the [Firebase console](https://console.firebase.google.com/). +- Select your project. +- In the bottom left, make sure you're on the _Blaze_ plan, or hit the 'Upgrade' button. + +### Feature table + +|Feature|On-device|Cloud +|---|---|--- +|[Text recognition](#text-recognition)|✅|✅ +|[Face detection](#face-detection)|✅*| +|[Barcode scanning](#barcode-scanning)|✅| +|[Image labeling](#image-labeling)|✅|✅ +|[Landmark recognition](#landmark-recognition)||✅ +|[Custom model inference](#custom-model-inference)|✅| + +*) _Currently detecting faces from still images doesn't work on iOS (detection from the camera stream works fine though)._ + +### [Text recognition](https://firebase.google.com/docs/ml-kit/recognize-text) +ML Kit - Text recognition + +#### Still image (on-device) + +```typescript +import { MLKitRecognizeTextOnDeviceResult } from "nativescript-plugin-firebase/mlkit/textrecognition"; +const firebase = require("nativescript-plugin-firebase"); + +firebase.mlkit.textrecognition.recognizeTextOnDevice({ + image: imageSource // a NativeScript Image or ImageSource, see the demo for examples +}).then((result: MLKitRecognizeTextOnDeviceResult) => { // just look at this type to 
see what else is returned + console.log(result.features.map(feature => feature.text).join("")); +}).catch(errorMessage => console.log("ML Kit error: " + errorMessage)); +``` + +#### Still image (cloud) + +```typescript +import { MLKitRecognizeTextCloudResult } from "nativescript-plugin-firebase/mlkit/textrecognition"; +const firebase = require("nativescript-plugin-firebase"); + +firebase.mlkit.textrecognition.recognizeTextCloud({ + image: imageSource, // a NativeScript Image or ImageSource, see the demo for examples + modelType: "latest", // either "latest" or "stable" (default "stable") + maxResults: 15 // default 10 +}).then((result: MLKitRecognizeTextCloudResult) => { + console.log(result.text); +}).catch(errorMessage => console.log("ML Kit error: " + errorMessage)); +``` +#### Live camera feed + +### [Face detection](https://firebase.google.com/docs/ml-kit/detect-faces) +ML Kit - Face detection + +### [Barcode scanning](https://firebase.google.com/docs/ml-kit/read-barcodes) +ML Kit - Barcode scanning + +### [Image labeling](https://firebase.google.com/docs/ml-kit/label-images) +ML Kit - Image labeling + +### [Landmark recognition](https://firebase.google.com/docs/ml-kit/recognize-landmarks) +ML Kit - Landmark recognition + +### [Custom model inference](https://firebase.google.com/docs/ml-kit/use-custom-models) +Coming soon diff --git a/docs/images/features/mlkit.png b/docs/images/features/mlkit.png new file mode 100644 index 00000000..c921dec7 Binary files /dev/null and b/docs/images/features/mlkit.png differ diff --git a/docs/images/features/mlkit_face_detection.png b/docs/images/features/mlkit_face_detection.png new file mode 100644 index 00000000..350e0d87 Binary files /dev/null and b/docs/images/features/mlkit_face_detection.png differ diff --git a/docs/images/features/mlkit_text_barcode_scanning.png b/docs/images/features/mlkit_text_barcode_scanning.png new file mode 100644 index 00000000..8ab45215 Binary files /dev/null and 
b/docs/images/features/mlkit_text_barcode_scanning.png differ diff --git a/docs/images/features/mlkit_text_image_labeling.png b/docs/images/features/mlkit_text_image_labeling.png new file mode 100644 index 00000000..3139eec7 Binary files /dev/null and b/docs/images/features/mlkit_text_image_labeling.png differ diff --git a/docs/images/features/mlkit_text_landmark_recognition.png b/docs/images/features/mlkit_text_landmark_recognition.png new file mode 100644 index 00000000..7ca3bcb9 Binary files /dev/null and b/docs/images/features/mlkit_text_landmark_recognition.png differ diff --git a/docs/images/features/mlkit_text_recognition.png b/docs/images/features/mlkit_text_recognition.png new file mode 100644 index 00000000..46d7f2fa Binary files /dev/null and b/docs/images/features/mlkit_text_recognition.png differ diff --git a/src/mlkit/barcodescanning/index.android.ts b/src/mlkit/barcodescanning/index.android.ts index 8ab25948..a59399f5 100644 --- a/src/mlkit/barcodescanning/index.android.ts +++ b/src/mlkit/barcodescanning/index.android.ts @@ -1,5 +1,5 @@ import { ImageSource } from "tns-core-modules/image-source"; -import { MLKitScanBarcodesOptions, MLKitScanBarcodesResult } from "./"; +import { MLKitScanBarcodesOnDeviceOptions, MLKitScanBarcodesOnDeviceResult } from "./"; import { MLKitOptions } from "../index"; import { BarcodeFormat, MLKitBarcodeScanner as MLKitBarcodeScannerBase } from "./barcodescanning-common"; @@ -28,7 +28,7 @@ export class MLKitBarcodeScanner extends MLKitBarcodeScannerBase { // const imageSource = new ImageSource(); // imageSource.setNativeSource(this.lastVisionImage.getBitmapForDebugging()); - const result = { + const result = { // imageSource: imageSource, barcodes: [] }; @@ -64,14 +64,14 @@ function getBarcodeDetector(formats?: Array): any { } } -export function scanBarcodes(options: MLKitScanBarcodesOptions): Promise { +export function scanBarcodesOnDevice(options: MLKitScanBarcodesOnDeviceOptions): Promise { return new Promise((resolve, 
reject) => { try { const firebaseVisionBarcodeDetector = getBarcodeDetector(options.formats); const onSuccessListener = new com.google.android.gms.tasks.OnSuccessListener({ - onSuccess: (barcodes) => { - const result = { + onSuccess: barcodes => { + const result = { barcodes: [] }; @@ -99,13 +99,12 @@ export function scanBarcodes(options: MLKitScanBarcodesOptions): Promise; } -export interface MLKitScanBarcodesOptions extends MLKitOptions { +export interface MLKitScanBarcodesOnDeviceOptions extends MLKitOptions { /** * Limit to only what you need to speed up processing. * If not set, we'll detect all supported formats. @@ -19,6 +19,6 @@ export interface MLKitScanBarcodesOptions extends MLKitOptions { formats?: Array; } -export declare function scanBarcodes(options: MLKitScanBarcodesOptions): Promise; +export declare function scanBarcodesOnDevice(options: MLKitScanBarcodesOnDeviceOptions): Promise; export declare class MLKitBarcodeScanner extends MLKitCameraView {} diff --git a/src/mlkit/barcodescanning/index.ios.ts b/src/mlkit/barcodescanning/index.ios.ts index bdd645e0..a14d11d7 100644 --- a/src/mlkit/barcodescanning/index.ios.ts +++ b/src/mlkit/barcodescanning/index.ios.ts @@ -1,11 +1,12 @@ import { ImageSource } from "tns-core-modules/image-source"; -import { MLKitScanBarcodesOptions, MLKitScanBarcodesResult } from "./index"; +import { MLKitScanBarcodesOnDeviceOptions, MLKitScanBarcodesOnDeviceResult } from "./index"; import { MLKitOptions } from "../index"; import { BarcodeFormat, MLKitBarcodeScanner as MLKitBarcodeScannerBase } from "./barcodescanning-common"; export { BarcodeFormat }; export class MLKitBarcodeScanner extends MLKitBarcodeScannerBase { + protected createDetector(): any { let formats: Array; if (this.formats) { @@ -22,7 +23,7 @@ export class MLKitBarcodeScanner extends MLKitBarcodeScannerBase { console.log(error.localizedDescription); } else if (barcodes !== null && barcodes.count > 0) { - const result = { + const result = { barcodes: [] }; @@ 
-58,7 +59,7 @@ function getBarcodeDetector(formats?: Array): any { } } -export function scanBarcodes(options: MLKitScanBarcodesOptions): Promise { +export function scanBarcodesOnDevice(options: MLKitScanBarcodesOnDeviceOptions): Promise { return new Promise((resolve, reject) => { try { const barcodeDetector = getBarcodeDetector(options.formats); @@ -68,7 +69,7 @@ export function scanBarcodes(options: MLKitScanBarcodesOptions): Promise{ + const result = { barcodes: [] }; @@ -83,7 +84,7 @@ export function scanBarcodes(options: MLKitScanBarcodesOptions): Promise{ + const result = { // imageSource: imageSource, faces: [] }; @@ -58,7 +58,7 @@ function getFaceDetector(): any { return com.google.firebase.ml.vision.FirebaseVision.getInstance().getVisionFaceDetector(faceDetectorOptions); } -export function detectFaces(options: MLKitDetectFacesOptions): Promise { +export function detectFacesOnDevice(options: MLKitDetectFacesOnDeviceOptions): Promise { return new Promise((resolve, reject) => { try { const firebaseVisionFaceDetector = getFaceDetector(); @@ -66,7 +66,7 @@ export function detectFaces(options: MLKitDetectFacesOptions): Promise { - const result = { + const result = { faces: [] }; @@ -95,7 +95,7 @@ export function detectFaces(options: MLKitDetectFacesOptions): Promise; } -export interface MLKitDetectFacesOptions extends MLKitOptions { +export interface MLKitDetectFacesOnDeviceOptions extends MLKitOptions { // TODO there are a few options here } -export declare function detectFaces(options: MLKitDetectFacesOptions): Promise; +export declare function detectFacesOnDevice(options: MLKitDetectFacesOnDeviceOptions): Promise; export declare class MLKitFaceDetection extends MLKitCameraView {} diff --git a/src/mlkit/facedetection/index.ios.ts b/src/mlkit/facedetection/index.ios.ts index 9fcb1568..d535f8cd 100644 --- a/src/mlkit/facedetection/index.ios.ts +++ b/src/mlkit/facedetection/index.ios.ts @@ -1,5 +1,5 @@ import { ImageSource } from "tns-core-modules/image-source"; 
-import { MLKitDetectFacesOptions, MLKitDetectFacesResult } from "./"; +import { MLKitDetectFacesOnDeviceOptions, MLKitDetectFacesOnDeviceResult } from "./"; import { MLKitOptions } from "../index"; import { MLKitFaceDetection as MLKitFaceDetectionBase } from "./facedetection-common"; @@ -15,7 +15,7 @@ export class MLKitFaceDetection extends MLKitFaceDetectionBase { console.log(error.localizedDescription); } else if (faces !== null && faces.count > 0) { - const result = { + const result = { faces: [] }; @@ -56,7 +56,8 @@ function getDetector(): FIRVisionFaceDetector { return firVision.faceDetectorWithOptions(options); } -export function detectFaces(options: MLKitDetectFacesOptions): Promise { +// TODO somehow this function doesn't work.. probably because of the passed image, but I can't find the cause.. the live camera version works great tho +export function detectFacesOnDevice(options: MLKitDetectFacesOnDeviceOptions): Promise { return new Promise((resolve, reject) => { try { const faceDetector = getDetector(); @@ -65,14 +66,12 @@ export function detectFaces(options: MLKitDetectFacesOptions): Promise{ + const result = { faces: [] }; - console.log(">>> faces.count: " + faces.count); for (let i = 0, l = faces.count; i < l; i++) { const face: FIRVisionFace = faces.objectAtIndex(i); - console.log(">> face: " + face); result.faces.push({ smilingProbability: face.hasSmilingProbability ? face.smilingProbability : undefined, leftEyeOpenProbability: face.hasLeftEyeOpenProbability ? 
face.leftEyeOpenProbability : undefined, diff --git a/src/mlkit/imagelabeling/imagelabeling-common.ts b/src/mlkit/imagelabeling/imagelabeling-common.ts index 18825df5..29d4ed62 100644 --- a/src/mlkit/imagelabeling/imagelabeling-common.ts +++ b/src/mlkit/imagelabeling/imagelabeling-common.ts @@ -6,12 +6,6 @@ export const confidenceThresholdProperty = new Property({ - name: "maxResults", - defaultValue: 10, -}); - export abstract class MLKitImageLabeling extends MLKitCameraView { static scanResultEvent: string = "scanResult"; @@ -21,11 +15,6 @@ export abstract class MLKitImageLabeling extends MLKitCameraView { [confidenceThresholdProperty.setNative](value: number) { this.confidenceThreshold = value; } - - [maxResultsThresholdProperty.setNative](value: number) { - this.maxResults = value; - } } confidenceThresholdProperty.register(MLKitImageLabeling); -maxResultsThresholdProperty.register(MLKitImageLabeling); diff --git a/src/mlkit/imagelabeling/index.android.ts b/src/mlkit/imagelabeling/index.android.ts index 2c36f586..74c0adb9 100644 --- a/src/mlkit/imagelabeling/index.android.ts +++ b/src/mlkit/imagelabeling/index.android.ts @@ -1,7 +1,8 @@ import { ImageSource } from "tns-core-modules/image-source"; import { MLKitOptions, } from "../"; -import { MLKitImageLabelingOptions, MLKitImageLabelingResult } from "./"; +import { MLKitImageLabelingOnDeviceOptions, MLKitImageLabelingOnDeviceResult } from "./"; import { MLKitImageLabeling as MLKitImageLabelingBase } from "./imagelabeling-common"; +import { MLKitImageLabelingCloudOptions, MLKitImageLabelingCloudResult } from "./index"; declare const com: any; @@ -20,7 +21,7 @@ export class MLKitImageLabeling extends MLKitImageLabelingBase { // const imageSource = new ImageSource(); // imageSource.setNativeSource(this.lastVisionImage.getBitmapForDebugging()); - const result = { + const result = { // imageSource: imageSource, labels: [] }; @@ -53,14 +54,14 @@ function getDetector(confidenceThreshold: number): any { return 
com.google.firebase.ml.vision.FirebaseVision.getInstance().getVisionLabelDetector(labelDetectorOptions); } -export function labelImage(options: MLKitImageLabelingOptions): Promise { +export function labelImageOnDevice(options: MLKitImageLabelingOnDeviceOptions): Promise { return new Promise((resolve, reject) => { try { const firebaseVisionLabelDetector = getDetector(options.confidenceThreshold || 0.5); const onSuccessListener = new com.google.android.gms.tasks.OnSuccessListener({ onSuccess: labels => { - const result = { + const result = { labels: [] }; @@ -88,7 +89,54 @@ export function labelImage(options: MLKitImageLabelingOptions): Promise { + return new Promise((resolve, reject) => { + try { + const cloudDetectorOptions = + new com.google.firebase.ml.vision.cloud.FirebaseVisionCloudDetectorOptions.Builder() + .setModelType(options.modelType === "latest" ? com.google.firebase.ml.vision.cloud.FirebaseVisionCloudDetectorOptions.LATEST_MODEL : com.google.firebase.ml.vision.cloud.FirebaseVisionCloudDetectorOptions.STABLE_MODEL) + .setMaxResults(options.maxResults || 10) + .build(); + + const firebaseVisionCloudLabelDetector = com.google.firebase.ml.vision.FirebaseVision.getInstance().getVisionCloudLabelDetector(cloudDetectorOptions); + + const onSuccessListener = new com.google.android.gms.tasks.OnSuccessListener({ + onSuccess: labels => { + const result = { + labels: [] + }; + + for (let i = 0; i < labels.size(); i++) { + const label = labels.get(i); + result.labels.push({ + text: label.getLabel(), + confidence: label.getConfidence() + }); + } + + console.log(">>> cloud image labeling result: " + JSON.stringify(result)); + resolve(result); + firebaseVisionCloudLabelDetector.close(); + } + }); + + const onFailureListener = new com.google.android.gms.tasks.OnFailureListener({ + onFailure: exception => reject(exception.getMessage()) + }); + + firebaseVisionCloudLabelDetector + .detectInImage(getImage(options)) + .addOnSuccessListener(onSuccessListener) + 
.addOnFailureListener(onFailureListener); + + } catch (ex) { + console.log("Error in firebase.mlkit.labelImageCloud: " + ex); reject(ex); } }); diff --git a/src/mlkit/imagelabeling/index.d.ts b/src/mlkit/imagelabeling/index.d.ts index c4dd7aef..b7c5d6ea 100644 --- a/src/mlkit/imagelabeling/index.d.ts +++ b/src/mlkit/imagelabeling/index.d.ts @@ -1,7 +1,7 @@ import { MLKitOptions } from "../"; -import { MLKitCameraView, MLKitResult } from "../index"; +import { MLKitCameraView, MLKitCloudOptions, MLKitResult } from "../index"; -export interface MLKitImageLabelingResult extends MLKitResult { +export interface MLKitImageLabelingCloudResult extends MLKitResult { labels: Array<{ text: string; confidence: number; @@ -9,17 +9,21 @@ export interface MLKitImageLabelingResult extends MLKitResult { }>; } -export interface MLKitImageLabelingOptions extends MLKitOptions { +export interface MLKitImageLabelingOnDeviceResult extends MLKitImageLabelingCloudResult { +} + +export interface MLKitImageLabelingOnDeviceOptions extends MLKitOptions { /** * 0.5 by default */ confidenceThreshold?: number; - /** - * 10 by default - */ - maxResults?: number; } -export declare function labelImage(options: MLKitImageLabelingOptions): Promise; +export interface MLKitImageLabelingCloudOptions extends MLKitCloudOptions { +} + +export declare function labelImageOnDevice(options: MLKitImageLabelingOnDeviceOptions): Promise; + +export declare function labelImageCloud(options: MLKitImageLabelingCloudOptions): Promise; export declare class MLKitImageLabeling extends MLKitCameraView {} diff --git a/src/mlkit/imagelabeling/index.ios.ts b/src/mlkit/imagelabeling/index.ios.ts index 070e073f..48d49cc5 100644 --- a/src/mlkit/imagelabeling/index.ios.ts +++ b/src/mlkit/imagelabeling/index.ios.ts @@ -1,7 +1,8 @@ import { ImageSource } from "tns-core-modules/image-source"; import { MLKitOptions } from "../"; -import { MLKitImageLabelingOptions, MLKitImageLabelingResult } from "./"; +import { 
MLKitImageLabelingOnDeviceOptions, MLKitImageLabelingOnDeviceResult } from "./"; import { MLKitImageLabeling as MLKitImageLabelingBase } from "./imagelabeling-common"; +import { MLKitImageLabelingCloudOptions, MLKitImageLabelingCloudResult } from "./index"; export class MLKitImageLabeling extends MLKitImageLabelingBase { @@ -15,7 +16,7 @@ export class MLKitImageLabeling extends MLKitImageLabelingBase { console.log(error.localizedDescription); } else if (labels !== null && labels.count > 0) { - const result = { + const result = { labels: [] }; @@ -48,7 +49,7 @@ function getDetector(confidenceThreshold: number): FIRVisionLabelDetector { return firVision.labelDetectorWithOptions(fIRVisionLabelDetectorOptions); } -export function labelImage(options: MLKitImageLabelingOptions): Promise { +export function labelImageOnDevice(options: MLKitImageLabelingOnDeviceOptions): Promise { return new Promise((resolve, reject) => { try { const labelDetector = getDetector(options.confidenceThreshold || 0.5); @@ -58,7 +59,7 @@ export function labelImage(options: MLKitImageLabelingOptions): Promise{ + const result = { labels: [] }; @@ -75,7 +76,44 @@ export function labelImage(options: MLKitImageLabelingOptions): Promise { + return new Promise((resolve, reject) => { + try { + const fIRVisionCloudDetectorOptions = FIRVisionCloudDetectorOptions.new(); + fIRVisionCloudDetectorOptions.modelType = options.modelType === "latest" ? 
FIRVisionCloudModelType.Latest : FIRVisionCloudModelType.Stable; + fIRVisionCloudDetectorOptions.maxResults = options.maxResults || 10; + + const firVision: FIRVision = FIRVision.vision(); + const labelDetector = firVision.cloudLabelDetectorWithOptions(fIRVisionCloudDetectorOptions); + + labelDetector.detectInImageCompletion(getImage(options), (labels: NSArray, error: NSError) => { + if (error !== null) { + reject(error.localizedDescription); + + } else if (labels !== null) { + const result = { + labels: [] + }; + + for (let i = 0, l = labels.count; i < l; i++) { + const label: FIRVisionCloudLabel = labels.objectAtIndex(i); + result.labels.push({ + text: label.label, + confidence: label.confidence + }); + } + console.log(">>> cloud image labeling result: " + JSON.stringify(result.labels)); + resolve(result); + } + }); + } catch (ex) { + console.log("Error in firebase.mlkit.labelImageCloud: " + ex); reject(ex); } }); diff --git a/src/mlkit/mlkit-cameraview.android.ts b/src/mlkit/mlkit-cameraview.android.ts index 739ba322..ed5b5fc1 100644 --- a/src/mlkit/mlkit-cameraview.android.ts +++ b/src/mlkit/mlkit-cameraview.android.ts @@ -101,10 +101,7 @@ export abstract class MLKitCameraView extends MLKitCameraViewBase { } const camera = android.hardware.Camera.open(requestedCameraId); - const sizePair = this.selectSizePair(camera, 800, 600); // TODO based on wrapping frame - console.log(">>> sizePair.pictureSize: " + sizePair.pictureSize); - console.log(">>> sizePair.pictureSize.xy: " + sizePair.pictureSize.width + " x " + sizePair.pictureSize.height); - console.log(">>> sizePair.previewSize.xy: " + sizePair.previewSize.width + " x " + sizePair.previewSize.height); + const sizePair = this.selectSizePair(camera, 800, 600); // TODO based on wrapping frame if (!sizePair) { console.log("Could not find suitable preview size."); diff --git a/src/mlkit/mlkit-util.ios.ts b/src/mlkit/mlkit-util.ios.ts deleted file mode 100644 index 50a9c2c5..00000000 --- a/src/mlkit/mlkit-util.ios.ts 
+++ /dev/null @@ -1,3 +0,0 @@ -export function getVisionImage(image: any /* native image */): any /* native VisionImage */ { - -} diff --git a/src/mlkit/mlkit-util.ts b/src/mlkit/mlkit-util.ts deleted file mode 100644 index 0bdef6dd..00000000 --- a/src/mlkit/mlkit-util.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function getVisionImage(image: any /* native image */): any /* native VisionImage */; diff --git a/src/mlkit/textrecognition/index.android.ts b/src/mlkit/textrecognition/index.android.ts index de1e3b38..bb3dff9d 100644 --- a/src/mlkit/textrecognition/index.android.ts +++ b/src/mlkit/textrecognition/index.android.ts @@ -1,6 +1,6 @@ import { ImageSource } from "tns-core-modules/image-source"; import { MLKitOptions, } from "../"; -import { MLKitRecognizeTextLocalOptions, MLKitRecognizeTextLocalResult } from "./"; +import { MLKitRecognizeTextOnDeviceOptions, MLKitRecognizeTextOnDeviceResult } from "./"; import { MLKitTextRecognition as MLKitTextRecognitionBase } from "./textrecognition-common"; import { MLKitRecognizeTextCloudOptions, @@ -23,7 +23,7 @@ export class MLKitTextRecognition extends MLKitTextRecognitionBase { this.notify({ eventName: MLKitTextRecognition.scanResultEvent, object: this, - value: getLocalResult(textBlocks.getBlocks()) + value: getOnDeviceResult(textBlocks.getBlocks()) }); } } @@ -31,8 +31,8 @@ export class MLKitTextRecognition extends MLKitTextRecognitionBase { } } -function getLocalResult(blocks: any): MLKitRecognizeTextLocalResult { - const result = { +function getOnDeviceResult(blocks: any): MLKitRecognizeTextOnDeviceResult { + const result = { features: [] }; @@ -72,14 +72,14 @@ function getLocalResult(blocks: any): MLKitRecognizeTextLocalResult { return result; } -export function recognizeTextLocal(options: MLKitRecognizeTextLocalOptions): Promise { +export function recognizeTextOnDevice(options: MLKitRecognizeTextOnDeviceOptions): Promise { return new Promise((resolve, reject) => { try { const firebaseVisionTextDetector = 
com.google.firebase.ml.vision.FirebaseVision.getInstance().getVisionTextDetector(); const onSuccessListener = new com.google.android.gms.tasks.OnSuccessListener({ onSuccess: textBlocks => { - resolve(getLocalResult(textBlocks.getBlocks())); + resolve(getOnDeviceResult(textBlocks.getBlocks())); firebaseVisionTextDetector.close(); } }); @@ -94,7 +94,7 @@ export function recognizeTextLocal(options: MLKitRecognizeTextLocalOptions): Pro .addOnFailureListener(onFailureListener); } catch (ex) { - console.log("Error in firebase.mlkit.recognizeTextLocal: " + ex); + console.log("Error in firebase.mlkit.recognizeTextOnDevice: " + ex); reject(ex); } }); diff --git a/src/mlkit/textrecognition/index.d.ts b/src/mlkit/textrecognition/index.d.ts index 11fd83bd..b62361ef 100644 --- a/src/mlkit/textrecognition/index.d.ts +++ b/src/mlkit/textrecognition/index.d.ts @@ -17,7 +17,7 @@ export interface MLKitRecognizeTextResultFeature { }> } -export interface MLKitRecognizeTextLocalResult extends MLKitResult { +export interface MLKitRecognizeTextOnDeviceResult extends MLKitResult { features: Array; } @@ -25,13 +25,13 @@ export interface MLKitRecognizeTextCloudResult extends MLKitResult { text: string; } -export interface MLKitRecognizeTextLocalOptions extends MLKitOptions { +export interface MLKitRecognizeTextOnDeviceOptions extends MLKitOptions { } export interface MLKitRecognizeTextCloudOptions extends MLKitCloudOptions { } -export declare function recognizeTextLocal(options: MLKitRecognizeTextLocalOptions): Promise; +export declare function recognizeTextOnDevice(options: MLKitRecognizeTextOnDeviceOptions): Promise; export declare function recognizeTextCloud(options: MLKitRecognizeTextCloudOptions): Promise; diff --git a/src/mlkit/textrecognition/index.ios.ts b/src/mlkit/textrecognition/index.ios.ts index ceb1b8b3..f5f6e794 100644 --- a/src/mlkit/textrecognition/index.ios.ts +++ b/src/mlkit/textrecognition/index.ios.ts @@ -1,6 +1,6 @@ import { ImageSource } from 
"tns-core-modules/image-source"; import { MLKitOptions } from "../"; -import { MLKitRecognizeTextLocalOptions, MLKitRecognizeTextCloudOptions, MLKitRecognizeTextLocalResult, MLKitRecognizeTextCloudResult } from "./"; +import { MLKitRecognizeTextOnDeviceOptions, MLKitRecognizeTextCloudOptions, MLKitRecognizeTextOnDeviceResult, MLKitRecognizeTextCloudResult } from "./"; import { MLKitTextRecognition as MLKitTextRecognitionBase } from "./textrecognition-common"; import { MLKitRecognizeTextResultFeature } from "./index"; @@ -18,7 +18,7 @@ export class MLKitTextRecognition extends MLKitTextRecognitionBase { this.notify({ eventName: MLKitTextRecognition.scanResultEvent, object: this, - value: getLocalResult(features) + value: getOnDeviceResult(features) }); } }; @@ -30,8 +30,8 @@ export class MLKitTextRecognition extends MLKitTextRecognitionBase { } -function getLocalResult(features: NSArray): MLKitRecognizeTextLocalResult { - const result = { +function getOnDeviceResult(features: NSArray): MLKitRecognizeTextOnDeviceResult { + const result = { features: [] }; @@ -70,7 +70,7 @@ function getLocalResult(features: NSArray): MLKitRecognizeTextLoc return result; } -export function recognizeTextLocal(options: MLKitRecognizeTextLocalOptions): Promise { +export function recognizeTextOnDevice(options: MLKitRecognizeTextOnDeviceOptions): Promise { return new Promise((resolve, reject) => { try { const firVision: FIRVision = FIRVision.vision(); @@ -80,11 +80,11 @@ export function recognizeTextLocal(options: MLKitRecognizeTextLocalOptions): Pro if (error !== null) { reject(error.localizedDescription); } else if (features !== null) { - resolve(getLocalResult(features)); + resolve(getOnDeviceResult(features)); } }); } catch (ex) { - console.log("Error in firebase.mlkit.recognizeTextLocal: " + ex); + console.log("Error in firebase.mlkit.recognizeTextOnDevice: " + ex); reject(ex); } }); @@ -101,12 +101,16 @@ export function recognizeTextCloud(options: MLKitRecognizeTextCloudOptions): Pro 
const textDetector = firVision.cloudTextDetectorWithOptions(fIRVisionCloudDetectorOptions); textDetector.detectInImageCompletion(getImage(options), (cloudText: FIRVisionCloudText, error: NSError) => { + console.log(">>> recognizeTextCloud error? " + error + ", cloudText? " + cloudText); if (error !== null) { reject(error.localizedDescription); } else if (cloudText !== null) { + console.log(">>> recognizeTextCloud result: " + cloudText); resolve({ text: cloudText.text }); + } else { + reject("Unknown error :'("); } }); } catch (ex) {