This repository has been archived by the owner on Apr 4, 2023. It is now read-only.

Commit a79d05e: #699 Add ML Kit support (iOS camera stream)
EddyVerbruggen committed May 11, 2018 (parent: ebdb66a)
Showing 30 changed files with 833 additions and 27 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -7,6 +7,8 @@ package-lock.json
build/
*.log.*
src/**/*.d.ts
src/platforms/ios_lib/TNSMLKitCamera/TNSMLKitCamera.xcodeproj/project.xcworkspace
src/platforms/ios_lib/TNSMLKitCamera/TNSMLKitCamera.xcodeproj/xcuserdata
src/platforms/ios/Podfile
src/platforms/ios/build.xcconfig
src/platforms/android/include.gradle
6 changes: 6 additions & 0 deletions demo-ng/app/app.module.ts
@@ -5,6 +5,12 @@ import { AppComponent } from "./app.component";

import { ItemsComponent } from "./item/items.component";

import { registerElement } from "nativescript-angular/element-registry";

registerElement("MLKitBarcodeScanner", () => require("nativescript-plugin-firebase/mlkit/barcodescanning").MLKitBarcodeScanner);
// registerElement("MLKitFaceRecognition", () => require("nativescript-plugin-firebase/mlkit/facedetection").MLKitFaceRecognition);
// registerElement("MLKitTextRecognition", () => require("nativescript-plugin-firebase/mlkit/textrecognition").MLKitTextRecognition);

@NgModule({
bootstrap: [
AppComponent
7 changes: 7 additions & 0 deletions demo-ng/app/item/items.component.html
@@ -5,6 +5,13 @@
<StackLayout class="page">
<Label text="After pressing those buttons, check the console log" textWrap="true"></Label>

<MLKitBarcodeScanner
width="400"
height="300"
formats="QR_CODE, EAN_8"
(scanResult)="onBarcodeScanResult($event)">
</MLKitBarcodeScanner>

<Label text="Authentication" class="h2"></Label>
<Button text="login anonymously" (tap)="loginAnonymously()" class="button button-user"></Button>

6 changes: 6 additions & 0 deletions demo-ng/app/item/items.component.ts
@@ -2,6 +2,7 @@ import { Component, NgZone } from "@angular/core";
import { firestore } from "nativescript-plugin-firebase";
import { Observable } from "rxjs/Observable";
import { City } from "../model/City";
import { MLKitScanBarcodesResult } from "../../../src/mlkit/barcodescanning";

const firebase = require("nativescript-plugin-firebase/app");
const firebaseWebApi = require("nativescript-plugin-firebase/app");
@@ -26,6 +27,11 @@ export class ItemsComponent {
// AngularFireModule.initializeApp({});
}

onBarcodeScanResult(event): void {
const result: MLKitScanBarcodesResult = event.value;
console.log("Received barcode(s): " + JSON.stringify(result));
}

public loginAnonymously(): void {
firebase.auth().signInAnonymously()
.then(() => console.log("Logged in"))
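A slightly more explicit variant of the handler above (a sketch, not part of the commit) walks the individual barcodes, using the value and format fields declared in the barcode scanning typings further down:

onBarcodeScanResult(event): void {
    // event.value carries the MLKitScanBarcodesResult emitted by the scanResult event.
    const result: MLKitScanBarcodesResult = event.value;
    result.barcodes.forEach(barcode =>
        console.log("Scanned " + barcode.format + " barcode with value " + barcode.value));
}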
1 change: 1 addition & 0 deletions src/.npmignore
@@ -5,6 +5,7 @@ tsconfig.json
references.d.ts
platforms/android/libraryproject/
platforms/ios/typings/
platforms/ios_lib/
platforms/android/typings/
platforms/web
platforms/ios/Podfile
2 changes: 0 additions & 2 deletions src/mlkit/barcodescanning/index.d.ts
@@ -2,8 +2,6 @@ import { MLKitOptions } from "../";
import { BarcodeFormat } from "./barcodescanning-common";
import { MLKitResult, MLKitCameraView } from "../index";

export declare const BarcodeFormat: typeof BarcodeFormat;

export interface MLKitScanBarcodesResult extends MLKitResult {
barcodes: Array<{
value: string;
63 changes: 47 additions & 16 deletions src/mlkit/barcodescanning/index.ios.ts
@@ -1,37 +1,68 @@
import { ImageSource } from "tns-core-modules/image-source";
import { BarcodeFormat } from "./barcodescanning-common";
import { MLKitCameraView as MLKitBarcodeScannerBase } from "../mlkit-cameraview";
import { MLKitScanBarcodesOptions, MLKitScanBarcodesResult } from "./index";
import { MLKitOptions } from "../index";
import { BarcodeFormat, MLKitBarcodeScanner as MLKitBarcodeScannerBase } from "./barcodescanning-common";

export { BarcodeFormat };

export class MLKitBarcodeScanner extends MLKitBarcodeScannerBase {

protected createDetector(): any {
}

protected createFailureListener(): any {
let formats: Array<BarcodeFormat>;
if (this.formats) {
formats = [];
const requestedFormats = this.formats.split(",");
requestedFormats.forEach(format => formats.push(BarcodeFormat[format.trim().toUpperCase()]))
}
return getBarcodeDetector(formats);
}

protected createSuccessListener(): any {
return (barcodes: NSArray<FIRVisionBarcode>, error: NSError) => {
if (error !== null) {
console.log(error.localizedDescription);

} else if (barcodes !== null) {
const result = <MLKitScanBarcodesResult>{
barcodes: []
};

for (let i = 0, l = barcodes.count; i < l; i++) {
const barcode: FIRVisionBarcode = barcodes.objectAtIndex(i);
result.barcodes.push({
value: barcode.rawValue,
format: BarcodeFormat[barcode.format]
});
}

this.notify({
eventName: MLKitBarcodeScanner.scanResultEvent,
object: this,
value: result
});
}
}
}
}

// public onLayout(left: number, top: number, right: number, bottom: number): void {
// super.onLayout(left, top, right, bottom);
// if (this._hasSupport && this.ios) {
// this._reader.previewLayer.frame = this.ios.layer.bounds;
// }
// }
function getBarcodeDetector(formats?: Array<BarcodeFormat>): any {
if (formats && formats.length > 0) {
// TODO
const barcodeDetector: FIRVisionBarcodeDetector = FIRVision.vision().barcodeDetector();
return barcodeDetector;
// const firebaseVisionBarcodeDetectorOptions =
// new com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetectorOptions.Builder()
// .setBarcodeFormats(formats[0], formats) // the second argument is a varargs; let's make it easy and just do it like this
// .build();
// return com.google.firebase.ml.vision.FirebaseVision.getInstance().getVisionBarcodeDetector(firebaseVisionBarcodeDetectorOptions);
} else {
return FIRVision.vision().barcodeDetector();
}
}

export function scanBarcodes(options: MLKitScanBarcodesOptions): Promise<MLKitScanBarcodesResult> {
return new Promise((resolve, reject) => {
try {
const firVision: FIRVision = FIRVision.vision();
// TODO pass in formats
const barcodeDetector: FIRVisionBarcodeDetector = firVision.barcodeDetector();
// const textDetector: FIRVisionBarcodeDetector = firVision.barcodeDetectorWithOptions();
const barcodeDetector = getBarcodeDetector(options.formats);

barcodeDetector.detectInImageCompletion(getImage(options), (barcodes: NSArray<FIRVisionBarcode>, error: NSError) => {
if (error !== null) {
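The TODO in getBarcodeDetector leaves the requested formats unused on iOS. A possible follow-up, sketched here (not part of this commit) under the assumption that the generated typings expose FIRVisionBarcodeDetectorOptions.initWithFormats and FIRVision.barcodeDetectorWithOptions from the Firebase ML Kit iOS SDK, and that BarcodeFormat values map to the native bit-mask values:

function getBarcodeDetector(formats?: Array<BarcodeFormat>): FIRVisionBarcodeDetector {
  const firVision: FIRVision = FIRVision.vision();
  if (formats && formats.length > 0) {
    // FIRVisionBarcodeFormat values are bit flags, so OR the requested formats together.
    const formatBitmask = formats.reduce((combined, format) => combined | format, 0);
    const options = FIRVisionBarcodeDetectorOptions.alloc().initWithFormats(formatBitmask);
    return firVision.barcodeDetectorWithOptions(options);
  }
  return firVision.barcodeDetector();
}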
1 change: 0 additions & 1 deletion src/mlkit/facedetection/index.d.ts
@@ -1,5 +1,4 @@
import { MLKitOptions } from "../";
import { BarcodeFormat } from "./barcodescanning-common";
import { MLKitCameraView, MLKitResult } from "../index";

export interface MLKitDetectFacesResult extends MLKitResult {
49 changes: 49 additions & 0 deletions src/mlkit/facedetection/index.ios.ts
@@ -0,0 +1,49 @@
import { ImageSource } from "tns-core-modules/image-source";
import { MLKitDetectFacesOptions, MLKitDetectFacesResult } from "./";
import { MLKitOptions } from "../index";

// export class MLKitFaceDetection extends MLKitFaceDetectionBase {
// constructor() {
// super();
// console.log(">>> MLKitFaceDetection constr");
// }
// }

export function detectFaces(options: MLKitDetectFacesOptions): Promise<MLKitDetectFacesResult> {
return new Promise((resolve, reject) => {
try {
const firVision: FIRVision = FIRVision.vision();
const faceDetector: FIRVisionFaceDetector = firVision.faceDetector();

faceDetector.detectInImageCompletion(getImage(options), (faces: NSArray<FIRVisionFace>, error: NSError) => {
if (error !== null) {
reject(error.localizedDescription);

} else if (faces !== null) {
const result = <MLKitDetectFacesResult>{
faces: []
};

for (let i = 0, l = faces.count; i < l; i++) {
const face: FIRVisionFace = faces.objectAtIndex(i);
result.faces.push({
smilingProbability: face.hasSmilingProbability ? face.smilingProbability : undefined,
leftEyeOpenProbability: face.hasLeftEyeOpenProbability ? face.leftEyeOpenProbability : undefined,
rightEyeOpenProbability: face.hasRightEyeOpenProbability ? face.rightEyeOpenProbability : undefined
});
}
resolve(result);
}
});
} catch (ex) {
console.log("Error in firebase.mlkit.detectFaces: " + ex);
reject(ex);
}
});
}

// TODO move
function getImage(options: MLKitOptions): FIRVisionImage {
const image: UIImage = options.image instanceof ImageSource ? options.image.ios : options.image.imageSource.ios;
return FIRVisionImage.alloc().initWithImage(image);
}
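Calling the new still-image face detection from app code could look roughly like this (a sketch; the import path mirrors the barcode scanning module registered in app.module.ts, the "selfie" resource name is made up, and it is assumed the options only need an image):

import { ImageSource, fromResource } from "tns-core-modules/image-source";
import { detectFaces, MLKitDetectFacesResult } from "nativescript-plugin-firebase/mlkit/facedetection";

// Run on-device face detection on a bundled image and log the probabilities.
const image: ImageSource = fromResource("selfie");

detectFaces({ image: image })
    .then((result: MLKitDetectFacesResult) => result.faces.forEach(face =>
        console.log("Smiling probability: " + face.smilingProbability)))
    .catch(err => console.log("Face detection error: " + err));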
3 changes: 3 additions & 0 deletions src/mlkit/index.ts
@@ -20,6 +20,9 @@ export interface MLKitResult {
imageSource?: ImageSource;
}

export declare class MLKitCameraView {
}

export declare class MLKitBarcodeScanner {
}

7 changes: 1 addition & 6 deletions src/mlkit/mlkit-cameraview.android.ts
@@ -26,8 +26,7 @@ export abstract class MLKitCameraView extends ContentView {
protected lastVisionImage;

createNativeView(): Object {
let v = super.createNativeView(); // TODO required even?
console.log("-- createNativeView");
let v = super.createNativeView();

if (this.hasCamera()) {
const permissionCb = (args: application.AndroidActivityRequestPermissionsEventData) => {
@@ -89,7 +88,6 @@
break;
}
}
console.log(">>> selected camera id: " + requestedCameraId);
const camera = android.hardware.Camera.open(requestedCameraId);

const sizePair = this.selectSizePair(camera, 640, 480); // TODO based on wrapping frame
@@ -106,14 +104,11 @@
let previewSize = sizePair.previewSize;

const parameters = camera.getParameters();
console.log(">>> parameters: " + parameters);

if (pictureSize) {
parameters.setPictureSize(pictureSize.width, pictureSize.height);
}

parameters.setPreviewSize(previewSize.width, previewSize.height);

parameters.setPreviewFormat(android.graphics.ImageFormat.NV21);

this.setRotation(camera, parameters, requestedCameraId);
104 changes: 104 additions & 0 deletions src/mlkit/mlkit-cameraview.ios.ts
@@ -0,0 +1,104 @@
import { ContentView } from "tns-core-modules/ui/content-view";

// TODO pause/resume handling
export abstract class MLKitCameraView extends ContentView {
private captureSession: AVCaptureSession;
private captureDevice: AVCaptureDevice;
private previewLayer: CALayer;
private cameraView: TNSMLKitCameraView;

private bytesToByteBuffer = new Map();
private pendingFrameData = null;
protected rotation;
protected lastVisionImage;

createNativeView(): Object {
let v = super.createNativeView();

if (this.canUseCamera()) {
this.initView();
} else {
console.log("There's no Camera on this device :(");
}
return v;
}

private canUseCamera() {
// TODO also check for availability of AVCaptureDeviceDiscoverySession (iOS 10)
return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) !== null;
}

private initView() {
// if (this.preferFrontCamera) {
// this._reader.switchDeviceInput();
// }

// find a suitable device
this.captureDevice = AVCaptureDeviceDiscoverySession.discoverySessionWithDeviceTypesMediaTypePosition(
<any>[AVCaptureDeviceTypeBuiltInWideAngleCamera],
AVMediaTypeVideo,
AVCaptureDevicePosition.Back
).devices.firstObject;

// begin the session
this.captureSession = AVCaptureSession.new();
this.captureSession.sessionPreset = AVCaptureSessionPresetMedium;

const captureDeviceInput = AVCaptureDeviceInput.deviceInputWithDeviceError(this.captureDevice);
this.captureSession.addInput(captureDeviceInput);

this.previewLayer = AVCaptureVideoPreviewLayer.layerWithSession(this.captureSession);

if (this.ios) {
this.ios.layer.addSublayer(this.previewLayer);
}

this.captureSession.startRunning();

this.cameraView = TNSMLKitCameraView.alloc().initWithCaptureSession(this.captureSession);
this.cameraView.processEveryXFrames = 5;
this.cameraView.delegate = TNSMLKitCameraViewDelegateImpl.createWithOwnerResultCallbackAndOptions(
new WeakRef(this),
data => {
},
{});
}

public onLayout(left: number, top: number, right: number, bottom: number): void {
super.onLayout(left, top, right, bottom);
if (this.ios && this.canUseCamera()) {
this.previewLayer.frame = this.ios.layer.bounds;
}
}

abstract createDetector(): any;

abstract createSuccessListener(): any;
}

class TNSMLKitCameraViewDelegateImpl extends NSObject implements TNSMLKitCameraViewDelegate {
public static ObjCProtocols = [TNSMLKitCameraViewDelegate];

private owner: WeakRef<MLKitCameraView>;
private resultCallback: (message: any) => void;
private options?: any;

private detector: any;
private onSuccessListener: any;

public static createWithOwnerResultCallbackAndOptions(owner: WeakRef<MLKitCameraView>, callback: (message: any) => void, options?: any): TNSMLKitCameraViewDelegateImpl {
let delegate = <TNSMLKitCameraViewDelegateImpl>TNSMLKitCameraViewDelegateImpl.new();
delegate.owner = owner;
delegate.options = options;
delegate.resultCallback = callback;
delegate.detector = owner.get().createDetector();
delegate.onSuccessListener = owner.get().createSuccessListener();
return delegate;
}

cameraDidOutputSampleBuffer(image: UIImage): void {
if (image) {
this.detector.detectInImageCompletion(FIRVisionImage.alloc().initWithImage(image), this.onSuccessListener);
}
}
}
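The delegate above pushes every processed frame into the detector supplied by the concrete subclass, so a new live detector only has to implement createDetector and createSuccessListener. A hypothetical live text recognizer (a sketch of a possible textrecognition/index.ios.ts, not part of this commit) could look like the following, assuming FIRVision.vision().textDetector() and a FIRVisionText type with a text property from the Firebase ML Kit iOS SDK, plus a scanResult event like the barcode scanner's:

import { MLKitCameraView } from "../mlkit-cameraview";

export class MLKitTextRecognition extends MLKitCameraView {

  static scanResultEvent: string = "scanResult";

  createDetector(): any {
    return FIRVision.vision().textDetector();
  }

  createSuccessListener(): any {
    return (features: NSArray<FIRVisionText>, error: NSError) => {
      if (error !== null) {
        console.log(error.localizedDescription);
      } else if (features !== null && features.count > 0) {
        const blocks: Array<string> = [];
        for (let i = 0, l = features.count; i < l; i++) {
          blocks.push(features.objectAtIndex(i).text);
        }
        // Notify listeners the same way the barcode scanner does.
        this.notify({
          eventName: MLKitTextRecognition.scanResultEvent,
          object: this,
          value: { features: blocks }
        });
      }
    };
  }
}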
4 changes: 3 additions & 1 deletion src/mlkit/textrecognition/index.d.ts
@@ -1,5 +1,5 @@
import { MLKitMultiEngineOptions } from "../";
import { MLKitResult } from "../index";
import { MLKitCameraView, MLKitResult } from "../index";

export interface MLKitRecognizeTextResult extends MLKitResult {
features: Array<{
@@ -11,3 +11,5 @@ export interface MLKitRecognizeTextOptions extends MLKitMultiEngineOptions {
}

export declare function recognizeText(options: MLKitRecognizeTextOptions): Promise<MLKitRecognizeTextResult>;

export declare class MLKitTextRecognition extends MLKitCameraView {}
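A quick usage sketch for the recognizeText function declared here (not part of the commit; the import path mirrors the other ML Kit modules, the "receipt" resource name is made up, and it is assumed the options only need an image):

import { fromResource } from "tns-core-modules/image-source";
import { recognizeText, MLKitRecognizeTextResult } from "nativescript-plugin-firebase/mlkit/textrecognition";

// Run on-device text recognition on a bundled image and dump the detected features.
recognizeText({ image: fromResource("receipt") })
    .then((result: MLKitRecognizeTextResult) => console.log("Recognized text: " + JSON.stringify(result.features)))
    .catch(err => console.log("Text recognition error: " + err));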
2 changes: 1 addition & 1 deletion src/package.json
@@ -40,7 +40,7 @@
"plugin.tscwatch": "npm run tsc -- -w",
"package": "cd ../publish && rimraf ./package && ./pack.sh",
"demo.ios": "npm run preparedemo && cd ../demo && tns run ios",
"demo-ng.ios": "npm run preparedemo-ng && cd ../demo-ng && rimraf platforms/ios && tns run ios",
"demo-ng.ios": "npm run preparedemo-ng && cd ../demo-ng && tns run ios",
"demo.android": "npm run preparedemo && cd ../demo && rimraf platforms/android && tns run android",
"demo.android.linked": "npm run tsc && cd ../demo && tns run android --syncAllFiles",
"demo-ng.android": "npm run preparedemo-ng && cd ../demo-ng && tns run android",
8 changes: 8 additions & 0 deletions src/platforms/ios/Info.plist
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSCameraUsageDescription</key>
<string/>
</dict>
</plist>