Merge pull request capacitor-community#1 from nick-krantz/android-capture-sample

add captureSample plugin call to android & ios module
Nick Krantz authored Mar 30, 2021
2 parents 8905cb2 + afcf4c0 commit 6c41719
Showing 7 changed files with 199 additions and 68 deletions.
26 changes: 24 additions & 2 deletions README.md
@@ -146,7 +146,7 @@ ion-content {
--background: transparent;
}
```
Take into account that this will make every ion-content in the application transparent. If you want to show the camera preview on only one page, add a custom class to your ion-content and make it transparent:

```css
.my-custom-camera-preview-content {
@@ -203,7 +203,7 @@ CameraPreview.hide();
<!-- <info>Take the picture. If width and height are not specified or are 0 it will use the defaults. If width and height are specified, it will choose a supported photo size that is closest to width and height specified and has closest aspect ratio to the preview. The argument `quality` defaults to `85` and specifies the quality/compression value: `0=max compression`, `100=max quality`.</info><br/> -->

```javascript
import { CameraPreviewPictureOptions } from '@capacitor-community/camera-preview';

const cameraPreviewPictureOptions: CameraPreviewPictureOptions = {
quality: 50
@@ -216,6 +216,28 @@ const base64PictureData = result.value;

```

### captureSample(options)

| Option   | Values        | Description                                                            |
|----------|---------------|------------------------------------------------------------------------|
| quality | number | (optional) The picture quality, 0 - 100, default 85 |

<info>Captures a sample image from the video stream. Android and iOS only; the web implementation falls back to the `capture` method. This can be used to perform real-time analysis on the current frame of the video. The argument `quality` defaults to `85` and specifies the quality/compression value: `0=max compression`, `100=max quality`.</info><br/>

```javascript
import { CameraSampleOptions } from '@capacitor-community/camera-preview';

const cameraSampleOptions: CameraSampleOptions = {
quality: 50
};

const result = await CameraPreview.captureSample(cameraSampleOptions);
const base64PictureData = result.value;

// do something with base64PictureData

```
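
Because `captureSample` grabs the current video frame, it lends itself to periodic polling for the real-time analysis mentioned above. The sketch below is illustrative only and not part of the plugin: it assumes `CameraPreview` is imported as in the README's other examples, and `analyzeFrame` is a hypothetical stand-in for your own frame-processing logic.

```javascript
import { CameraSampleOptions } from '@capacitor-community/camera-preview';

const frameOptions: CameraSampleOptions = { quality: 50 };

// Hypothetical helper: replace with your own frame-processing logic.
const analyzeFrame = (base64Frame: string) => {
  console.log(`received a frame of ${base64Frame.length} base64 characters`);
};

// Poll the running preview roughly twice per second.
const intervalId = window.setInterval(async () => {
  const result = await CameraPreview.captureSample(frameOptions);
  analyzeFrame(result.value);
}, 500);

// Clear the interval before calling CameraPreview.stop():
// window.clearInterval(intervalId);
```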

### getSupportedFlashModes()

<info>Get the flash modes supported by the currently started camera device. Returns an array of the supported flash modes. See <code>[FLASH_MODE](#camera_Settings.FlashMode)</code> for the possible values.</info><br/>
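
A minimal usage sketch, assuming `CameraPreview` is imported as in the examples above; the `result` field follows the `CameraPreviewPlugin` definition in `src/definitions.ts` shown later in this diff:

```javascript
import { CameraPreviewFlashMode } from '@capacitor-community/camera-preview';

const flashModes = await CameraPreview.getSupportedFlashModes();
const supported: CameraPreviewFlashMode[] = flashModes.result;

// For example, only show a torch toggle in the UI when the device supports it.
const torchAvailable = supported.includes('torch');
console.log('Supported flash modes:', supported, 'torch available:', torchAvailable);
```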
@@ -93,6 +93,18 @@ public void capture(PluginCall call) {
fragment.takePicture(width, height, quality);
}


@PluginMethod()
public void captureSample(PluginCall call) {
if(this.hasCamera(call) == false){
call.error("Camera is not running");
return;
}
saveCall(call);
Integer quality = call.getInt("quality", 85);
fragment.takeSnapshot(quality);
}

@PluginMethod()
public void stop(final PluginCall call) {
bridge.getActivity().runOnUiThread(new Runnable() {
@@ -309,14 +321,9 @@ public void onPictureTakenError(String message) {

@Override
public void onSnapshotTaken(String originalPicture) {
JSObject jsObject = new JSObject();
jsObject.put("value", originalPicture);
getSavedCall().success(jsObject);
}

@Override
153 changes: 105 additions & 48 deletions ios/Plugin/CameraController.swift
@@ -17,6 +17,7 @@ class CameraController: NSObject {
var frontCamera: AVCaptureDevice?
var frontCameraInput: AVCaptureDeviceInput?

var dataOutput: AVCaptureVideoDataOutput?
var photoOutput: AVCapturePhotoOutput?

var rearCamera: AVCaptureDevice?
@@ -27,6 +28,8 @@ class CameraController: NSObject {
var flashMode = AVCaptureDevice.FlashMode.off
var photoCaptureCompletionBlock: ((UIImage?, Error?) -> Void)?

var sampleBufferCaptureCompletionBlock: ((UIImage?, Error?) -> Void)?

var highResolutionOutput: Bool = false
}

@@ -91,12 +94,31 @@ extension CameraController {
captureSession.startRunning()
}

func configureDataOutput() throws {
guard let captureSession = self.captureSession else { throw CameraControllerError.captureSessionIsMissing }

self.dataOutput = AVCaptureVideoDataOutput()
self.dataOutput?.videoSettings = [
(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)
]
self.dataOutput?.alwaysDiscardsLateVideoFrames = true
if captureSession.canAddOutput(self.dataOutput!) {
captureSession.addOutput(self.dataOutput!)
}

captureSession.commitConfiguration()

let queue = DispatchQueue(label: "DataOutput", attributes: [])
self.dataOutput?.setSampleBufferDelegate(self, queue: queue)
}

DispatchQueue(label: "prepare").async {
do {
createCaptureSession()
try configureCaptureDevices()
try configureDeviceInputs()
try configurePhotoOutput()
try configureDataOutput()
}

catch {
@@ -108,6 +130,8 @@ extension CameraController {
}

DispatchQueue.main.async {
self.updateVideoOrientation()

completionHandler(nil)
}
}
@@ -119,36 +143,44 @@ extension CameraController {
self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill

view.layer.insertSublayer(self.previewLayer!, at: 0)
self.previewLayer?.frame = view.frame
}

func updateVideoOrientation() {
assert(Thread.isMainThread) // UIApplication.statusBarOrientation requires the main thread.

let videoOrientation: AVCaptureVideoOrientation
switch UIDevice.current.orientation {
case .portrait:
videoOrientation = .portrait
case .landscapeLeft:
videoOrientation = .landscapeRight
case .landscapeRight:
videoOrientation = .landscapeLeft
case .portraitUpsideDown:
videoOrientation = .portraitUpsideDown
case .faceUp, .faceDown, .unknown:
fallthrough
@unknown default:
switch UIApplication.shared.statusBarOrientation {
case .portrait:
videoOrientation = .portrait
case .landscapeLeft:
videoOrientation = .landscapeLeft
case .landscapeRight:
videoOrientation = .landscapeRight
case .portraitUpsideDown:
videoOrientation = .portraitUpsideDown
case .unknown:
fallthrough
@unknown default:
videoOrientation = .portrait
}
}

previewLayer?.connection?.videoOrientation = videoOrientation
dataOutput?.connections.forEach { $0.videoOrientation = videoOrientation }
}

func switchCameras() throws {
@@ -212,36 +244,19 @@ extension CameraController {
settings.flashMode = self.flashMode
settings.isHighResolutionPhotoEnabled = self.highResolutionOutput;

let currentDevice: UIDevice = .current
let deviceOrientation: UIDeviceOrientation = currentDevice.orientation
let statusBarOrientation = UIApplication.shared.statusBarOrientation
if deviceOrientation == .portrait {
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.portrait
}else if (deviceOrientation == .landscapeLeft){
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.landscapeRight
}else if (deviceOrientation == .landscapeRight){
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.landscapeLeft
}else if (deviceOrientation == .portraitUpsideDown){
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.portraitUpsideDown
}else if (deviceOrientation == .faceUp || deviceOrientation == .faceDown){
switch (statusBarOrientation) {
case .portrait:
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.portrait
case .landscapeRight:
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.landscapeRight
case .landscapeLeft:
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.landscapeLeft
case .portraitUpsideDown:
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.portraitUpsideDown
default:
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.portrait
}
}else {
self.photoOutput?.connection(with: AVMediaType.video)?.videoOrientation = AVCaptureVideoOrientation.portrait
}
self.photoOutput?.capturePhoto(with: settings, delegate: self)
self.photoCaptureCompletionBlock = completion
}

func captureSample(completion: @escaping (UIImage?, Error?) -> Void) {
guard let captureSession = captureSession,
captureSession.isRunning else {
completion(nil, CameraControllerError.captureSessionIsMissing)
return
}

self.sampleBufferCaptureCompletionBlock = completion
}

func getSupportedFlashModes() throws -> [String] {
var currentCamera: AVCaptureDevice?
@@ -377,6 +392,48 @@ extension CameraController: AVCapturePhotoCaptureDelegate {
}
}

extension CameraController: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let completion = sampleBufferCaptureCompletionBlock else { return }

guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
completion(nil, CameraControllerError.unknown)
return
}

CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
defer { CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly) }

let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
let width = CVPixelBufferGetWidth(imageBuffer)
let height = CVPixelBufferGetHeight(imageBuffer)
let colorSpace = CGColorSpaceCreateDeviceRGB()
let bitmapInfo: UInt32 = CGBitmapInfo.byteOrder32Little.rawValue |
CGImageAlphaInfo.premultipliedFirst.rawValue

let context = CGContext(
data: baseAddress,
width: width,
height: height,
bitsPerComponent: 8,
bytesPerRow: bytesPerRow,
space: colorSpace,
bitmapInfo: bitmapInfo
)

guard let cgImage = context?.makeImage() else {
completion(nil, CameraControllerError.unknown)
return
}

let image = UIImage(cgImage: cgImage)
completion(image.fixedOrientation(), nil)

sampleBufferCaptureCompletionBlock = nil
}
}




1 change: 1 addition & 0 deletions ios/Plugin/Plugin.m
@@ -7,6 +7,7 @@
CAP_PLUGIN_METHOD(start, CAPPluginReturnPromise);
CAP_PLUGIN_METHOD(stop, CAPPluginReturnPromise);
CAP_PLUGIN_METHOD(capture, CAPPluginReturnPromise);
CAP_PLUGIN_METHOD(captureSample, CAPPluginReturnPromise);
CAP_PLUGIN_METHOD(flip, CAPPluginReturnPromise);
CAP_PLUGIN_METHOD(getSupportedFlashModes, CAPPluginReturnPromise);
CAP_PLUGIN_METHOD(setFlashMode, CAPPluginReturnPromise);
47 changes: 37 additions & 10 deletions ios/Plugin/Plugin.swift
@@ -26,24 +26,16 @@ public class CameraPreview: CAPPlugin {
let height = self.paddingBottom != nil ? self.height! - self.paddingBottom!: self.height!;

if UIDevice.current.orientation.isLandscape {
self.previewView.frame = CGRect(x: self.y!, y: self.x!, width: height, height: self.width!)
self.cameraController.previewLayer?.frame = self.previewView.frame
}

if UIDevice.current.orientation.isPortrait {
self.previewView.frame = CGRect(x: self.x!, y: self.y!, width: self.width!, height: self.height!)
self.cameraController.previewLayer?.frame = self.previewView.frame
}

cameraController.updateVideoOrientation()
}

@objc func start(_ call: CAPPluginCall) {
@@ -178,6 +170,41 @@ public class CameraPreview: CAPPlugin {
}
}
}

@objc func captureSample(_ call: CAPPluginCall) {
DispatchQueue.main.async {
let quality: Int? = call.getInt("quality", 85)

self.cameraController.captureSample { image, error in
guard let image = image else {
print("Image capture error: \(String(describing: error))")
call.reject("Image capture error: \(String(describing: error))")
return
}

let imageData: Data?
if (self.cameraPosition == "front") {
let flippedImage = image.withHorizontallyFlippedOrientation()
imageData = flippedImage.jpegData(compressionQuality: CGFloat(quality!) / 100.0)
} else {
imageData = image.jpegData(compressionQuality: CGFloat(quality!) / 100.0)
}

if (self.storeToFile == false){
let imageBase64 = imageData?.base64EncodedString()
call.resolve(["value": imageBase64!])
} else {
do {
let fileUrl = self.getTempFilePath()
try imageData?.write(to:fileUrl)
call.resolve(["value": fileUrl.absoluteString])
} catch {
call.reject("Error writing image to file")
}
}
}
}
}

@objc func getSupportedFlashModes(_ call: CAPPluginCall) {
do {
7 changes: 7 additions & 0 deletions src/definitions.ts
@@ -43,12 +43,19 @@ export interface CameraPreviewPictureOptions {
/** The picture quality, 0 - 100, default 85 */
quality?: number;
}

export interface CameraSampleOptions {
/** The picture quality, 0 - 100, default 85 */
quality?: number;
}

export type CameraPreviewFlashMode = 'off' | 'on' | 'auto' | 'red-eye' | 'torch';

export interface CameraPreviewPlugin {
start(options: CameraPreviewOptions): Promise<{}>;
stop(): Promise<{}>;
capture(options: CameraPreviewPictureOptions): Promise<{ value: string }>;
captureSample(options: CameraSampleOptions): Promise<{ value: string }>;
getSupportedFlashModes(): Promise<{
result: CameraPreviewFlashMode[]
}>;

0 comments on commit 6c41719
