[camera_android] Add NV21 as an image stream format flutter#3277 (flutter#3639)

This contains the changes for camera_android from flutter#3277
acoutts authored and nploi committed Jul 16, 2023

1 parent 3e78b25 commit 8a6b9b5
Showing 10 changed files with 696 additions and 46 deletions.
5 changes: 5 additions & 0 deletions packages/camera/camera_android/CHANGELOG.md
@@ -1,3 +1,8 @@
## 0.10.7

* Adds support for NV21 as a new streaming format in Android which includes correct handling of
image padding when present.

## 0.10.6+2

* Fixes compatibility with AGP versions older than 4.2.
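The image-padding handling mentioned in the 0.10.7 entry refers to row stride: Android image planes may pad each row past the visible width, so consumers expecting packed NV21 bytes must trim that padding. A minimal sketch of the arithmetic, using a hypothetical 1280x720 frame with a 1536-byte row stride (the stride value is an assumption borrowed from the tests in this commit):

    int width = 1280, height = 720, rowStride = 1536;
    int paddingPerRow = rowStride - width;              // 256 bytes of padding per row
    int packedYSize = width * height;                   // 921600 bytes once padding is trimmed
    int paddedYSize = rowStride * (height - 1) + width; // 1105664 bytes as delivered (last row unpadded)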
packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
@@ -21,7 +21,6 @@
import android.hardware.camera2.params.SessionConfiguration;
import android.media.CamcorderProfile;
import android.media.EncoderProfiles;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaRecorder;
import android.os.Build;
@@ -58,19 +57,18 @@
import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
import io.flutter.plugins.camera.features.sensororientation.DeviceOrientationManager;
import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
import io.flutter.plugins.camera.media.ImageStreamReader;
import io.flutter.plugins.camera.media.MediaRecorderBuilder;
import io.flutter.plugins.camera.types.CameraCaptureProperties;
import io.flutter.plugins.camera.types.CaptureTimeoutsWrapper;
import io.flutter.view.TextureRegistry.SurfaceTextureEntry;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.Executors;

@FunctionalInterface
@@ -90,6 +88,7 @@ class Camera
supportedImageFormats = new HashMap<>();
supportedImageFormats.put("yuv420", ImageFormat.YUV_420_888);
supportedImageFormats.put("jpeg", ImageFormat.JPEG);
supportedImageFormats.put("nv21", ImageFormat.NV21);
}

/**
@@ -131,7 +130,7 @@ class Camera
CameraDeviceWrapper cameraDevice;
CameraCaptureSession captureSession;
private ImageReader pictureImageReader;
ImageReader imageStreamReader;
ImageStreamReader imageStreamReader;
/** {@link CaptureRequest.Builder} for the camera preview */
CaptureRequest.Builder previewRequestBuilder;

@@ -306,7 +305,7 @@ public void open(String imageFormatGroup) throws CameraAccessException {
imageFormat = ImageFormat.YUV_420_888;
}
imageStreamReader =
ImageReader.newInstance(
new ImageStreamReader(
resolutionFeature.getPreviewSize().getWidth(),
resolutionFeature.getPreviewSize().getHeight(),
imageFormat,
@@ -536,7 +535,7 @@ private void startCapture(boolean record, boolean stream) throws CameraAccessExc
surfaces.add(mediaRecorder.getSurface());
successCallback = () -> mediaRecorder.start();
}
if (stream) {
if (stream && imageStreamReader != null) {
surfaces.add(imageStreamReader.getSurface());
}

@@ -1191,49 +1190,21 @@ public void onListen(Object o, EventChannel.EventSink imageStreamSink) {

@Override
public void onCancel(Object o) {
imageStreamReader.setOnImageAvailableListener(null, backgroundHandler);
if (imageStreamReader == null) {
return;
}

imageStreamReader.removeListener(backgroundHandler);
}
});
}

void setImageStreamImageAvailableListener(final EventChannel.EventSink imageStreamSink) {
imageStreamReader.setOnImageAvailableListener(
reader -> {
Image img = reader.acquireNextImage();
// Use acquireNextImage since image reader is only for one image.
if (img == null) return;

List<Map<String, Object>> planes = new ArrayList<>();
for (Image.Plane plane : img.getPlanes()) {
ByteBuffer buffer = plane.getBuffer();

byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes, 0, bytes.length);

Map<String, Object> planeBuffer = new HashMap<>();
planeBuffer.put("bytesPerRow", plane.getRowStride());
planeBuffer.put("bytesPerPixel", plane.getPixelStride());
planeBuffer.put("bytes", bytes);

planes.add(planeBuffer);
}
if (imageStreamReader == null) {
return;
}

Map<String, Object> imageBuffer = new HashMap<>();
imageBuffer.put("width", img.getWidth());
imageBuffer.put("height", img.getHeight());
imageBuffer.put("format", img.getFormat());
imageBuffer.put("planes", planes);
imageBuffer.put("lensAperture", this.captureProps.getLastLensAperture());
imageBuffer.put("sensorExposureTime", this.captureProps.getLastSensorExposureTime());
Integer sensorSensitivity = this.captureProps.getLastSensorSensitivity();
imageBuffer.put(
"sensorSensitivity", sensorSensitivity == null ? null : (double) sensorSensitivity);

final Handler handler = new Handler(Looper.getMainLooper());
handler.post(() -> imageStreamSink.success(imageBuffer));
img.close();
},
backgroundHandler);
imageStreamReader.subscribeListener(this.captureProps, imageStreamSink, backgroundHandler);
}

void closeCaptureSession() {
packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/media/ImageStreamReader.java
@@ -0,0 +1,228 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package io.flutter.plugins.camera.media;

import android.graphics.ImageFormat;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.Looper;
import android.view.Surface;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import io.flutter.plugin.common.EventChannel;
import io.flutter.plugins.camera.types.CameraCaptureProperties;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Wraps an ImageReader to allow for testing of the image handler.
public class ImageStreamReader {

/**
* The image format we are going to send back to dart. Usually it's the same as streamImageFormat,
* but for NV21 we actually request YUV frames and convert them to NV21 before sending them to
* dart.
*/
private final int dartImageFormat;

private final ImageReader imageReader;
private final ImageStreamReaderUtils imageStreamReaderUtils;

/**
* Creates a new instance of the {@link ImageStreamReader}.
*
* @param imageReader is the image reader that will receive frames
* @param imageStreamReaderUtils is an instance of {@link ImageStreamReaderUtils}
*/
@VisibleForTesting
public ImageStreamReader(
@NonNull ImageReader imageReader,
int dartImageFormat,
@NonNull ImageStreamReaderUtils imageStreamReaderUtils) {
this.imageReader = imageReader;
this.dartImageFormat = dartImageFormat;
this.imageStreamReaderUtils = imageStreamReaderUtils;
}

/**
* Creates a new instance of the {@link ImageStreamReader}.
*
* @param width is the image width
* @param height is the image height
* @param imageFormat is the {@link ImageFormat} that should be returned to dart.
* @param maxImages is how many images can be acquired at one time, usually 1.
*/
public ImageStreamReader(int width, int height, int imageFormat, int maxImages) {
this.dartImageFormat = imageFormat;
this.imageReader =
ImageReader.newInstance(width, height, computeStreamImageFormat(imageFormat), maxImages);
this.imageStreamReaderUtils = new ImageStreamReaderUtils();
}

/**
* Returns the image format to stream based on a requested input format. Usually it's the same
* except when dart is requesting NV21. In that case we stream YUV420 and process it into NV21
* before sending the frames over.
*
* @param dartImageFormat is the image format dart is requesting.
* @return the image format that should be streamed from the camera.
*/
@VisibleForTesting
public static int computeStreamImageFormat(int dartImageFormat) {
if (dartImageFormat == ImageFormat.NV21) {
return ImageFormat.YUV_420_888;
} else {
return dartImageFormat;
}
}

/**
* Processes a new frame (image) from the image reader and sends the frame to Dart.
*
* @param image is the image to be processed as an {@link Image}
* @param captureProps is the capture props from the camera class as {@link
* CameraCaptureProperties}
* @param imageStreamSink is the image stream sink from dart as a dart {@link
* EventChannel.EventSink}
*/
@VisibleForTesting
public void onImageAvailable(
@NonNull Image image,
@NonNull CameraCaptureProperties captureProps,
@NonNull EventChannel.EventSink imageStreamSink) {
try {
Map<String, Object> imageBuffer = new HashMap<>();

// Get plane data ready
if (dartImageFormat == ImageFormat.NV21) {
imageBuffer.put("planes", parsePlanesForNv21(image));
} else {
imageBuffer.put("planes", parsePlanesForYuvOrJpeg(image));
}

imageBuffer.put("width", image.getWidth());
imageBuffer.put("height", image.getHeight());
imageBuffer.put("format", dartImageFormat);
imageBuffer.put("lensAperture", captureProps.getLastLensAperture());
imageBuffer.put("sensorExposureTime", captureProps.getLastSensorExposureTime());
Integer sensorSensitivity = captureProps.getLastSensorSensitivity();
imageBuffer.put(
"sensorSensitivity", sensorSensitivity == null ? null : (double) sensorSensitivity);

final Handler handler = new Handler(Looper.getMainLooper());
handler.post(() -> imageStreamSink.success(imageBuffer));
image.close();

} catch (IllegalStateException e) {
// Handle "buffer is inaccessible" errors that can happen on some devices from ImageStreamReaderUtils.yuv420ThreePlanesToNV21()
final Handler handler = new Handler(Looper.getMainLooper());
handler.post(
() ->
imageStreamSink.error(
"IllegalStateException",
"Caught IllegalStateException: " + e.getMessage(),
null));
image.close();
}
}

/**
* Given an input image, will return a list of maps suitable to send back to dart where each map
* describes the image plane.
*
<p>For YUV / JPEG, we do no further processing on the frame, so we simply send it as-is.
*
* @param image - the image to process.
* @return parsed map describing the image planes to be sent to dart.
*/
@NonNull
public List<Map<String, Object>> parsePlanesForYuvOrJpeg(@NonNull Image image) {
List<Map<String, Object>> planes = new ArrayList<>();

// For YUV420 and JPEG, just send the data as-is for each plane.
for (Image.Plane plane : image.getPlanes()) {
ByteBuffer buffer = plane.getBuffer();

byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes, 0, bytes.length);

Map<String, Object> planeBuffer = new HashMap<>();
planeBuffer.put("bytesPerRow", plane.getRowStride());
planeBuffer.put("bytesPerPixel", plane.getPixelStride());
planeBuffer.put("bytes", bytes);

planes.add(planeBuffer);
}
return planes;
}

/**
* Given an input image, will return a single-plane NV21 image. Assumes YUV420 as an input type.
*
* @param image - the image to process.
* @return parsed map describing the image planes to be sent to dart.
*/
@NonNull
public List<Map<String, Object>> parsePlanesForNv21(@NonNull Image image) {
List<Map<String, Object>> planes = new ArrayList<>();

// We will convert the YUV data to NV21 which is a single-plane image
ByteBuffer bytes =
imageStreamReaderUtils.yuv420ThreePlanesToNV21(
image.getPlanes(), image.getWidth(), image.getHeight());

Map<String, Object> planeBuffer = new HashMap<>();
planeBuffer.put("bytesPerRow", image.getWidth());
planeBuffer.put("bytesPerPixel", 1);
planeBuffer.put("bytes", bytes.array());
planes.add(planeBuffer);
return planes;
}

/** Returns the image reader surface. */
@NonNull
public Surface getSurface() {
return imageReader.getSurface();
}

/**
* Subscribes the image stream reader to handle incoming images using onImageAvailable().
*
* @param captureProps is the capture props from the camera class as {@link
* CameraCaptureProperties}
* @param imageStreamSink is the image stream sink from dart as {@link EventChannel.EventSink}
* @param handler is generally the background handler of the camera as {@link Handler}
*/
public void subscribeListener(
@NonNull CameraCaptureProperties captureProps,
@NonNull EventChannel.EventSink imageStreamSink,
@NonNull Handler handler) {
imageReader.setOnImageAvailableListener(
reader -> {
Image image = reader.acquireNextImage();
if (image == null) return;

onImageAvailable(image, captureProps, imageStreamSink);
},
handler);
}

/**
* Removes the listener from the image reader.
*
* @param handler is generally the background handler of the camera
*/
public void removeListener(@NonNull Handler handler) {
imageReader.setOnImageAvailableListener(null, handler);
}

/** Closes the image reader. */
public void close() {
imageReader.close();
}
}
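Read together with the Camera.java hunks above, the new class is used roughly as follows. This is a sketch, not code from this commit; the resolution values are placeholders, and captureProps, imageStreamSink, and backgroundHandler stand for the corresponding Camera fields:

    // Requesting NV21 makes the reader stream YUV_420_888 internally
    // (see computeStreamImageFormat) and convert each frame before it reaches dart.
    ImageStreamReader reader = new ImageStreamReader(1280, 720, ImageFormat.NV21, 1);
    Surface streamSurface = reader.getSurface(); // added to the capture session's surfaces
    reader.subscribeListener(captureProps, imageStreamSink, backgroundHandler); // forward frames to dart
    reader.removeListener(backgroundHandler); // on stream cancel
    reader.close();                           // on camera close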
packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/media/ImageStreamReaderUtils.java
@@ -0,0 +1,154 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Note: the code in this file is taken directly from the official Google MLKit example:
// https://github.com/googlesamples/mlkit

package io.flutter.plugins.camera.media;

import android.media.Image;
import androidx.annotation.NonNull;
import java.nio.ByteBuffer;

public class ImageStreamReaderUtils {
/**
* Converts YUV_420_888 to NV21 bytebuffer.
*
* <p>The NV21 format consists of a single byte array containing the Y, U and V values. For an
* image of size S, the first S positions of the array contain all the Y values. The remaining
* positions contain interleaved V and U values. U and V are subsampled by a factor of 2 in both
* dimensions, so there are S/4 U values and S/4 V values. In summary, the NV21 array will contain
* S Y values followed by S/4 VU values: YYYYYYYYYYYYYY(...)YVUVUVUVU(...)VU
*
* <p>YUV_420_888 is a generic format that can describe any YUV image where U and V are subsampled
* by a factor of 2 in both dimensions. {@link Image#getPlanes} returns an array with the Y, U and
* V planes. The Y plane is guaranteed not to be interleaved, so we can just copy its values into
* the first part of the NV21 array. The U and V planes may already have the representation in the
* NV21 format. This happens if the planes share the same buffer, the V buffer is one position
before the U buffer and the planes have a pixelStride of 2. If this is the case, we can just copy
* them to the NV21 array.
*
<p>https://github.com/googlesamples/mlkit/blob/master/android/vision-quickstart/app/src/main/java/com/google/mlkit/vision/demo/BitmapUtils.java
*/
@NonNull
public ByteBuffer yuv420ThreePlanesToNV21(
@NonNull Image.Plane[] yuv420888planes, int width, int height) {
int imageSize = width * height;
byte[] out = new byte[imageSize + 2 * (imageSize / 4)];

if (areUVPlanesNV21(yuv420888planes, width, height)) {
// Copy the Y values.
yuv420888planes[0].getBuffer().get(out, 0, imageSize);

ByteBuffer uBuffer = yuv420888planes[1].getBuffer();
ByteBuffer vBuffer = yuv420888planes[2].getBuffer();
// Get the first V value from the V buffer, since the U buffer does not contain it.
vBuffer.get(out, imageSize, 1);
// Copy the first U value and the remaining VU values from the U buffer.
uBuffer.get(out, imageSize + 1, 2 * imageSize / 4 - 1);
} else {
// Fallback to copying the UV values one by one, which is slower but also works.
// Unpack Y.
unpackPlane(yuv420888planes[0], width, height, out, 0, 1);
// Unpack U.
unpackPlane(yuv420888planes[1], width, height, out, imageSize + 1, 2);
// Unpack V.
unpackPlane(yuv420888planes[2], width, height, out, imageSize, 2);
}

return ByteBuffer.wrap(out);
}

/**
* Copyright 2020 Google LLC. All rights reserved.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*
* <p>Checks if the UV plane buffers of a YUV_420_888 image are in the NV21 format.
*
<p>https://github.com/googlesamples/mlkit/blob/master/android/vision-quickstart/app/src/main/java/com/google/mlkit/vision/demo/BitmapUtils.java
*/
private static boolean areUVPlanesNV21(@NonNull Image.Plane[] planes, int width, int height) {
int imageSize = width * height;

ByteBuffer uBuffer = planes[1].getBuffer();
ByteBuffer vBuffer = planes[2].getBuffer();

// Backup buffer properties.
int vBufferPosition = vBuffer.position();
int uBufferLimit = uBuffer.limit();

// Advance the V buffer by 1 byte, since the U buffer will not contain the first V value.
vBuffer.position(vBufferPosition + 1);
// Chop off the last byte of the U buffer, since the V buffer will not contain the last U value.
uBuffer.limit(uBufferLimit - 1);

// Check that the buffers are equal and have the expected number of elements.
boolean areNV21 =
(vBuffer.remaining() == (2 * imageSize / 4 - 2)) && (vBuffer.compareTo(uBuffer) == 0);

// Restore buffers to their initial state.
vBuffer.position(vBufferPosition);
uBuffer.limit(uBufferLimit);

return areNV21;
}

/**
* Copyright 2020 Google LLC. All rights reserved.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*
* <p>Unpack an image plane into a byte array.
*
* <p>The input plane data will be copied in 'out', starting at 'offset' and every pixel will be
* spaced by 'pixelStride'. Note that there is no row padding on the output.
*
<p>https://github.com/googlesamples/mlkit/blob/master/android/vision-quickstart/app/src/main/java/com/google/mlkit/vision/demo/BitmapUtils.java
*/
private static void unpackPlane(
@NonNull Image.Plane plane, int width, int height, byte[] out, int offset, int pixelStride)
throws IllegalStateException {
ByteBuffer buffer = plane.getBuffer();
buffer.rewind();

// Compute the size of the current plane.
// We assume that it has the same aspect ratio as the original image.
int numRow = (buffer.limit() + plane.getRowStride() - 1) / plane.getRowStride();
if (numRow == 0) {
return;
}
int scaleFactor = height / numRow;
int numCol = width / scaleFactor;

// Extract the data in the output buffer.
int outputPos = offset;
int rowStart = 0;
for (int row = 0; row < numRow; row++) {
int inputPos = rowStart;
for (int col = 0; col < numCol; col++) {
out[outputPos] = buffer.get(inputPos);
outputPos += pixelStride;
inputPos += plane.getPixelStride();
}
rowStart += plane.getRowStride();
}
}
}
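As a worked example of the layout yuv420ThreePlanesToNV21 produces: for a hypothetical 4x4 frame, S = 16, so the output holds S Y bytes followed by 2 * (S / 4) interleaved VU bytes:

    int width = 4, height = 4;
    int imageSize = width * height;                 // S = 16
    int nv21Size = imageSize + 2 * (imageSize / 4); // 16 + 8 = 24 bytes
    // Output layout:
    //   bytes  0..15  YYYYYYYYYYYYYYYY  (one Y per pixel)
    //   bytes 16..23  VUVUVUVU          (one interleaved VU pair per 2x2 pixel block)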
packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/media/ImageStreamReaderTest.java
@@ -0,0 +1,165 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package io.flutter.plugins.camera.media;

import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import android.graphics.ImageFormat;
import android.media.Image;
import android.media.ImageReader;
import io.flutter.plugin.common.EventChannel;
import io.flutter.plugins.camera.types.CameraCaptureProperties;
import java.nio.ByteBuffer;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;

@RunWith(RobolectricTestRunner.class)
public class ImageStreamReaderTest {
/** If we request YUV420 we should stream in YUV420. */
@Test
public void computeStreamImageFormat_computesCorrectStreamFormatYuv() {
int requestedStreamFormat = ImageFormat.YUV_420_888;
int result = ImageStreamReader.computeStreamImageFormat(requestedStreamFormat);
assertEquals(result, ImageFormat.YUV_420_888);
}

/**
* When we want to stream in NV21, we should still request YUV420 from the camera because we will
* convert it to NV21 before sending it to dart.
*/
@Test
public void computeStreamImageFormat_computesCorrectStreamFormatNv21() {
int requestedStreamFormat = ImageFormat.NV21;
int result = ImageStreamReader.computeStreamImageFormat(requestedStreamFormat);
assertEquals(result, ImageFormat.YUV_420_888);
}

/**
* If we are requesting NV21, then the planes should be processed and converted to NV21 before
* being sent to dart. We make sure yuv420ThreePlanesToNV21 is called when we are requesting NV21.
*/
@Test
public void onImageAvailable_parsesPlanesForNv21() {
// Dart wants NV21 frames
int dartImageFormat = ImageFormat.NV21;

ImageReader mockImageReader = mock(ImageReader.class);
ImageStreamReaderUtils mockImageStreamReaderUtils = mock(ImageStreamReaderUtils.class);
ImageStreamReader imageStreamReader =
new ImageStreamReader(mockImageReader, dartImageFormat, mockImageStreamReaderUtils);

ByteBuffer mockBytes = ByteBuffer.allocate(0);
when(mockImageStreamReaderUtils.yuv420ThreePlanesToNV21(any(), anyInt(), anyInt()))
.thenReturn(mockBytes);

// The image format as streamed from the camera
int imageFormat = ImageFormat.YUV_420_888;

// Mock YUV image
Image mockImage = mock(Image.class);
when(mockImage.getWidth()).thenReturn(1280);
when(mockImage.getHeight()).thenReturn(720);
when(mockImage.getFormat()).thenReturn(imageFormat);

// Mock planes. YUV images have 3 planes (Y, U, V).
Image.Plane planeY = mock(Image.Plane.class);
Image.Plane planeU = mock(Image.Plane.class);
Image.Plane planeV = mock(Image.Plane.class);

// Y plane is width*height
// Row stride is generally == width but when there is padding it will
// be larger. The numbers in this example are from a Vivo V2135 on 'high'
// setting (1280x720).
when(planeY.getBuffer()).thenReturn(ByteBuffer.allocate(1105664));
when(planeY.getRowStride()).thenReturn(1536);
when(planeY.getPixelStride()).thenReturn(1);

// U and V planes are always the same sizes/values.
// https://developer.android.com/reference/android/graphics/ImageFormat#YUV_420_888
when(planeU.getBuffer()).thenReturn(ByteBuffer.allocate(552703));
when(planeV.getBuffer()).thenReturn(ByteBuffer.allocate(552703));
when(planeU.getRowStride()).thenReturn(1536);
when(planeV.getRowStride()).thenReturn(1536);
when(planeU.getPixelStride()).thenReturn(2);
when(planeV.getPixelStride()).thenReturn(2);

// Add planes to image
Image.Plane[] planes = {planeY, planeU, planeV};
when(mockImage.getPlanes()).thenReturn(planes);

CameraCaptureProperties mockCaptureProps = mock(CameraCaptureProperties.class);
EventChannel.EventSink mockEventSink = mock(EventChannel.EventSink.class);
imageStreamReader.onImageAvailable(mockImage, mockCaptureProps, mockEventSink);

// Make sure we processed the frame with parsePlanesForNv21
verify(mockImageStreamReaderUtils)
.yuv420ThreePlanesToNV21(planes, mockImage.getWidth(), mockImage.getHeight());
}

/** If we are requesting YUV420, then we should send the 3-plane image as it is. */
@Test
public void onImageAvailable_parsesPlanesForYuv420() {
// Dart wants YUV420 frames
int dartImageFormat = ImageFormat.YUV_420_888;

ImageReader mockImageReader = mock(ImageReader.class);
ImageStreamReaderUtils mockImageStreamReaderUtils = mock(ImageStreamReaderUtils.class);
ImageStreamReader imageStreamReader =
new ImageStreamReader(mockImageReader, dartImageFormat, mockImageStreamReaderUtils);

ByteBuffer mockBytes = ByteBuffer.allocate(0);
when(mockImageStreamReaderUtils.yuv420ThreePlanesToNV21(any(), anyInt(), anyInt()))
.thenReturn(mockBytes);

// The image format as streamed from the camera
int imageFormat = ImageFormat.YUV_420_888;

// Mock YUV image
Image mockImage = mock(Image.class);
when(mockImage.getWidth()).thenReturn(1280);
when(mockImage.getHeight()).thenReturn(720);
when(mockImage.getFormat()).thenReturn(imageFormat);

// Mock planes. YUV images have 3 planes (Y, U, V).
Image.Plane planeY = mock(Image.Plane.class);
Image.Plane planeU = mock(Image.Plane.class);
Image.Plane planeV = mock(Image.Plane.class);

// Y plane is width*height
// Row stride is generally == width but when there is padding it will
// be larger. The numbers in this example are from a Vivo V2135 on 'high'
// setting (1280x720).
when(planeY.getBuffer()).thenReturn(ByteBuffer.allocate(1105664));
when(planeY.getRowStride()).thenReturn(1536);
when(planeY.getPixelStride()).thenReturn(1);

// U and V planes are always the same sizes/values.
// https://developer.android.com/reference/android/graphics/ImageFormat#YUV_420_888
when(planeU.getBuffer()).thenReturn(ByteBuffer.allocate(552703));
when(planeV.getBuffer()).thenReturn(ByteBuffer.allocate(552703));
when(planeU.getRowStride()).thenReturn(1536);
when(planeV.getRowStride()).thenReturn(1536);
when(planeU.getPixelStride()).thenReturn(2);
when(planeV.getPixelStride()).thenReturn(2);

// Add planes to image
Image.Plane[] planes = {planeY, planeU, planeV};
when(mockImage.getPlanes()).thenReturn(planes);

CameraCaptureProperties mockCaptureProps = mock(CameraCaptureProperties.class);
EventChannel.EventSink mockEventSink = mock(EventChannel.EventSink.class);
imageStreamReader.onImageAvailable(mockImage, mockCaptureProps, mockEventSink);

// Make sure we processed the frame with parsePlanesForYuvOrJpeg
verify(mockImageStreamReaderUtils, never()).yuv420ThreePlanesToNV21(any(), anyInt(), anyInt());
}
}
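The mocked buffer sizes above follow the usual plane-size formula, rowStride * (rows - 1) + lastRowLength, because the final row of a plane carries no trailing padding. A sketch of where the magic numbers come from:

    // Y plane: 720 rows, rowStride 1536, pixelStride 1, 1280 samples in the last row.
    int ySize = 1536 * (720 - 1) + 1280;  // 1105664
    // U/V planes: 360 chroma rows, rowStride 1536, pixelStride 2, 640 samples per row,
    // so the last row occupies 2 * (640 - 1) + 1 = 1279 bytes.
    int uvSize = 1536 * (360 - 1) + 1279; // 552703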
packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/media/ImageStreamReaderUtilsTest.java
@@ -0,0 +1,99 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package io.flutter.plugins.camera.media;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import android.graphics.ImageFormat;
import android.media.Image;
import java.nio.ByteBuffer;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;

@RunWith(RobolectricTestRunner.class)
public class ImageStreamReaderUtilsTest {
private ImageStreamReaderUtils imageStreamReaderUtils;

@Before
public void setUp() {
this.imageStreamReaderUtils = new ImageStreamReaderUtils();
}

Image getImage(int imageWidth, int imageHeight, int padding) {
int rowStride = imageWidth + padding;

int ySize = (rowStride * imageHeight) - padding;
int uSize = (ySize / 2) - (padding / 2);
int vSize = uSize;

// Mock YUV image
Image mockImage = mock(Image.class);
when(mockImage.getWidth()).thenReturn(imageWidth);
when(mockImage.getHeight()).thenReturn(imageHeight);
when(mockImage.getFormat()).thenReturn(ImageFormat.YUV_420_888);

// Mock planes. YUV images have 3 planes (Y, U, V).
Image.Plane planeY = mock(Image.Plane.class);
Image.Plane planeU = mock(Image.Plane.class);
Image.Plane planeV = mock(Image.Plane.class);

// Y plane is width*height
// Row stride is generally == width but when there is padding it will
// be larger.
// Here we add the given number of padding bytes per row.
when(planeY.getBuffer()).thenReturn(ByteBuffer.allocate(ySize));
when(planeY.getRowStride()).thenReturn(rowStride);
when(planeY.getPixelStride()).thenReturn(1);

// U and V planes are always the same sizes/values.
// https://developer.android.com/reference/android/graphics/ImageFormat#YUV_420_888
when(planeU.getBuffer()).thenReturn(ByteBuffer.allocate(uSize));
when(planeV.getBuffer()).thenReturn(ByteBuffer.allocate(vSize));
when(planeU.getRowStride()).thenReturn(rowStride);
when(planeV.getRowStride()).thenReturn(rowStride);
when(planeU.getPixelStride()).thenReturn(2);
when(planeV.getPixelStride()).thenReturn(2);

// Add planes to image
Image.Plane[] planes = {planeY, planeU, planeV};
when(mockImage.getPlanes()).thenReturn(planes);

return mockImage;
}

/** Ensure that passing in an image with padding returns one without padding */
@Test
public void yuv420ThreePlanesToNV21_trimsPaddingWhenPresent() {
Image mockImage = getImage(160, 120, 16);
int imageWidth = mockImage.getWidth();
int imageHeight = mockImage.getHeight();

ByteBuffer result =
imageStreamReaderUtils.yuv420ThreePlanesToNV21(
mockImage.getPlanes(), mockImage.getWidth(), mockImage.getHeight());
Assert.assertEquals(
((long) imageWidth * imageHeight) + (2 * ((long) (imageWidth / 2) * (imageHeight / 2))),
result.limit());
}

/** Ensure that passing in an image without padding returns the same size */
@Test
public void yuv420ThreePlanesToNV21_trimsPaddingWhenAbsent() {
Image mockImage = getImage(160, 120, 0);
int imageWidth = mockImage.getWidth();
int imageHeight = mockImage.getHeight();

ByteBuffer result =
imageStreamReaderUtils.yuv420ThreePlanesToNV21(
mockImage.getPlanes(), mockImage.getWidth(), mockImage.getHeight());
Assert.assertEquals(
((long) imageWidth * imageHeight) + (2 * ((long) (imageWidth / 2) * (imageHeight / 2))),
result.limit());
}
}
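For the 160x120 test frame, the expected packed NV21 size asserted in both tests works out to:

    int expected = (160 * 120) + 2 * ((160 / 2) * (120 / 2)); // 19200 + 9600 = 28800 bytes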
6 changes: 5 additions & 1 deletion packages/camera/camera_android/example/pubspec.yaml
@@ -14,7 +14,7 @@ dependencies:
# The example app is bundled with the plugin so we use a path dependency on
# the parent directory to use the current plugin's version.
path: ../
camera_platform_interface: ^2.4.0
camera_platform_interface: ^2.5.0
flutter:
sdk: flutter
path_provider: ^2.0.0
@@ -33,3 +33,7 @@ dev_dependencies:
flutter:
uses-material-design: true

# FOR TESTING ONLY. DO NOT MERGE.
dependency_overrides:
camera_android:
path: ../../../camera/camera_android
2 changes: 2 additions & 0 deletions packages/camera/camera_android/lib/src/type_conversion.dart
@@ -34,6 +34,8 @@ ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
return ImageFormatGroup.yuv420;
case 256: // android.graphics.ImageFormat.JPEG
return ImageFormatGroup.jpeg;
case 17: // android.graphics.ImageFormat.NV21
return ImageFormatGroup.nv21;
}

return ImageFormatGroup.unknown;
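The integer cases correspond to the android.graphics.ImageFormat constants used on the Java side:

    // android.graphics.ImageFormat values matched by the switch above:
    //   YUV_420_888 == 35  -> ImageFormatGroup.yuv420
    //   JPEG        == 256 -> ImageFormatGroup.jpeg
    //   NV21        == 17  -> ImageFormatGroup.nv21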
4 changes: 2 additions & 2 deletions packages/camera/camera_android/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_android
description: Android implementation of the camera plugin.
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_android
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.10.6+2
version: 0.10.7

environment:
sdk: ">=2.17.0 <4.0.0"
@@ -18,7 +18,7 @@ flutter:
dartPluginClass: AndroidCamera

dependencies:
camera_platform_interface: ^2.4.0
camera_platform_interface: ^2.5.0
flutter:
sdk: flutter
flutter_plugin_android_lifecycle: ^2.0.2
22 changes: 22 additions & 0 deletions packages/camera/camera_android/test/type_conversion_test.dart
@@ -57,4 +57,26 @@ void main() {
});
expect(cameraImage.format.group, ImageFormatGroup.yuv420);
});

test('CameraImageData has ImageFormatGroup.nv21', () {
final CameraImageData cameraImage =
cameraImageFromPlatformData(<dynamic, dynamic>{
'format': 17,
'height': 1,
'width': 4,
'lensAperture': 1.8,
'sensorExposureTime': 9991324,
'sensorSensitivity': 92.0,
'planes': <dynamic>[
<dynamic, dynamic>{
'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
'bytesPerPixel': 1,
'bytesPerRow': 4,
'height': 1,
'width': 4
}
]
});
expect(cameraImage.format.group, ImageFormatGroup.nv21);
});
}
