Add support for Exif and JPEG thumbnails
Also:
* This fixes a bug in JPEG compression when the color plane stride
doesn't match the image width
* This fixes the missing marker at the end of the BLOB buffer.
Bug: 324383963
Test: atest virtual_camera_tests
Test: atest VirtualCameraTest
Test: camera cts
Test: Manually examining captured image with exiftool
Change-Id: I4db804b264a98b8873b5465d4d8d0b8477e9d9a5
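For context on the BLOB fix below: the compressed JPEG must be followed by a
CameraBlob footer stored in the very last bytes of the BLOB gralloc buffer,
which is where the framework reads the actual JPEG size from. A minimal,
self-contained sketch of that idea (CameraBlobSketch is a hand-rolled stand-in
for the AIDL-generated CameraBlob, so its layout here is illustrative only; the
real change is in renderIntoBlobStreamBuffer):

#include <cstddef>
#include <cstdint>
#include <cstring>

// Illustrative stand-in for aidl::android::hardware::camera::device::CameraBlob.
struct CameraBlobSketch {
  int32_t blobId;         // CameraBlobId::JPEG (0x00FF in the camera HAL)
  int32_t blobSizeBytes;  // size of the compressed JPEG payload
};

// Writes the footer into the last sizeof(CameraBlobSketch) bytes of the buffer,
// mirroring what renderIntoBlobStreamBuffer does after compression succeeds.
void writeBlobFooter(uint8_t* blobBuffer, size_t bufferSize,
                     size_t compressedSize) {
  CameraBlobSketch footer{/*blobId=*/0x00FF,
                          /*blobSizeBytes=*/static_cast<int32_t>(compressedSize)};
  std::memcpy(blobBuffer + bufferSize - sizeof(footer), &footer, sizeof(footer));
}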
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index 74b2e2c..90530f6 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -12,6 +12,7 @@
"libbinder",
"libbinder_ndk",
"libcamera_metadata",
+ "libexif",
"liblog",
"libfmq",
"libgui",
@@ -47,7 +48,7 @@
name: "libvirtualcamera_utils",
srcs: [
"util/JpegUtil.cc",
- "util/MetadataBuilder.cc",
+ "util/MetadataUtil.cc",
"util/Util.cc",
"util/TestPatternHelper.cc",
"util/EglDisplayContext.cc",
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index 947b355..a70f6cf 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -38,7 +38,7 @@
#include "android/binder_status.h"
#include "log/log.h"
#include "system/camera_metadata.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
#include "util/Util.h"
namespace android {
@@ -81,10 +81,45 @@
constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
+constexpr float kAspectRatioEpsilon = 0.05;
+
+const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
+ Resolution(176, 144), Resolution(240, 144), Resolution(256, 144),
+ Resolution(240, 160), Resolution(240, 180)};
+
const std::array<PixelFormat, 3> kOutputFormats{
PixelFormat::IMPLEMENTATION_DEFINED, PixelFormat::YCBCR_420_888,
PixelFormat::BLOB};
+bool isApproximatelySameAspectRatio(const Resolution r1, const Resolution r2) {
+ float aspectRatio1 =
+ static_cast<float>(r1.width) / static_cast<float>(r1.height);
+ float aspectRatio2 =
+ static_cast<float>(r2.width) / static_cast<float>(r2.height);
+
+ return std::abs(aspectRatio1 - aspectRatio2) < kAspectRatioEpsilon;
+}
+
+std::vector<Resolution> getSupportedJpegThumbnailSizes(
+ const std::vector<SupportedStreamConfiguration>& configs) {
+ auto isSupportedByAnyInputConfig =
+ [&configs](const Resolution thumbnailResolution) {
+ return std::any_of(
+ configs.begin(), configs.end(),
+ [thumbnailResolution](const SupportedStreamConfiguration& config) {
+ return isApproximatelySameAspectRatio(
+ thumbnailResolution, Resolution(config.width, config.height));
+ });
+ };
+
+ std::vector<Resolution> supportedThumbnailSizes({Resolution(0, 0)});
+ std::copy_if(kStandardJpegThumbnailSizes.begin(),
+ kStandardJpegThumbnailSizes.end(),
+ std::back_insert_iterator(supportedThumbnailSizes),
+ isSupportedByAnyInputConfig);
+ return supportedThumbnailSizes;
+}
+
bool isSupportedOutputFormat(const PixelFormat pixelFormat) {
return std::find(kOutputFormats.begin(), kOutputFormats.end(), pixelFormat) !=
kOutputFormats.end();
@@ -199,8 +234,8 @@
.setControlAeLockAvailable(false)
.setControlAvailableAwbModes({ANDROID_CONTROL_AWB_MODE_AUTO})
.setControlZoomRatioRange(/*min=*/1.0, /*max=*/1.0)
- // TODO(b/301023410) Add JPEG Exif + thumbnail support.
- .setJpegAvailableThumbnailSizes({Resolution(0, 0)})
+ .setJpegAvailableThumbnailSizes(
+ getSupportedJpegThumbnailSizes(supportedInputConfig))
.setMaxJpegSize(kMaxJpegSize)
.setMaxFrameDuration(kMaxFrameDuration)
.setMaxNumberOutputStreams(
@@ -209,24 +244,34 @@
VirtualCameraDevice::kMaxNumberOfStallStreams)
.setPipelineMaxDepth(kPipelineMaxDepth)
.setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
- .setAvailableRequestKeys(
- {ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_AE_MODE,
- ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
- ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE,
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_TRIGGER,
- ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AWB_MODE,
- ANDROID_SCALER_CROP_REGION, ANDROID_CONTROL_EFFECT_MODE,
- ANDROID_CONTROL_MODE, ANDROID_CONTROL_SCENE_MODE,
- ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
- ANDROID_CONTROL_ZOOM_RATIO, ANDROID_STATISTICS_FACE_DETECT_MODE,
- ANDROID_FLASH_MODE})
+ .setAvailableRequestKeys({ANDROID_CONTROL_CAPTURE_INTENT,
+ ANDROID_CONTROL_AE_MODE,
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ ANDROID_CONTROL_AF_TRIGGER,
+ ANDROID_CONTROL_AF_MODE,
+ ANDROID_CONTROL_AWB_MODE,
+ ANDROID_SCALER_CROP_REGION,
+ ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE,
+ ANDROID_CONTROL_SCENE_MODE,
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_CONTROL_ZOOM_RATIO,
+ ANDROID_STATISTICS_FACE_DETECT_MODE,
+ ANDROID_FLASH_MODE,
+ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_QUALITY})
.setAvailableResultKeys(
{ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AWB_MODE,
ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
ANDROID_FLASH_MODE, ANDROID_FLASH_STATE,
- ANDROID_SENSOR_TIMESTAMP, ANDROID_LENS_FOCAL_LENGTH})
+ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_SENSOR_TIMESTAMP,
+ ANDROID_LENS_FOCAL_LENGTH})
.setAvailableCapabilities(
{ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
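A worked example for the thumbnail-size filter above (numbers are illustrative):
a VGA input (640x480) has aspect ratio 640 / 480 ~ 1.333. The candidate 240x180
is also 1.333, so |1.333 - 1.333| = 0.000 < kAspectRatioEpsilon (0.05) and the
size is advertised, while 176x144 is ~ 1.222 and |1.333 - 1.222| ~ 0.111 exceeds
the epsilon, so it is filtered out. Resolution(0, 0) is always included, since a
(0, 0) thumbnail size means "no thumbnail". This matches the expectation in the
new VirtualCameraDeviceTest below.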
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index 720f02e..c274dc9 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -103,20 +103,23 @@
Resolution getMaxInputResolution() const;
// Maximal number of RAW streams - virtual camera doesn't support RAW streams.
- static const int32_t kMaxNumberOfRawStreams = 0;
+ static constexpr int32_t kMaxNumberOfRawStreams = 0;
// Maximal number of non-jpeg streams configured concurrently in single
// session. This should be at least 3 and can be increased at the potential
// cost of more CPU/GPU load if there are many concurrent streams.
- static const int32_t kMaxNumberOfProcessedStreams = 3;
+ static constexpr int32_t kMaxNumberOfProcessedStreams = 3;
// Maximal number of stalling (in case of virtual camera only jpeg for now)
// streams. Can be increaed at the cost of potential cost of more GPU/CPU
// load.
- static const int32_t kMaxNumberOfStallStreams = 1;
+ static constexpr int32_t kMaxNumberOfStallStreams = 1;
// Focal length for full frame sensor.
- constexpr static const float kFocalLength = 43.0;
+ static constexpr float kFocalLength = 43.0;
+
+ // Default JPEG compression quality.
+ static constexpr uint8_t kDefaultJpegQuality = 80;
private:
std::shared_ptr<VirtualCameraDevice> sharedFromThis();
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 7bbc6ea..218e114 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -19,16 +19,21 @@
#include <chrono>
#include <cstdint>
+#include <cstring>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
+#include <vector>
+#include "Exif.h"
#include "GLES/gl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraBlob.h"
+#include "aidl/android/hardware/camera/device/CameraBlobId.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
@@ -42,7 +47,7 @@
#include "ui/GraphicBuffer.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"
#include "utils/Errors.h"
@@ -53,6 +58,8 @@
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
+using ::aidl::android::hardware::camera::device::CameraBlob;
+using ::aidl::android::hardware::camera::device::CameraBlobId;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
@@ -65,6 +72,8 @@
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;
+using ::android::hardware::camera::common::helper::ExifUtils;
+
namespace {
using namespace std::chrono_literals;
@@ -76,8 +85,11 @@
// documented minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;
+static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024; // 32 KiB
+
CameraMetadata createCaptureResultMetadata(
const std::chrono::nanoseconds timestamp,
+ const RequestSettings& requestSettings,
const Resolution reportedSensorSize) {
std::unique_ptr<CameraMetadata> metadata =
MetadataBuilder()
@@ -93,6 +105,10 @@
.setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
.setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
.setFocalLength(VirtualCameraDevice::kFocalLength)
+ .setJpegQuality(requestSettings.jpegQuality)
+ .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+ requestSettings.thumbnailResolution.height)
+ .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
.setPipelineDepth(kPipelineDepth)
.setSensorTimestamp(timestamp)
.build();
@@ -171,6 +187,34 @@
}
}
+std::vector<uint8_t> createExif(
+ Resolution imageSize, const std::vector<uint8_t>& compressedThumbnail = {}) {
+ std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
+ exifUtils->initialize();
+ exifUtils->setImageWidth(imageSize.width);
+ exifUtils->setImageHeight(imageSize.height);
+ // TODO(b/324383963) Set Make/Model and orientation.
+
+ std::vector<uint8_t> app1Data;
+
+ size_t thumbnailDataSize = compressedThumbnail.size();
+ const void* thumbnailData =
+ thumbnailDataSize > 0
+ ? reinterpret_cast<const void*>(compressedThumbnail.data())
+ : nullptr;
+
+ if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
+ ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
+ return app1Data;
+ }
+
+ const uint8_t* data = exifUtils->getApp1Buffer();
+ const size_t size = exifUtils->getApp1Length();
+
+ app1Data.insert(app1Data.end(), data, data + size);
+ return app1Data;
+}
+
} // namespace
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -209,8 +253,11 @@
}
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
- int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
- : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
+ int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+ const RequestSettings& requestSettings)
+ : mFrameNumber(frameNumber),
+ mBuffers(requestBuffers),
+ mRequestSettings(requestSettings) {
}
int ProcessCaptureRequestTask::getFrameNumber() const {
@@ -222,6 +269,10 @@
return mBuffers;
}
+const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
+ return mRequestSettings;
+}
+
void VirtualCameraRenderThread::enqueueTask(
std::unique_ptr<ProcessCaptureRequestTask> task) {
std::lock_guard<std::mutex> lock(mLock);
@@ -308,8 +359,8 @@
captureResult.partialResult = 1;
captureResult.inputBuffer.streamId = -1;
captureResult.physicalCameraMetadata.resize(0);
- captureResult.result =
- createCaptureResultMetadata(timestamp, mReportedSensorSize);
+ captureResult.result = createCaptureResultMetadata(
+ timestamp, request.getRequestSettings(), mReportedSensorSize);
const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
captureResult.outputBuffers.resize(buffers.size());
@@ -338,9 +389,9 @@
}
auto status = streamConfig->format == PixelFormat::BLOB
- ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
- reqBuffer.getBufferId(),
- reqBuffer.getFence())
+ ? renderIntoBlobStreamBuffer(
+ reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+ request.getRequestSettings(), reqBuffer.getFence())
: renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
reqBuffer.getBufferId(),
reqBuffer.getFence());
@@ -420,9 +471,72 @@
}
}
+std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
+ const Resolution resolution, const int quality) {
+ if (resolution.width == 0 || resolution.height == 0) {
+ ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
+ return {};
+ }
+
+ ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
+ resolution.width, resolution.height, quality);
+ std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
+ mEglDisplayContext->getEglDisplay(), resolution.width, resolution.height);
+ if (framebuffer == nullptr) {
+ ALOGE(
+ "Failed to allocate temporary framebuffer for JPEG thumbnail "
+ "compression");
+ return {};
+ }
+
+ // TODO(b/324383963) Add support for letterboxing if the thumbnail size
+ // doesn't correspond to the input texture aspect ratio.
+ if (!renderIntoEglFramebuffer(*framebuffer).isOk()) {
+ ALOGE(
+ "Failed to render input texture into temporary framebuffer for JPEG "
+ "thumbnail");
+ return {};
+ }
+
+ std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
+ GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
+
+ if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ // This should never happen since we're allocating the temporary buffer
+ // with YUV420 layout above.
+ ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+ gBuffer->getPixelFormat());
+ return {};
+ }
+
+ android_ycbcr ycbcr;
+ status_t status =
+ gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
+ if (status != NO_ERROR) {
+ ALOGE("%s: Failed to lock graphic buffer while generating thumbnail: %d",
+ __func__, status);
+ return {};
+ }
+
+ std::vector<uint8_t> compressedThumbnail;
+ compressedThumbnail.resize(kJpegThumbnailBufferSize);
+ ALOGV("%s: Compressing thumbnail %d x %d", __func__, gBuffer->getWidth(),
+ gBuffer->getHeight());
+ std::optional<size_t> compressedSize =
+ compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), quality, ycbcr,
+ {}, compressedThumbnail.size(), compressedThumbnail.data());
+ if (!compressedSize.has_value()) {
+ ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
+ return {};
+ }
+ compressedThumbnail.resize(compressedSize.value());
+ return compressedThumbnail;
+}
+
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
- const int streamId, const int bufferId, sp<Fence> fence) {
- ALOGV("%s", __func__);
+ const int streamId, const int bufferId,
+ const RequestSettings& requestSettings, sp<Fence> fence) {
std::shared_ptr<AHardwareBuffer> hwBuffer =
mSessionContext.fetchHardwareBuffer(streamId, bufferId);
if (hwBuffer == nullptr) {
@@ -437,6 +551,9 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
+ ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
+ stream->width, stream->height, requestSettings.jpegQuality);
+
// Let's create YUV framebuffer and render the surface into this.
// This will take care about rescaling as well as potential format conversion.
std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
@@ -467,7 +584,7 @@
std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
- bool compressionSuccess = true;
+ std::optional<size_t> compressedSize;
if (gBuffer != nullptr) {
android_ycbcr ycbcr;
if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
@@ -488,23 +605,47 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
- compressionSuccess =
- compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
- stream->bufferSize, planes_info.planes[0].data);
+ std::vector<uint8_t> app1ExifData =
+ createExif(Resolution(stream->width, stream->height),
+ createThumbnail(requestSettings.thumbnailResolution,
+ requestSettings.thumbnailJpegQuality));
+ compressedSize = compressJpeg(
+ gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
+ ycbcr, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
+ planes_info.planes[0].data);
status_t res = gBuffer->unlock();
if (res != NO_ERROR) {
ALOGE("Failed to unlock graphic buffer: %d", res);
}
} else {
- compressionSuccess =
- compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
- planes_info.planes[0].data);
+ std::vector<uint8_t> app1ExifData =
+ createExif(Resolution(stream->width, stream->height));
+ compressedSize = compressBlackJpeg(
+ stream->width, stream->height, requestSettings.jpegQuality, app1ExifData,
+ stream->bufferSize - sizeof(CameraBlob), planes_info.planes[0].data);
}
+
+ if (!compressedSize.has_value()) {
+ ALOGE("%s: Failed to compress JPEG image", __func__);
+ AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
+
+ CameraBlob cameraBlob{
+ .blobId = CameraBlobId::JPEG,
+ .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
+
+ memcpy(reinterpret_cast<uint8_t*>(planes_info.planes[0].data) +
+ (stream->bufferSize - sizeof(cameraBlob)),
+ &cameraBlob, sizeof(cameraBlob));
+
AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
- ALOGV("Unlocked buffers");
- return compressionSuccess ? ndk::ScopedAStatus::ok()
- : cameraStatus(Status::INTERNAL_ERROR);
+
+ ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
+ __func__, compressedSize.value());
+
+ return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index c8f61f4..86dad0b 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -17,19 +17,23 @@
#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
+#include <cstdint>
#include <deque>
#include <future>
#include <memory>
#include <thread>
+#include <vector>
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "android/binder_auto_utils.h"
#include "util/EglDisplayContext.h"
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/EglSurfaceTexture.h"
+#include "util/Util.h"
namespace android {
namespace companion {
@@ -50,11 +54,18 @@
const sp<Fence> mFence;
};
+struct RequestSettings {
+ int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+ Resolution thumbnailResolution = Resolution(0, 0);
+ int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+};
+
// Represents single capture request to fill set of buffers.
class ProcessCaptureRequestTask {
public:
ProcessCaptureRequestTask(
- int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers);
+ int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+ const RequestSettings& requestSettings = {});
// Returns frame number corresponding to the request.
int getFrameNumber() const;
@@ -66,9 +77,12 @@
// so it cannot be access outside of its lifetime.
const std::vector<CaptureRequestBuffer>& getBuffers() const;
+ const RequestSettings& getRequestSettings() const;
+
private:
const int mFrameNumber;
const std::vector<CaptureRequestBuffer> mBuffers;
+ const RequestSettings mRequestSettings;
};
// Wraps dedicated rendering thread and rendering business with corresponding
@@ -123,13 +137,19 @@
// TODO(b/301023410) - Refactor the actual rendering logic off this class for
// easier testability.
+ // Creates a thumbnail with the specified size for the current image.
+ // The compressed thumbnail size is limited to 32 KiB.
+ // Returns a vector with the compressed thumbnail on success,
+ // or an empty vector otherwise.
+ std::vector<uint8_t> createThumbnail(Resolution resolution, int quality);
+
// Render current image to the BLOB buffer.
// If fence is specified, this function will block until the fence is cleared
// before writing to the buffer.
// Always called on render thread.
- ndk::ScopedAStatus renderIntoBlobStreamBuffer(const int streamId,
- const int bufferId,
- sp<Fence> fence = nullptr);
+ ndk::ScopedAStatus renderIntoBlobStreamBuffer(
+ const int streamId, const int bufferId,
+ const RequestSettings& requestSettings, sp<Fence> fence = nullptr);
// Render current image to the YCbCr buffer.
// If fence is specified, this function will block until the fence is cleared
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index d1ec763..dfa71f3 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -42,6 +42,7 @@
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferCache.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureRequest.h"
#include "aidl/android/hardware/camera/device/HalStream.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
@@ -61,7 +62,7 @@
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"
@@ -100,10 +101,16 @@
// Size of request/result metadata fast message queue.
// Setting to 0 to always disables FMQ.
-static constexpr size_t kMetadataMsgQueueSize = 0;
+constexpr size_t kMetadataMsgQueueSize = 0;
// Maximum number of buffers to use per single stream.
-static constexpr size_t kMaxStreamBuffers = 2;
+constexpr size_t kMaxStreamBuffers = 2;
+
+constexpr int32_t kDefaultJpegQuality = 80;
+constexpr int32_t kDefaultJpegThumbnailQuality = 70;
+
+// Thumbnail size (0,0) corresponds to a disabled thumbnail.
+const Resolution kDefaultJpegThumbnailSize(0, 0);
camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
const RequestTemplate type) {
@@ -150,6 +157,9 @@
.setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
.setFlashMode(ANDROID_FLASH_MODE_OFF)
.setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+ .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
+ .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
+ .setJpegThumbnailSize(0, 0)
.build();
if (metadata == nullptr) {
ALOGE("%s: Failed to construct metadata for default request type %s",
@@ -191,6 +201,16 @@
}));
}
+RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
+ return RequestSettings{
+ .jpegQuality = getJpegQuality(metadata).value_or(
+ VirtualCameraDevice::kDefaultJpegQuality),
+ .thumbnailResolution =
+ getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
+ .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
+ VirtualCameraDevice::kDefaultJpegQuality)};
+}
+
} // namespace
VirtualCameraSession::VirtualCameraSession(
@@ -298,7 +318,6 @@
inputWidth, inputHeight, Format::YUV_420_888);
}
- mFirstRequest.store(true);
return ndk::ScopedAStatus::ok();
}
@@ -440,13 +459,25 @@
const CaptureRequest& request) {
ALOGD("%s: request: %s", __func__, request.toString().c_str());
- if (mFirstRequest.exchange(false) && request.settings.metadata.empty()) {
- return cameraStatus(Status::ILLEGAL_ARGUMENT);
- }
-
std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
+ RequestSettings requestSettings;
{
std::lock_guard<std::mutex> lock(mLock);
+
+ // If the metadata is empty, the last received metadata applies; if it's
+ // non-empty, update it.
+ if (!request.settings.metadata.empty()) {
+ mCurrentRequestMetadata = request.settings;
+ }
+
+ // We don't have any metadata for this request - this means none was
+ // received in the first request, which is an error state.
+ if (mCurrentRequestMetadata.metadata.empty()) {
+ return cameraStatus(Status::ILLEGAL_ARGUMENT);
+ }
+
+ requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);
+
cameraCallback = mCameraDeviceCallback;
}
@@ -480,7 +511,7 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
- request.frameNumber, taskBuffers));
+ request.frameNumber, taskBuffers, requestSettings));
}
if (mVirtualCameraClientCallback != nullptr) {
diff --git a/services/camera/virtualcamera/VirtualCameraSession.h b/services/camera/virtualcamera/VirtualCameraSession.h
index 82a7a34..556314f 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.h
+++ b/services/camera/virtualcamera/VirtualCameraSession.h
@@ -25,6 +25,7 @@
#include "VirtualCameraSessionContext.h"
#include "aidl/android/companion/virtualcamera/IVirtualCameraCallback.h"
#include "aidl/android/hardware/camera/device/BnCameraDeviceSession.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "utils/Mutex.h"
@@ -138,7 +139,8 @@
int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;
- std::atomic_bool mFirstRequest{true};
+ aidl::android::hardware::camera::device::CameraMetadata mCurrentRequestMetadata
+ GUARDED_BY(mLock);
std::unique_ptr<VirtualCameraRenderThread> mRenderThread GUARDED_BY(mLock);
};
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index 35bf752..9146d8a 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -30,6 +30,8 @@
#include "gtest/gtest.h"
#include "log/log_main.h"
#include "system/camera_metadata.h"
+#include "util/MetadataUtil.h"
+#include "util/Util.h"
#include "utils/Errors.h"
namespace android {
@@ -47,6 +49,7 @@
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::camera::device::StreamType;
using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::testing::ElementsAre;
using ::testing::UnorderedElementsAreArray;
using metadata_stream_t =
camera_metadata_enum_android_scaler_available_stream_configurations_t;
@@ -306,6 +309,16 @@
EXPECT_FALSE(aidl_ret);
}
+TEST_F(VirtualCameraDeviceTest, thumbnailSizeWithCompatibleAspectRatio) {
+ CameraMetadata metadata;
+ ASSERT_TRUE(mCamera->getCameraCharacteristics(&metadata).isOk());
+
+ // The camera is configured with VGA input, so we expect a 240 x 180 thumbnail
+ // size in the characteristics, since it has the same aspect ratio.
+ EXPECT_THAT(getJpegAvailableThumbnailSizes(metadata),
+ ElementsAre(Resolution(0, 0), Resolution(240, 180)));
+}
+
} // namespace
} // namespace virtualcamera
} // namespace companion
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index 446c679..1af8b80 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -30,7 +30,7 @@
#include "android/binder_interface_utils.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
namespace android {
namespace companion {
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 2b19c13..98f2448 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -19,7 +19,7 @@
#include <cstddef>
#include <cstdint>
-#include <memory>
+#include <optional>
#include <vector>
#include "android/hardware_buffer.h"
@@ -34,11 +34,9 @@
namespace virtualcamera {
namespace {
-constexpr int kJpegQuality = 80;
-
class LibJpegContext {
public:
- LibJpegContext(int width, int height, const size_t outBufferSize,
+ LibJpegContext(int width, int height, int quality, const size_t outBufferSize,
void* outBuffer)
: mWidth(width),
mHeight(height),
@@ -76,7 +74,7 @@
jpeg_set_defaults(&mCompressStruct);
// Set quality and colorspace.
- jpeg_set_quality(&mCompressStruct, kJpegQuality, 1);
+ jpeg_set_quality(&mCompressStruct, quality, 1);
jpeg_set_colorspace(&mCompressStruct, JCS_YCbCr);
// Configure RAW input mode - this let's libjpeg know we're providing raw,
@@ -94,11 +92,31 @@
mCompressStruct.comp_info[2].v_samp_factor = 1;
}
- bool compress(const android_ycbcr& ycbr) {
+ LibJpegContext& setApp1Data(const uint8_t* app1Data, const size_t size) {
+ mApp1Data = app1Data;
+ mApp1DataSize = size;
+ return *this;
+ }
+
+ std::optional<size_t> compress(const android_ycbcr& ycbr) {
+ // TODO(b/301023410) - Add support for compressing image sizes not aligned
+ // with DCT size.
+ if (mWidth % (2 * DCTSIZE) || (mHeight % (2 * DCTSIZE))) {
+ ALOGE(
+ "%s: Compressing YUV420 image with size %dx%d not aligned with 2 * "
+ "DCTSIZE (%d) is not currently supported.",
+ __func__, mWidth, mHeight, 2 * DCTSIZE);
+ return std::nullopt;
+ }
+
+ // Chroma planes have 1/2 resolution of the original image.
+ const int cHeight = mHeight / 2;
+ const int cWidth = mWidth / 2;
+
// Prepare arrays of pointers to scanlines of each plane.
std::vector<JSAMPROW> yLines(mHeight);
- std::vector<JSAMPROW> cbLines(mHeight / 2);
- std::vector<JSAMPROW> crLines(mHeight / 2);
+ std::vector<JSAMPROW> cbLines(cHeight);
+ std::vector<JSAMPROW> crLines(cHeight);
uint8_t* y = static_cast<uint8_t*>(ycbr.y);
uint8_t* cb = static_cast<uint8_t*>(ycbr.cb);
@@ -107,23 +125,27 @@
// Since UV samples might be interleaved (semiplanar) we need to copy
// them to separate planes, since libjpeg doesn't directly
// support processing semiplanar YUV.
- const int c_samples = (mWidth / 2) * (mHeight / 2);
- std::vector<uint8_t> cb_plane(c_samples);
- std::vector<uint8_t> cr_plane(c_samples);
+ const int cSamples = cWidth * cHeight;
+ std::vector<uint8_t> cb_plane(cSamples);
+ std::vector<uint8_t> cr_plane(cSamples);
// TODO(b/301023410) - Use libyuv or ARM SIMD for "unzipping" the data.
- for (int i = 0; i < c_samples; ++i) {
- cb_plane[i] = *cb;
- cr_plane[i] = *cr;
- cb += ycbr.chroma_step;
- cr += ycbr.chroma_step;
+ int out_idx = 0;
+ for (int i = 0; i < cHeight; ++i) {
+ for (int j = 0; j < cWidth; ++j) {
+ cb_plane[out_idx] = cb[j * ycbr.chroma_step];
+ cr_plane[out_idx] = cr[j * ycbr.chroma_step];
+ out_idx++;
+ }
+ cb += ycbr.cstride;
+ cr += ycbr.cstride;
}
// Collect pointers to individual scanline of each plane.
for (int i = 0; i < mHeight; ++i) {
yLines[i] = y + i * ycbr.ystride;
}
- for (int i = 0; i < (mHeight / 2); ++i) {
+ for (int i = 0; i < cHeight; ++i) {
cbLines[i] = cb_plane.data() + i * (mWidth / 2);
crLines[i] = cr_plane.data() + i * (mWidth / 2);
}
@@ -131,7 +153,7 @@
return compress(yLines, cbLines, crLines);
}
- bool compressBlackImage() {
+ std::optional<size_t> compressBlackImage() {
// We only really need to prepare one scanline for Y and one shared scanline
// for Cb & Cr.
std::vector<uint8_t> yLine(mWidth, 0);
@@ -165,11 +187,18 @@
// Takes vector of pointers to Y / Cb / Cr scanlines as an input. Length of
// each vector needs to correspond to height of corresponding plane.
//
- // Returns true if compression is successful, false otherwise.
- bool compress(std::vector<JSAMPROW>& yLines, std::vector<JSAMPROW>& cbLines,
- std::vector<JSAMPROW>& crLines) {
+ // Returns the size of the compressed image in bytes on success,
+ // or an empty optional otherwise.
+ std::optional<size_t> compress(std::vector<JSAMPROW>& yLines,
+ std::vector<JSAMPROW>& cbLines,
+ std::vector<JSAMPROW>& crLines) {
jpeg_start_compress(&mCompressStruct, TRUE);
+ if (mApp1Data != nullptr && mApp1DataSize > 0) {
+ ALOGV("%s: Writing exif, size %zu B", __func__, mApp1DataSize);
+ jpeg_write_marker(&mCompressStruct, JPEG_APP0 + 1,
+ static_cast<const JOCTET*>(mApp1Data), mApp1DataSize);
+ }
+
while (mCompressStruct.next_scanline < mCompressStruct.image_height) {
const uint32_t batchSize = DCTSIZE * 2;
const uint32_t nl = mCompressStruct.next_scanline;
@@ -181,11 +210,11 @@
ALOGE("%s: compressed %u lines, expected %u (total %u/%u)",
__FUNCTION__, done, batchSize, mCompressStruct.next_scanline,
mCompressStruct.image_height);
- return false;
+ return std::nullopt;
}
}
jpeg_finish_compress(&mCompressStruct);
- return mSuccess;
+ return mEncodedSize;
}
// === libjpeg callbacks below ===
@@ -217,6 +246,10 @@
jpeg_error_mgr mErrorMgr;
jpeg_destination_mgr mDestinationMgr;
+ // APP1 data.
+ const uint8_t* mApp1Data = nullptr;
+ size_t mApp1DataSize = 0;
+
// Dimensions of the input image.
int mWidth;
int mHeight;
@@ -235,15 +268,26 @@
} // namespace
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
- size_t outBufferSize, void* outBuffer) {
- return LibJpegContext(width, height, outBufferSize, outBuffer).compress(ycbcr);
+std::optional<size_t> compressJpeg(const int width, const int height,
+ const int quality, const android_ycbcr& ycbcr,
+ const std::vector<uint8_t>& app1ExifData,
+ size_t outBufferSize, void* outBuffer) {
+ LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
+ if (!app1ExifData.empty()) {
+ context.setApp1Data(app1ExifData.data(), app1ExifData.size());
+ }
+ return context.compress(ycbcr);
}
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
- void* outBuffer) {
- return LibJpegContext(width, height, outBufferSize, outBuffer)
- .compressBlackImage();
+std::optional<size_t> compressBlackJpeg(const int width, const int height,
+ const int quality,
+ const std::vector<uint8_t>& app1ExifData,
+ size_t outBufferSize, void* outBuffer) {
+ LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
+ if (!app1ExifData.empty()) {
+ context.setApp1Data(app1ExifData.data(), app1ExifData.size());
+ }
+ return context.compressBlackImage();
}
} // namespace virtualcamera
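To make the stride fix from the commit message concrete: the previous
de-interleave loop advanced the Cb/Cr pointers by chroma_step for every sample,
which is only correct when the chroma row stride equals (width / 2) * chroma_step.
The rewritten loop above walks the plane row by row and advances by ycbcr.cstride
instead. A self-contained illustration of the same idea (helper name and output
buffers are illustrative, not part of the patch):

#include <cstdint>
#include <vector>

#include "system/graphics.h"

// De-interleaves semiplanar chroma into packed Cb/Cr planes, honoring the
// chroma row stride (ycbcr.cstride) instead of assuming tightly packed rows.
void deinterleaveChroma(const android_ycbcr& ycbcr, int width, int height,
                        std::vector<uint8_t>& cbOut,
                        std::vector<uint8_t>& crOut) {
  const int cWidth = width / 2;
  const int cHeight = height / 2;
  cbOut.resize(cWidth * cHeight);
  crOut.resize(cWidth * cHeight);
  const uint8_t* cb = static_cast<const uint8_t*>(ycbcr.cb);
  const uint8_t* cr = static_cast<const uint8_t*>(ycbcr.cr);
  int out = 0;
  for (int row = 0; row < cHeight; ++row) {
    for (int col = 0; col < cWidth; ++col) {
      cbOut[out] = cb[col * ycbcr.chroma_step];
      crOut[out] = cr[col * ycbcr.chroma_step];
      ++out;
    }
    // Row stride may exceed cWidth * chroma_step (e.g. due to alignment).
    cb += ycbcr.cstride;
    cr += ycbcr.cstride;
  }
}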
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index c44d0a8..e64fb4f 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -18,6 +18,7 @@
#define ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
#include <memory>
+#include <optional>
#include "android/hardware_buffer.h"
#include "system/graphics.h"
@@ -27,14 +28,34 @@
namespace virtualcamera {
// Jpeg-compress image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
- size_t outBufferSize, void* outBuffer);
+// * width - width of the image
+// * height - height of the image
+// * quality - 0-100, higher number corresponds to higher quality.
+// * ycbcr - android_ycbcr structure describing the layout of the input
+//     YUV420 image.
+// * app1ExifData - vector containing data to be included in APP1
+// segment. Can be empty.
+// * outBufferSize - capacity of the output buffer.
+// * outBuffer - output buffer to write compressed data into.
+// Returns the size of the compressed data if the compression was successful,
+// or an empty optional otherwise.
+std::optional<size_t> compressJpeg(int width, int height, int quality,
+ const android_ycbcr& ycbcr,
+ const std::vector<uint8_t>& app1ExifData,
+ size_t outBufferSize, void* outBuffer);
// Jpeg-compress all-black image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
- void* outBuffer);
+// * width - width of the image
+// * height - height of the image
+// * quality - 0-100, higher number corresponds to higher quality.
+// * app1ExifData - vector containing data to be included in APP1
+// segment. Can be empty.
+// * outBufferSize - capacity of the output buffer.
+// * outBuffer - output buffer to write compressed data into.
+// Returns the size of the compressed data if the compression was successful,
+// or an empty optional otherwise.
+std::optional<size_t> compressBlackJpeg(int width, int height, int quality,
+ const std::vector<uint8_t>& app1ExifData,
+ size_t outBufferSize, void* outBuffer);
} // namespace virtualcamera
} // namespace companion
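A minimal caller-side sketch of the new JpegUtil API (the helper name and the
footer handling around it are illustrative; the real call site is
renderIntoBlobStreamBuffer in VirtualCameraRenderThread.cc above):

#include <optional>
#include <vector>

#include "system/graphics.h"
#include "util/JpegUtil.h"

using android::companion::virtualcamera::compressJpeg;

// Hypothetical helper: compresses one YUV420 frame into a BLOB-style buffer,
// passing Exif as the APP1 payload and leaving `footerSize` bytes at the end
// for the CameraBlob marker.
std::optional<size_t> encodeFrame(const android_ycbcr& ycbcr, int width,
                                  int height, int quality,
                                  const std::vector<uint8_t>& app1ExifData,
                                  std::vector<uint8_t>& blobBuffer,
                                  size_t footerSize) {
  if (blobBuffer.size() <= footerSize) {
    return std::nullopt;  // no room for compressed data plus footer
  }
  return compressJpeg(width, height, quality, ycbcr, app1ExifData,
                      /*outBufferSize=*/blobBuffer.size() - footerSize,
                      /*outBuffer=*/blobBuffer.data());
}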
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
similarity index 88%
rename from services/camera/virtualcamera/util/MetadataBuilder.cc
rename to services/camera/virtualcamera/util/MetadataUtil.cc
index 2bbd58c..d223f17 100644
--- a/services/camera/virtualcamera/util/MetadataBuilder.cc
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -15,9 +15,9 @@
*/
// #define LOG_NDEBUG 0
-#define LOG_TAG "MetadataBuilder"
+#define LOG_TAG "MetadataUtil"
-#include "MetadataBuilder.h"
+#include "MetadataUtil.h"
#include <algorithm>
#include <cstdint>
@@ -396,6 +396,22 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setJpegQuality(const uint8_t quality) {
+ mEntryMap[ANDROID_JPEG_QUALITY] = asVectorOf<uint8_t>(quality);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailSize(const int width,
+ const int height) {
+ mEntryMap[ANDROID_JPEG_THUMBNAIL_SIZE] = std::vector<int32_t>({width, height});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailQuality(const uint8_t quality) {
+ mEntryMap[ANDROID_JPEG_THUMBNAIL_QUALITY] = asVectorOf<uint8_t>(quality);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setMaxNumberOutputStreams(
const int32_t maxRawStreams, const int32_t maxProcessedStreams,
const int32_t maxStallStreams) {
@@ -597,6 +613,67 @@
return aidlMetadata;
}
+std::optional<int32_t> getJpegQuality(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_QUALITY, &entry) !=
+ OK) {
+ return std::nullopt;
+ }
+
+ return *entry.data.u8;
+}
+
+std::optional<Resolution> getJpegThumbnailSize(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_SIZE,
+ &entry) != OK) {
+ return std::nullopt;
+ }
+
+ return Resolution(entry.data.i32[0], entry.data.i32[1]);
+}
+
+std::optional<int32_t> getJpegThumbnailQuality(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_QUALITY,
+ &entry) != OK) {
+ return std::nullopt;
+ }
+
+ return *entry.data.u8;
+}
+
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(
+ metadata, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, &entry) != OK) {
+ return {};
+ }
+
+ std::vector<Resolution> thumbnailSizes;
+ thumbnailSizes.reserve(entry.count / 2);
+ for (size_t i = 0; i < entry.count; i += 2) {
+ thumbnailSizes.emplace_back(entry.data.i32[i], entry.data.i32[i + 1]);
+ }
+ return thumbnailSizes;
+}
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.h b/services/camera/virtualcamera/util/MetadataUtil.h
similarity index 90%
rename from services/camera/virtualcamera/util/MetadataBuilder.h
rename to services/camera/virtualcamera/util/MetadataUtil.h
index df99089..9ddfd81 100644
--- a/services/camera/virtualcamera/util/MetadataBuilder.h
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
#include <chrono>
#include <cstdint>
@@ -98,7 +98,8 @@
sensorReadoutTimestamp);
// See ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS in CameraMetadataTag.aidl.
- MetadataBuilder& setAvailableFocalLengths(const std::vector<float>& focalLengths);
+ MetadataBuilder& setAvailableFocalLengths(
+ const std::vector<float>& focalLengths);
// See ANDROID_LENS_FOCAL_LENGTH in CameraMetadataTag.aidl.
MetadataBuilder& setFocalLength(float focalLength);
@@ -278,6 +279,15 @@
MetadataBuilder& setJpegAvailableThumbnailSizes(
const std::vector<Resolution>& thumbnailSizes);
+ // See JPEG_QUALITY in CaptureRequest.java.
+ MetadataBuilder& setJpegQuality(uint8_t quality);
+
+ // See JPEG_THUMBNAIL_SIZE in CaptureRequest.java.
+ MetadataBuilder& setJpegThumbnailSize(int width, int height);
+
+ // See JPEG_THUMBNAIL_QUALITY in CaptureRequest.java.
+ MetadataBuilder& setJpegThumbnailQuality(uint8_t quality);
+
// The maximum numbers of different types of output streams
// that can be configured and used simultaneously by a camera device.
//
@@ -353,8 +363,25 @@
bool mExtendWithAvailableCharacteristicsKeys = false;
};
+// Returns JPEG_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegQuality(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_SIZE from metadata, or nullopt if the key is not present.
+std::optional<Resolution> getJpegThumbnailSize(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegThumbnailQuality(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_AVAILABLE_THUMBNAIL_SIZES from metadata, or an empty vector if
+// the key is not present.
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
-#endif // ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
+#endif // ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index 2d0545d..ef986a6 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -79,6 +79,10 @@
return true;
}
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution) {
+ return os << resolution.width << "x" << resolution.height;
+}
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index 9f81bb1..15f7969 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -54,6 +54,7 @@
// Representation of resolution / size.
struct Resolution {
+ Resolution() = default;
Resolution(const int w, const int h) : width(w), height(h) {
}
@@ -69,10 +70,12 @@
return width == other.width && height == other.height;
}
- const int width;
- const int height;
+ int width = 0;
+ int height = 0;
};
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution);
+
} // namespace virtualcamera
} // namespace companion
} // namespace android