Add support for Exif and JPEG thumbnails
Also:
* This fixes a bug with JPEG compression when the color plane stride
doesn't match the image width
* This fixes a missing end-of-buffer marker (CameraBlob) at the end of the BLOB buffer.
Bug: 324383963
Test: atest virtual_camera_tests
Test: atest VirtualCameraTest
Test: camera cts
Test: Manually examining captured image with exiftool
Change-Id: I4db804b264a98b8873b5465d4d8d0b8477e9d9a5
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 7bbc6ea..218e114 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -19,16 +19,21 @@
#include <chrono>
#include <cstdint>
+#include <cstring>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
+#include <vector>
+#include "Exif.h"
#include "GLES/gl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraBlob.h"
+#include "aidl/android/hardware/camera/device/CameraBlobId.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
@@ -42,7 +47,7 @@
#include "ui/GraphicBuffer.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"
#include "utils/Errors.h"
@@ -53,6 +58,8 @@
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
+using ::aidl::android::hardware::camera::device::CameraBlob;
+using ::aidl::android::hardware::camera::device::CameraBlobId;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
@@ -65,6 +72,8 @@
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;
+using ::android::hardware::camera::common::helper::ExifUtils;
+
namespace {
using namespace std::chrono_literals;
@@ -76,8 +85,11 @@
// documented minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;
+static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024; // 32 KiB
+
CameraMetadata createCaptureResultMetadata(
const std::chrono::nanoseconds timestamp,
+ const RequestSettings& requestSettings,
const Resolution reportedSensorSize) {
std::unique_ptr<CameraMetadata> metadata =
MetadataBuilder()
@@ -93,6 +105,10 @@
.setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
.setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
.setFocalLength(VirtualCameraDevice::kFocalLength)
+ .setJpegQuality(requestSettings.jpegQuality)
+ .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+ requestSettings.thumbnailResolution.height)
+ .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
.setPipelineDepth(kPipelineDepth)
.setSensorTimestamp(timestamp)
.build();
@@ -171,6 +187,34 @@
}
}
+std::vector<uint8_t> createExif(
+ Resolution imageSize, const std::vector<uint8_t>& compressedThumbnail = {}) {
+ std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
+ exifUtils->initialize();
+ exifUtils->setImageWidth(imageSize.width);
+ exifUtils->setImageHeight(imageSize.height);
+ // TODO(b/324383963) Set Make/Model and orientation.
+
+ std::vector<uint8_t> app1Data;
+
+ size_t thumbnailDataSize = compressedThumbnail.size();
+ const void* thumbnailData =
+ thumbnailDataSize > 0
+ ? reinterpret_cast<const void*>(compressedThumbnail.data())
+ : nullptr;
+
+ if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
+ ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
+ return app1Data;
+ }
+
+ const uint8_t* data = exifUtils->getApp1Buffer();
+ const size_t size = exifUtils->getApp1Length();
+
+ app1Data.insert(app1Data.end(), data, data + size);
+ return app1Data;
+}
+
} // namespace
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -209,8 +253,11 @@
}
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
- int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
- : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
+ int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+ const RequestSettings& requestSettings)
+ : mFrameNumber(frameNumber),
+ mBuffers(requestBuffers),
+ mRequestSettings(requestSettings) {
}
int ProcessCaptureRequestTask::getFrameNumber() const {
@@ -222,6 +269,10 @@
return mBuffers;
}
+const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
+ return mRequestSettings;
+}
+
void VirtualCameraRenderThread::enqueueTask(
std::unique_ptr<ProcessCaptureRequestTask> task) {
std::lock_guard<std::mutex> lock(mLock);
@@ -308,8 +359,8 @@
captureResult.partialResult = 1;
captureResult.inputBuffer.streamId = -1;
captureResult.physicalCameraMetadata.resize(0);
- captureResult.result =
- createCaptureResultMetadata(timestamp, mReportedSensorSize);
+ captureResult.result = createCaptureResultMetadata(
+ timestamp, request.getRequestSettings(), mReportedSensorSize);
const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
captureResult.outputBuffers.resize(buffers.size());
@@ -338,9 +389,9 @@
}
auto status = streamConfig->format == PixelFormat::BLOB
- ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
- reqBuffer.getBufferId(),
- reqBuffer.getFence())
+ ? renderIntoBlobStreamBuffer(
+ reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+ request.getRequestSettings(), reqBuffer.getFence())
: renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
reqBuffer.getBufferId(),
reqBuffer.getFence());
@@ -420,9 +471,72 @@
}
}
+std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
+ const Resolution resolution, const int quality) {
+ if (resolution.width == 0 || resolution.height == 0) {
+ ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
+ return {};
+ }
+
+ ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
+ resolution.width, resolution.height, quality);
+ std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
+ mEglDisplayContext->getEglDisplay(), resolution.width, resolution.height);
+ if (framebuffer == nullptr) {
+ ALOGE(
+ "Failed to allocate temporary framebuffer for JPEG thumbnail "
+ "compression");
+ return {};
+ }
+
+ // TODO(b/324383963) Add support for letterboxing if the thumbnail size
+ // doesn't correspond
+ // to input texture aspect ratio.
+ if (!renderIntoEglFramebuffer(*framebuffer).isOk()) {
+ ALOGE(
+ "Failed to render input texture into temporary framebuffer for JPEG "
+ "thumbnail");
+ return {};
+ }
+
+ std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
+ GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
+
+ if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ // This should never happen since we're allocating the temporary buffer
+ // with YUV420 layout above.
+ ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+ gBuffer->getPixelFormat());
+ return {};
+ }
+
+ android_ycbcr ycbcr;
+ status_t status =
+ gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
+ if (status != NO_ERROR) {
+ ALOGE("%s: Failed to lock graphic buffer while generating thumbnail: %d",
+ __func__, status);
+ return {};
+ }
+
+ std::vector<uint8_t> compressedThumbnail;
+ compressedThumbnail.resize(kJpegThumbnailBufferSize);
+ ALOGE("%s: Compressing thumbnail %d x %d", __func__, gBuffer->getWidth(),
+ gBuffer->getHeight());
+ std::optional<size_t> compressedSize =
+ compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), quality, ycbcr,
+ {}, compressedThumbnail.size(), compressedThumbnail.data());
+ if (!compressedSize.has_value()) {
+ ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
+ return {};
+ }
+ compressedThumbnail.resize(compressedSize.value());
+ return compressedThumbnail;
+}
+
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
- const int streamId, const int bufferId, sp<Fence> fence) {
- ALOGV("%s", __func__);
+ const int streamId, const int bufferId,
+ const RequestSettings& requestSettings, sp<Fence> fence) {
std::shared_ptr<AHardwareBuffer> hwBuffer =
mSessionContext.fetchHardwareBuffer(streamId, bufferId);
if (hwBuffer == nullptr) {
@@ -437,6 +551,9 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
+ ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
+ stream->width, stream->height, requestSettings.jpegQuality);
+
// Let's create YUV framebuffer and render the surface into this.
// This will take care about rescaling as well as potential format conversion.
std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
@@ -467,7 +584,7 @@
std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
- bool compressionSuccess = true;
+ std::optional<size_t> compressedSize;
if (gBuffer != nullptr) {
android_ycbcr ycbcr;
if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
@@ -488,23 +605,47 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
- compressionSuccess =
- compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
- stream->bufferSize, planes_info.planes[0].data);
+ std::vector<uint8_t> app1ExifData =
+ createExif(Resolution(stream->width, stream->height),
+ createThumbnail(requestSettings.thumbnailResolution,
+ requestSettings.thumbnailJpegQuality));
+ compressedSize = compressJpeg(
+ gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
+ ycbcr, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
+ planes_info.planes[0].data);
status_t res = gBuffer->unlock();
if (res != NO_ERROR) {
ALOGE("Failed to unlock graphic buffer: %d", res);
}
} else {
- compressionSuccess =
- compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
- planes_info.planes[0].data);
+ std::vector<uint8_t> app1ExifData =
+ createExif(Resolution(stream->width, stream->height));
+ compressedSize = compressBlackJpeg(
+ stream->width, stream->height, requestSettings.jpegQuality, app1ExifData,
+ stream->bufferSize - sizeof(CameraBlob), planes_info.planes[0].data);
}
+
+ if (!compressedSize.has_value()) {
+ ALOGE("%s: Failed to compress JPEG image", __func__);
+ AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
+
+ CameraBlob cameraBlob{
+ .blobId = CameraBlobId::JPEG,
+ .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
+
+ memcpy(reinterpret_cast<uint8_t*>(planes_info.planes[0].data) +
+ (stream->bufferSize - sizeof(cameraBlob)),
+ &cameraBlob, sizeof(cameraBlob));
+
AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
- ALOGV("Unlocked buffers");
- return compressionSuccess ? ndk::ScopedAStatus::ok()
- : cameraStatus(Status::INTERNAL_ERROR);
+
+ ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
+ __func__, compressedSize.value());
+
+ return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(