Support RGBA input buffers.
This change adds a separate EGL shader for RGBA->YUV conversion
and modifies the JPEG compression path to render the input texture
into a temporary framebuffer (performing the conversion if necessary).
Bug: 301023410
Test: atest virtual_camera_tests
Test: atest VirtualCameraTest
Change-Id: Id3bd19d4c364691e2b1554fcf78d5f9940754314
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 1f61e34..79c91ef 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -19,6 +19,7 @@
#include <chrono>
#include <cstddef>
+#include <cstdint>
#include <future>
#include <memory>
#include <mutex>
@@ -38,6 +39,7 @@
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
+#include "ui/GraphicBuffer.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/MetadataBuilder.h"
@@ -109,6 +111,45 @@
return msg;
}
+std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
+ EGLDisplay eglDisplay, const int width, const int height) {
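+ // Allocate a YUV420 hardware buffer that can be used both as a GL render
+ // target and as a CPU-readable source for JPEG compression.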
+ const AHardwareBuffer_Desc desc{
+ .width = static_cast<uint32_t>(width),
+ .height = static_cast<uint32_t>(height),
+ .layers = 1,
+ .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+ .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
+ AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
+ .rfu0 = 0,
+ .rfu1 = 0};
+
+ AHardwareBuffer* hwBufferPtr;
+ int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
+ if (status != NO_ERROR) {
+ ALOGE(
+ "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
+ __func__, status);
+ return nullptr;
+ }
+
+ return std::make_shared<EglFrameBuffer>(
+ eglDisplay,
+ std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
+}
+
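+// Returns true if the given pixel format is a YUV format.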
+bool isYuvFormat(const PixelFormat pixelFormat) {
+ switch (static_cast<android_pixel_format_t>(pixelFormat)) {
+ case HAL_PIXEL_FORMAT_YCBCR_422_I:
+ case HAL_PIXEL_FORMAT_YCBCR_422_SP:
+ case HAL_PIXEL_FORMAT_Y16:
+ case HAL_PIXEL_FORMAT_YV12:
+ case HAL_PIXEL_FORMAT_YCBCR_420_888:
+ return true;
+ default:
+ return false;
+ }
+}
+
} // namespace
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -218,7 +259,10 @@
ALOGV("Render thread starting");
mEglDisplayContext = std::make_unique<EglDisplayContext>();
- mEglTextureProgram = std::make_unique<EglTextureProgram>();
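+ // Initialize texture programs for both YUV and RGBA input buffers.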
+ mEglTextureYuvProgram =
+ std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
+ mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
+ EglTextureProgram::TextureFormat::RGBA);
mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
mInputSurfaceHeight);
mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
@@ -371,6 +415,22 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
+ // Create a YUV framebuffer and render the input surface into it. This
+ // takes care of rescaling as well as any necessary format conversion.
+ std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
+ mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
+ if (framebuffer == nullptr) {
+ ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
+
+ // Render into temporary framebuffer.
+ ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
+ if (!status.isOk()) {
+ ALOGE("Failed to render input texture into temporary framebuffer");
+ return status;
+ }
+
AHardwareBuffer_Planes planes_info;
int32_t rawFence = fence != nullptr ? fence->get() : -1;
@@ -382,11 +442,15 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
- sp<GraphicBuffer> gBuffer = mEglSurfaceTexture->getCurrentBuffer();
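+ // Use the temporary framebuffer's hardware buffer as the source for JPEG
+ // compression; unlike the input surface buffer, it is guaranteed to have a
+ // YUV layout.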
+ std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
+ GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
+
bool compressionSuccess = true;
if (gBuffer != nullptr) {
android_ycbcr ycbcr;
if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ // This should never happen since we're allocating the temporary buffer
+ // with YUV420 layout above.
ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
gBuffer->getPixelFormat());
AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
@@ -441,31 +505,7 @@
return cameraStatus(Status::ILLEGAL_ARGUMENT);
}
- // Wait for fence to clear.
- if (fence != nullptr && fence->isValid()) {
- status_t ret = fence->wait(kAcquireFenceTimeout.count());
- if (ret != 0) {
- ALOGE(
- "Timeout while waiting for the acquire fence for buffer %d"
- " for streamId %d",
- bufferId, streamId);
- return cameraStatus(Status::INTERNAL_ERROR);
- }
- }
-
- mEglDisplayContext->makeCurrent();
- framebuffer->beforeDraw();
-
- if (mEglSurfaceTexture->getCurrentBuffer() == nullptr) {
- // If there's no current buffer, nothing was written to the surface and
- // texture is not initialized yet. Let's render the framebuffer black
- // instead of rendering the texture.
- glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
- glClear(GL_COLOR_BUFFER_BIT);
- } else {
- mEglTextureProgram->draw(mEglSurfaceTexture->updateTexture());
- }
- framebuffer->afterDraw();
+ ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
const std::chrono::nanoseconds after =
std::chrono::duration_cast<std::chrono::nanoseconds>(
@@ -477,6 +517,43 @@
return ndk::ScopedAStatus::ok();
}
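+// Renders the current input surface contents into the given framebuffer,
+// waiting for the optional acquire fence before drawing. If no input buffer
+// has been produced yet, the framebuffer is cleared to black instead.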
+ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
+ EglFrameBuffer& framebuffer, sp<Fence> fence) {
+ ALOGV("%s", __func__);
+ // Wait for fence to clear.
+ if (fence != nullptr && fence->isValid()) {
+ status_t ret = fence->wait(kAcquireFenceTimeout.count());
+ if (ret != 0) {
+ ALOGE("Timeout while waiting for the acquire fence for the buffer");
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
+ }
+
+ mEglDisplayContext->makeCurrent();
+ framebuffer.beforeDraw();
+
+ sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
+ if (textureBuffer == nullptr) {
+ // If there's no current buffer, nothing was written to the surface and
+ // texture is not initialized yet. Let's render the framebuffer black
+ // instead of rendering the texture.
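+ // Note: (0.0, 0.5, 0.5) corresponds to black for a YUV render target.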
+ glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
+ glClear(GL_COLOR_BUFFER_BIT);
+ } else {
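+ // Sample the texture with the program matching the pixel format of the
+ // current input buffer.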
+ const bool renderSuccess =
+ isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
+ ? mEglTextureYuvProgram->draw(mEglSurfaceTexture->updateTexture())
+ : mEglTextureRgbProgram->draw(mEglSurfaceTexture->updateTexture());
+ if (!renderSuccess) {
+ ALOGE("%s: Failed to render texture", __func__);
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
+ }
+ framebuffer.afterDraw();
+
+ return ndk::ScopedAStatus::ok();
+}
+
} // namespace virtualcamera
} // namespace companion
} // namespace android