Do not write into EglSurfaceTexture after creation
...because it prevents other clients from connecting to the surface.
Instead, if the input surface texture doesn't have any buffer yet,
render/compress a black image directly into the output buffers.
Bug: 301023410
Test: OpenCamera
Test: atest virtual_camera_tests
Change-Id: I289ff00b590c9bc18052ae0cc15a9d7320b8f033
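For reference, every fallback path in this change produces "black" the same
way a YCbCr frame encodes it: luma 0x00 and chroma at mid-range 0x7f (0.5
normalized), which is why the GL path below clears to (0, 0.5, 0.5) and the
JPEG path fills its chroma scanlines with 0xff / 2. A minimal standalone
sketch of that layout (makeBlackYCbCr420 is illustrative only, not part of
this change):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Fill a packed YCbCr 4:2:0 frame with black: Y plane all 0x00,
    // Cb/Cr samples at the neutral chroma value 0x7f.
    std::vector<uint8_t> makeBlackYCbCr420(int width, int height) {
      const int yPixels = width * height;
      const int uvPixels = (width / 2) * (height / 2);
      std::vector<uint8_t> frame(yPixels + 2 * uvPixels);
      std::fill(frame.begin(), frame.begin() + yPixels, 0x00);
      std::fill(frame.begin() + yPixels, frame.end(), 0x7f);
      return frame;
    }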
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 8a2db1c..8621160 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -24,6 +24,7 @@
#include <mutex>
#include <thread>
+#include "GLES/gl.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
@@ -271,9 +272,9 @@
}
auto status = streamConfig->format == PixelFormat::BLOB
- ? renderIntoBlobStreamBuffer(
- reqBuffer.getStreamId(), reqBuffer.getBufferId(),
- streamConfig->bufferSize, reqBuffer.getFence())
+ ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
+ reqBuffer.getBufferId(),
+ reqBuffer.getFence())
: renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
reqBuffer.getBufferId(),
reqBuffer.getFence());
@@ -354,17 +355,21 @@
}
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
- const int streamId, const int bufferId, const size_t bufferSize,
- sp<Fence> fence) {
+ const int streamId, const int bufferId, sp<Fence> fence) {
ALOGV("%s", __func__);
- sp<GraphicBuffer> gBuffer = mEglSurfaceTexture->getCurrentBuffer();
- if (gBuffer == nullptr) {
- // Most probably nothing was yet written to input surface if we reached this.
- ALOGE("%s: Cannot fetch most recent buffer from SurfaceTexture", __func__);
- return cameraStatus(Status::INTERNAL_ERROR);
- }
std::shared_ptr<AHardwareBuffer> hwBuffer =
mSessionContext.fetchHardwareBuffer(streamId, bufferId);
+ if (hwBuffer == nullptr) {
+ ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
+ bufferId, streamId);
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
+
+ std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
+ if (!stream.has_value()) {
+ ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
AHardwareBuffer_Planes planes_info;
@@ -377,27 +382,36 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
- android_ycbcr ycbcr;
- status_t status =
- gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
- ALOGV("Locked buffers");
- if (status != NO_ERROR) {
- AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
- ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
- return cameraStatus(Status::INTERNAL_ERROR);
- }
+ sp<GraphicBuffer> gBuffer = mEglSurfaceTexture->getCurrentBuffer();
+ bool compressionSuccess = true;
+ if (gBuffer != nullptr) {
+ android_ycbcr ycbcr;
+ status_t status =
+ gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
+ ALOGV("Locked buffers");
+ if (status != NO_ERROR) {
+ AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
+ ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
- bool success = compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
- bufferSize, planes_info.planes[0].data);
+ compressionSuccess =
+ compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
+ stream->bufferSize, planes_info.planes[0].data);
- status_t res = gBuffer->unlock();
- if (res != NO_ERROR) {
- ALOGE("Failed to unlock graphic buffer: %d", res);
+ status_t res = gBuffer->unlock();
+ if (res != NO_ERROR) {
+ ALOGE("Failed to unlock graphic buffer: %d", res);
+ }
+ } else {
+ compressionSuccess =
+ compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
+ planes_info.planes[0].data);
}
AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
ALOGV("Unlocked buffers");
- return success ? ndk::ScopedAStatus::ok()
- : cameraStatus(Status::INTERNAL_ERROR);
+ return compressionSuccess ? ndk::ScopedAStatus::ok()
+ : cameraStatus(Status::INTERNAL_ERROR);
}
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
@@ -435,7 +449,15 @@
mEglDisplayContext->makeCurrent();
framebuffer->beforeDraw();
- mEglTextureProgram->draw(mEglSurfaceTexture->updateTexture());
+ if (mEglSurfaceTexture->getCurrentBuffer() == nullptr) {
+ // If there's no current buffer, nothing was written to the surface yet and
+ // the texture is not initialized. Clear the framebuffer to black instead of
+ // rendering the texture: (0, 0.5, 0.5) is black in the YUV render target.
+ glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
+ glClear(GL_COLOR_BUFFER_BIT);
+ } else {
+ mEglTextureProgram->draw(mEglSurfaceTexture->updateTexture());
+ }
framebuffer->afterDraw();
const std::chrono::nanoseconds after =
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 30de7c2..a4374e6 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -126,7 +126,6 @@
// Always called on render thread.
ndk::ScopedAStatus renderIntoBlobStreamBuffer(const int streamId,
const int bufferId,
- const size_t bufferSize,
sp<Fence> fence = nullptr);
// Render current image to the YCbCr buffer.
diff --git a/services/camera/virtualcamera/tests/EglUtilTest.cc b/services/camera/virtualcamera/tests/EglUtilTest.cc
index d387ebf..d0b7218 100644
--- a/services/camera/virtualcamera/tests/EglUtilTest.cc
+++ b/services/camera/virtualcamera/tests/EglUtilTest.cc
@@ -31,28 +31,12 @@
namespace virtualcamera {
namespace {
-using ::testing::Eq;
-using ::testing::NotNull;
+using ::testing::IsNull;
constexpr int kWidth = 64;
constexpr int kHeight = 64;
constexpr char kGlExtYuvTarget[] = "GL_EXT_YUV_target";
-uint8_t getY(const android_ycbcr& ycbcr, const int x, const int y) {
- uint8_t* yPtr = reinterpret_cast<uint8_t*>(ycbcr.y);
- return *(yPtr + ycbcr.ystride * y + x);
-}
-
-uint8_t getCb(const android_ycbcr& ycbcr, const int x, const int y) {
- uint8_t* cbPtr = reinterpret_cast<uint8_t*>(ycbcr.cb);
- return *(cbPtr + ycbcr.cstride * (y / 2) + (x / 2) * ycbcr.chroma_step);
-}
-
-uint8_t getCr(const android_ycbcr& ycbcr, const int x, const int y) {
- uint8_t* crPtr = reinterpret_cast<uint8_t*>(ycbcr.cr);
- return *(crPtr + ycbcr.cstride * (y / 2) + (x / 2) * ycbcr.chroma_step);
-}
-
TEST(EglDisplayContextTest, SuccessfulInitialization) {
EglDisplayContext displayContext;
@@ -88,7 +72,7 @@
EXPECT_TRUE(eglTextureProgram.isInitialized());
}
-TEST_F(EglTest, EglSurfaceTextureBlackAfterInit) {
+TEST_F(EglTest, EglSurfaceCurrentBufferNullAfterInit) {
if (!isGlExtensionSupported(kGlExtYuvTarget)) {
GTEST_SKIP() << "Skipping test because of missing required GL extension " << kGlExtYuvTarget;
}
@@ -97,24 +81,7 @@
surfaceTexture.updateTexture();
sp<GraphicBuffer> buffer = surfaceTexture.getCurrentBuffer();
- ASSERT_THAT(buffer, NotNull());
- const int width = buffer->getWidth();
- const int height = buffer->getHeight();
- ASSERT_THAT(width, Eq(kWidth));
- ASSERT_THAT(height, Eq(kHeight));
-
- android_ycbcr ycbcr;
- status_t ret = buffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
- ASSERT_THAT(ret, Eq(NO_ERROR));
- for (int i = 0; i < width; ++i) {
- for (int j = 0; j < height; ++j) {
- EXPECT_THAT(getY(ycbcr, i, j), Eq(0x00));
- EXPECT_THAT(getCb(ycbcr, i, j), Eq(0x7f));
- EXPECT_THAT(getCr(ycbcr, i, j), Eq(0x7f));
- }
- }
-
- buffer->unlock();
+ EXPECT_THAT(buffer, IsNull());
}
} // namespace
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index 266d65a..5b479c0 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -31,26 +31,6 @@
namespace companion {
namespace virtualcamera {
-namespace {
-
-void submitBlackBufferYCbCr420(Surface& surface) {
- ANativeWindow_Buffer buffer;
-
- int ret = surface.lock(&buffer, nullptr);
- if (ret != NO_ERROR) {
- ALOGE("%s: Cannot lock output surface: %d", __func__, ret);
- return;
- }
- uint8_t* data = reinterpret_cast<uint8_t*>(buffer.bits);
- const int yPixNr = buffer.width * buffer.height;
- const int uvPixNr = (buffer.width / 2) * (buffer.height / 2);
- memset(data, 0x00, yPixNr);
- memset(data + yPixNr, 0x7f, 2 * uvPixNr);
- surface.unlockAndPost();
-}
-
-} // namespace
-
EglSurfaceTexture::EglSurfaceTexture(const uint32_t width, const uint32_t height)
: mWidth(width), mHeight(height) {
glGenTextures(1, &mTextureId);
@@ -67,14 +47,6 @@
mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
mSurface = sp<Surface>::make(mBufferProducer);
- // Submit black buffer to the surface to make sure there's input buffer
- // to process in case capture request comes before client writes something
- // to the surface.
- //
- // Note that if the client does write something before capture request is
- // processed (& updateTexture is called), this black buffer will be
- // skipped (and recycled).
- submitBlackBufferYCbCr420(*mSurface);
}
EglSurfaceTexture::~EglSurfaceTexture() {
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 6f10376..2b19c13 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -20,6 +20,7 @@
#include <cstddef>
#include <cstdint>
#include <memory>
+#include <vector>
#include "android/hardware_buffer.h"
#include "jpeglib.h"
@@ -37,10 +38,9 @@
class LibJpegContext {
public:
- LibJpegContext(int width, int height, const android_ycbcr& ycbcr,
- const size_t outBufferSize, void* outBuffer)
- : mYCbCr(ycbcr),
- mWidth(width),
+ LibJpegContext(int width, int height, const size_t outBufferSize,
+ void* outBuffer)
+ : mWidth(width),
mHeight(height),
mDstBufferSize(outBufferSize),
mDstBuffer(outBuffer) {
@@ -94,15 +94,15 @@
mCompressStruct.comp_info[2].v_samp_factor = 1;
}
- bool compress() {
+ bool compress(const android_ycbcr& ycbr) {
// Prepare arrays of pointers to scanlines of each plane.
std::vector<JSAMPROW> yLines(mHeight);
std::vector<JSAMPROW> cbLines(mHeight / 2);
std::vector<JSAMPROW> crLines(mHeight / 2);
- uint8_t* y = static_cast<uint8_t*>(mYCbCr.y);
- uint8_t* cb = static_cast<uint8_t*>(mYCbCr.cb);
- uint8_t* cr = static_cast<uint8_t*>(mYCbCr.cr);
+ uint8_t* y = static_cast<uint8_t*>(ycbr.y);
+ uint8_t* cb = static_cast<uint8_t*>(ycbr.cb);
+ uint8_t* cr = static_cast<uint8_t*>(ycbr.cr);
// Since UV samples might be interleaved (semiplanar) we need to copy
// them to separate planes, since libjpeg doesn't directly
@@ -115,20 +115,59 @@
for (int i = 0; i < c_samples; ++i) {
cb_plane[i] = *cb;
cr_plane[i] = *cr;
- cb += mYCbCr.chroma_step;
- cr += mYCbCr.chroma_step;
+ cb += ycbr.chroma_step;
+ cr += ycbr.chroma_step;
}
// Collect pointers to individual scanline of each plane.
for (int i = 0; i < mHeight; ++i) {
- yLines[i] = y + i * mYCbCr.ystride;
+ yLines[i] = y + i * ycbr.ystride;
}
for (int i = 0; i < (mHeight / 2); ++i) {
cbLines[i] = cb_plane.data() + i * (mWidth / 2);
crLines[i] = cr_plane.data() + i * (mWidth / 2);
}
- // Perform actual compression.
+ return compress(yLines, cbLines, crLines);
+ }
+
+ bool compressBlackImage() {
+ // We only need to prepare one scanline for Y and one shared scanline for
+ // Cb & Cr, since every row of an all-black image is identical.
+ std::vector<uint8_t> yLine(mWidth, 0);
+ std::vector<uint8_t> chromaLine(mWidth / 2, 0xff / 2);
+
+ std::vector<JSAMPROW> yLines(mHeight, yLine.data());
+ std::vector<JSAMPROW> cLines(mHeight / 2, chromaLine.data());
+
+ return compress(yLines, cLines, cLines);
+ }
+
+ private:
+ void setSuccess(const boolean success) {
+ mSuccess = success;
+ }
+
+ void initDestination() {
+ mDestinationMgr.next_output_byte = reinterpret_cast<JOCTET*>(mDstBuffer);
+ mDestinationMgr.free_in_buffer = mDstBufferSize;
+ ALOGV("%s:%d jpeg start: %p [%zu]", __FUNCTION__, __LINE__, mDstBuffer,
+ mDstBufferSize);
+ }
+
+ void termDestination() {
+ mEncodedSize = mDstBufferSize - mDestinationMgr.free_in_buffer;
+ ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, mEncodedSize);
+ }
+
+ // Perform actual compression.
+ //
+ // Takes vectors of pointers to Y / Cb / Cr scanlines as input. The length of
+ // each vector must match the height of the corresponding plane.
+ //
+ // Returns true if compression is successful, false otherwise.
+ bool compress(std::vector<JSAMPROW>& yLines, std::vector<JSAMPROW>& cbLines,
+ std::vector<JSAMPROW>& crLines) {
jpeg_start_compress(&mCompressStruct, TRUE);
while (mCompressStruct.next_scanline < mCompressStruct.image_height) {
@@ -149,23 +188,6 @@
return mSuccess;
}
- private:
- void setSuccess(const boolean success) {
- mSuccess = success;
- }
-
- void initDestination() {
- mDestinationMgr.next_output_byte = reinterpret_cast<JOCTET*>(mDstBuffer);
- mDestinationMgr.free_in_buffer = mDstBufferSize;
- ALOGV("%s:%d jpeg start: %p [%zu]", __FUNCTION__, __LINE__, mDstBuffer,
- mDstBufferSize);
- }
-
- void termDestination() {
- mEncodedSize = mDstBufferSize - mDestinationMgr.free_in_buffer;
- ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, mEncodedSize);
- }
-
// === libjpeg callbacks below ===
static void onOutputError(j_common_ptr cinfo) {
@@ -195,9 +217,6 @@
jpeg_error_mgr mErrorMgr;
jpeg_destination_mgr mDestinationMgr;
- // Layout of the input image.
- android_ycbcr mYCbCr;
-
// Dimensions of the input image.
int mWidth;
int mHeight;
@@ -216,11 +235,15 @@
} // namespace
-// Returns true if the EGL is in an error state and logs the error.
bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
size_t outBufferSize, void* outBuffer) {
- return LibJpegContext(width, height, ycbcr, outBufferSize, outBuffer)
- .compress();
+ return LibJpegContext(width, height, outBufferSize, outBuffer).compress(ycbcr);
+}
+
+bool compressBlackJpeg(int width, int height, size_t outBufferSize,
+ void* outBuffer) {
+ return LibJpegContext(width, height, outBufferSize, outBuffer)
+ .compressBlackImage();
}
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index 8bff008..c44d0a8 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -27,9 +27,15 @@
namespace virtualcamera {
// Jpeg-compress image into the output buffer.
+// Returns true if the compression was successful, false otherwise.
bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
size_t outBufferSize, void* outBuffer);
+// Jpeg-compress all-black image into the output buffer.
+// Returns true if the compression was successful, false otherwise.
+bool compressBlackJpeg(int width, int height, size_t outBufferSize,
+ void* outBuffer);
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
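A hedged usage sketch for the new compressBlackJpeg() helper declared above;
the include path and the output-buffer sizing are assumptions made for
illustration (real callers pass the buffer size configured for the BLOB
stream):

    #include <cstdint>
    #include <vector>

    #include "util/JpegUtil.h"  // assumed include path

    // Compress an all-black width x height frame into `out`. Resizing to
    // width * height bytes is a deliberately generous illustrative bound.
    bool writeBlackJpeg(int width, int height, std::vector<uint8_t>& out) {
      out.resize(static_cast<size_t>(width) * height);
      return android::companion::virtualcamera::compressBlackJpeg(
          width, height, out.size(), out.data());
    }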