SF: add buffer checks to Render Engine

Make sure that both the input and output buffers are GPU accessible
before RenderEngine touches them: input buffers must be GPU readable
(USAGE_HW_TEXTURE) and output buffers must be GPU writable
(USAGE_HW_RENDER).
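
As a rough illustration (not part of the patch), each check reduces to
a single bitwise test against the buffer's usage mask. The flag values
and helper names below are assumptions made for this sketch only:

    // Minimal sketch, assuming gralloc-style usage bits; illustration only.
    #include <cstdint>

    constexpr uint64_t kUsageHwTexture = 0x100; // GPU can sample the buffer
    constexpr uint64_t kUsageHwRender  = 0x200; // GPU can render into the buffer

    // A buffer is acceptable as RenderEngine input only if the GPU can
    // read it, and as output only if the GPU can write it.
    inline bool isGpuReadable(uint64_t usage) { return (usage & kUsageHwTexture) != 0; }
    inline bool isGpuWritable(uint64_t usage) { return (usage & kUsageHwRender) != 0; }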

Test: Boot, launch an app, take a screenshot
Bug: 158790260
Change-Id: I905a670f6cc2d419b49fe79694df6e71d60a2a92
diff --git a/libs/renderengine/RenderEngine.cpp b/libs/renderengine/RenderEngine.cpp
index 79839c1..0c5a851 100644
--- a/libs/renderengine/RenderEngine.cpp
+++ b/libs/renderengine/RenderEngine.cpp
@@ -85,5 +85,15 @@
 
 RenderEngine::~RenderEngine() = default;
 
+void RenderEngine::validateInputBufferUsage(const sp<GraphicBuffer>& buffer) {
+    LOG_ALWAYS_FATAL_IF(!(buffer->getUsage() & GraphicBuffer::USAGE_HW_TEXTURE),
+                        "input buffer not gpu readable");
+}
+
+void RenderEngine::validateOutputBufferUsage(const sp<GraphicBuffer>& buffer) {
+    LOG_ALWAYS_FATAL_IF(!(buffer->getUsage() & GraphicBuffer::USAGE_HW_RENDER),
+                        "output buffer not gpu writeable");
+}
+
 } // namespace renderengine
 } // namespace android
diff --git a/libs/renderengine/gl/GLESRenderEngine.cpp b/libs/renderengine/gl/GLESRenderEngine.cpp
index 397f038..2b09c15 100644
--- a/libs/renderengine/gl/GLESRenderEngine.cpp
+++ b/libs/renderengine/gl/GLESRenderEngine.cpp
@@ -1125,6 +1125,8 @@
         return BAD_VALUE;
     }
 
+    validateOutputBufferUsage(buffer);
+
     std::unique_ptr<BindNativeBufferAsFramebuffer> fbo;
     // Gathering layers that requested blur, we'll need them to decide when to render to an
     // offscreen buffer, and when to render to the native buffer.
@@ -1249,6 +1251,7 @@
             isOpaque = layer->source.buffer.isOpaque;
 
             sp<GraphicBuffer> gBuf = layer->source.buffer.buffer;
+            validateInputBufferUsage(gBuf);
             bindExternalTextureBuffer(layer->source.buffer.textureName, gBuf,
                                       layer->source.buffer.fence);
 
diff --git a/libs/renderengine/include/renderengine/RenderEngine.h b/libs/renderengine/include/renderengine/RenderEngine.h
index 572d348..ddae34a 100644
--- a/libs/renderengine/include/renderengine/RenderEngine.h
+++ b/libs/renderengine/include/renderengine/RenderEngine.h
@@ -201,6 +201,9 @@
     // we should not allow in general, so remove this.
     RenderEngineType getRenderEngineType() const { return mRenderEngineType; }
 
+    static void validateInputBufferUsage(const sp<GraphicBuffer>&);
+    static void validateOutputBufferUsage(const sp<GraphicBuffer>&);
+
 protected:
     friend class threaded::RenderEngineThreaded;
     const RenderEngineType mRenderEngineType;
diff --git a/libs/renderengine/skia/SkiaGLRenderEngine.cpp b/libs/renderengine/skia/SkiaGLRenderEngine.cpp
index cbb02a3..91b163e 100644
--- a/libs/renderengine/skia/SkiaGLRenderEngine.cpp
+++ b/libs/renderengine/skia/SkiaGLRenderEngine.cpp
@@ -598,6 +598,8 @@
         return BAD_VALUE;
     }
 
+    validateOutputBufferUsage(buffer);
+
     auto grContext = mInProtectedContext ? mProtectedGrContext : mGrContext;
     auto& cache = mInProtectedContext ? mProtectedTextureCache : mTextureCache;
     AHardwareBuffer_Desc bufferDesc;
@@ -815,6 +817,7 @@
         SkPaint paint;
         if (layer->source.buffer.buffer) {
             ATRACE_NAME("DrawImage");
+            validateInputBufferUsage(layer->source.buffer.buffer);
             const auto& item = layer->source.buffer;
             std::shared_ptr<AutoBackendTexture::LocalRef> imageTextureRef = nullptr;
             auto iter = mTextureCache.find(item.buffer->getId());