Fix overflow bug in GraphicBufferAllocator
Clients should not be requesting buffers this large. Reject any allocation
whose total size in bytes (width * height * bytes per pixel) would exceed
the maximum value representable in size_t.
Bug: 137801859
Test: build, boot
Change-Id: Idef0c1e926c180bfaf640b627046adba5d3043c3
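
For reference, the guard added in the diff below can be exercised in
isolation. The following is a minimal standalone sketch of the same
division-based overflow check (the helper name fitsInSizeT and its
parameters are illustrative, not part of the patch): dividing the maximum
size_t value by width and then by height means the potentially overflowing
product is never actually computed.

    #include <cstddef>
    #include <cstdint>
    #include <limits>

    // True when width * height * bpp is representable in size_t.
    // Dividing the maximum first avoids performing the multiplication
    // that could overflow.
    static bool fitsInSizeT(uint32_t width, uint32_t height, uint32_t bpp) {
        if (width == 0 || height == 0) return true;  // nothing to overflow
        return std::numeric_limits<size_t>::max() / width / height >=
               static_cast<size_t>(bpp);
    }
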
diff --git a/libs/ui/GraphicBufferAllocator.cpp b/libs/ui/GraphicBufferAllocator.cpp
index 0861a1f..9c7d1fd 100644
--- a/libs/ui/GraphicBufferAllocator.cpp
+++ b/libs/ui/GraphicBufferAllocator.cpp
@@ -20,6 +20,7 @@
#include <ui/GraphicBufferAllocator.h>
+#include <limits>
#include <stdio.h>
#include <grallocusage/GrallocUsageConversion.h>
@@ -114,6 +115,14 @@
if (!width || !height)
width = height = 1;
+ const uint32_t bpp = bytesPerPixel(format);
+ if (std::numeric_limits<size_t>::max() / width / height < static_cast<size_t>(bpp)) {
+ ALOGE("Failed to allocate (%u x %u) layerCount %u format %d "
+ "usage %" PRIx64 ": Requesting too large a buffer size",
+ width, height, layerCount, format, usage);
+ return BAD_VALUE;
+ }
+
// Ensure that layerCount is valid.
if (layerCount < 1)
layerCount = 1;
@@ -126,7 +135,6 @@
if (error == NO_ERROR) {
Mutex::Autolock _l(sLock);
KeyedVector<buffer_handle_t, alloc_rec_t>& list(sAllocList);
- uint32_t bpp = bytesPerPixel(format);
alloc_rec_t rec;
rec.width = width;
rec.height = height;
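
A quick way to see why the check matters (using assumed example values, not
figures taken from the bug report): with a 32-bit size_t, a 65536 x 65536
request at 4 bytes per pixel wraps to 0 when the byte count is computed
naively, while the division-based guard rejects it before any allocation is
attempted. The snippet below simulates a 32-bit size_t so the wrap is
reproducible on any host.

    #include <cstdint>
    #include <cstdio>
    #include <limits>

    int main() {
        const uint32_t width = 65536, height = 65536, bpp = 4;

        // Stand-in for a 32-bit size_t so the overflow shows up everywhere.
        using size32 = uint32_t;

        const size32 naive = static_cast<size32>(width) * height * bpp;  // wraps to 0
        const bool tooLarge =
                std::numeric_limits<size32>::max() / width / height <
                static_cast<size32>(bpp);

        std::printf("naive byte count (wrapped): %u\n", static_cast<unsigned>(naive));
        std::printf("guard rejects the request:  %s\n", tooLarge ? "yes" : "no");
        return 0;
    }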