/*
 * Copyright 2018, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2Buffer"
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
#include <utils/Trace.h>

#include <aidl/android/hardware/graphics/common/Cta861_3.h>
#include <aidl/android/hardware/graphics/common/Smpte2086.h>
#include <android-base/no_destructor.h>
#include <android-base/properties.h>
#include <android/hardware/cas/native/1.0/types.h>
#include <android/hardware/drm/1.0/types.h>
#include <android/hardware/graphics/common/1.2/types.h>
#include <android/hardware/graphics/mapper/4.0/IMapper.h>
#include <gralloctypes/Gralloc4.h>
#include <hidlmemory/FrameworkUtils.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <mediadrm/ICrypto.h>
#include <nativebase/nativebase.h>
#include <ui/Fence.h>

#include <C2AllocatorGralloc.h>
#include <C2BlockInternal.h>
#include <C2Debug.h>

#include "Codec2Buffer.h"

namespace android {

// Codec2Buffer

bool Codec2Buffer::canCopyLinear(const std::shared_ptr<C2Buffer> &buffer) const {
    if (const_cast<Codec2Buffer *>(this)->base() == nullptr) {
        return false;
    }
    if (!buffer) {
        // Nothing to copy, so we can copy by doing nothing.
        return true;
    }
    if (buffer->data().type() != C2BufferData::LINEAR) {
        return false;
    }
    if (buffer->data().linearBlocks().size() == 0u) {
        // Nothing to copy, so we can copy by doing nothing.
        return true;
    } else if (buffer->data().linearBlocks().size() > 1u) {
        // We don't know how to copy more than one block.
        return false;
    }
    if (buffer->data().linearBlocks()[0].size() > capacity()) {
        // It won't fit.
        return false;
    }
    return true;
}

bool Codec2Buffer::copyLinear(const std::shared_ptr<C2Buffer> &buffer) {
    // We assume that all canCopyLinear() checks passed.
    if (!buffer || buffer->data().linearBlocks().size() == 0u
            || buffer->data().linearBlocks()[0].size() == 0u) {
        setRange(0, 0);
        return true;
    }
    C2ReadView view = buffer->data().linearBlocks()[0].map().get();
    if (view.error() != C2_OK) {
        ALOGD("Error while mapping: %d", view.error());
        return false;
    }
    if (view.capacity() > capacity()) {
        ALOGD("C2ConstLinearBlock lied --- it actually doesn't fit: view(%u) > this(%zu)",
                view.capacity(), capacity());
        return false;
    }
    memcpy(base(), view.data(), view.capacity());
    setRange(0, view.capacity());
    return true;
}
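
// Illustrative sketch (not part of the original file): callers are expected to
// gate copy() behind canCopy(), since copyLinear() assumes every
// canCopyLinear() precondition already holds. The caller and variable names
// below are hypothetical.
//
//     sp<Codec2Buffer> clientBuffer = ...;       // e.g. a LocalLinearBuffer
//     std::shared_ptr<C2Buffer> c2Buffer = ...;  // buffer coming from the component
//     if (clientBuffer->canCopy(c2Buffer)) {
//         CHECK(clientBuffer->copy(c2Buffer));   // safe: preconditions were checked
//     } else {
//         // fall back to allocating a larger client buffer and retrying
//     }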

void Codec2Buffer::setImageData(const sp<ABuffer> &imageData) {
    mImageData = imageData;
}

// LocalLinearBuffer

bool LocalLinearBuffer::canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
    return canCopyLinear(buffer);
}

bool LocalLinearBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    return copyLinear(buffer);
}

// DummyContainerBuffer

static uint8_t sDummyByte[1] = { 0 };

DummyContainerBuffer::DummyContainerBuffer(
        const sp<AMessage> &format, const std::shared_ptr<C2Buffer> &buffer)
    : Codec2Buffer(format, new ABuffer(sDummyByte, 1)),
      mBufferRef(buffer) {
    setRange(0, buffer ? 1 : 0);
}

std::shared_ptr<C2Buffer> DummyContainerBuffer::asC2Buffer() {
    return mBufferRef;
}

void DummyContainerBuffer::clearC2BufferRefs() {
    mBufferRef.reset();
}

bool DummyContainerBuffer::canCopy(const std::shared_ptr<C2Buffer> &) const {
    return !mBufferRef;
}

bool DummyContainerBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    mBufferRef = buffer;
    setRange(0, mBufferRef ? 1 : 0);
    return true;
}

// LinearBlockBuffer

// static
sp<LinearBlockBuffer> LinearBlockBuffer::Allocate(
        const sp<AMessage> &format, const std::shared_ptr<C2LinearBlock> &block) {
    C2WriteView writeView(block->map().get());
    if (writeView.error() != C2_OK) {
        return nullptr;
    }
    return new LinearBlockBuffer(format, std::move(writeView), block);
}

std::shared_ptr<C2Buffer> LinearBlockBuffer::asC2Buffer() {
    return C2Buffer::CreateLinearBuffer(mBlock->share(offset(), size(), C2Fence()));
}

bool LinearBlockBuffer::canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
    return canCopyLinear(buffer);
}

bool LinearBlockBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    return copyLinear(buffer);
}

LinearBlockBuffer::LinearBlockBuffer(
        const sp<AMessage> &format,
        C2WriteView&& writeView,
        const std::shared_ptr<C2LinearBlock> &block)
    : Codec2Buffer(format, new ABuffer(writeView.data(), writeView.size())),
      mWriteView(writeView),
      mBlock(block) {
}

// ConstLinearBlockBuffer

// static
sp<ConstLinearBlockBuffer> ConstLinearBlockBuffer::Allocate(
        const sp<AMessage> &format, const std::shared_ptr<C2Buffer> &buffer) {
    if (!buffer
            || buffer->data().type() != C2BufferData::LINEAR
            || buffer->data().linearBlocks().size() != 1u) {
        return nullptr;
    }
    C2ReadView readView(buffer->data().linearBlocks()[0].map().get());
    if (readView.error() != C2_OK) {
        return nullptr;
    }
    return new ConstLinearBlockBuffer(format, std::move(readView), buffer);
}

ConstLinearBlockBuffer::ConstLinearBlockBuffer(
        const sp<AMessage> &format,
        C2ReadView&& readView,
        const std::shared_ptr<C2Buffer> &buffer)
    : Codec2Buffer(format, new ABuffer(
            // NOTE: ABuffer only takes non-const pointer but this data is
            // supposed to be read-only.
            const_cast<uint8_t *>(readView.data()), readView.capacity())),
      mReadView(readView),
      mBufferRef(buffer) {
}

std::shared_ptr<C2Buffer> ConstLinearBlockBuffer::asC2Buffer() {
    return mBufferRef;
}

void ConstLinearBlockBuffer::clearC2BufferRefs() {
    mBufferRef.reset();
}

// GraphicView2MediaImageConverter

namespace {

class GraphicView2MediaImageConverter {
public:
    /**
     * Creates a C2GraphicView <=> MediaImage converter
     *
     * \param view   C2GraphicView object
     * \param format buffer format
     * \param copy   whether the converter is used for copy or not
     */
    GraphicView2MediaImageConverter(
            const C2GraphicView &view, const sp<AMessage> &format, bool copy)
        : mInitCheck(NO_INIT),
          mView(view),
          mWidth(view.width()),
          mHeight(view.height()),
          mAllocatedDepth(0),
          mBackBufferSize(0),
          mMediaImage(new ABuffer(sizeof(MediaImage2))) {
        ATRACE_CALL();
        if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
            mClientColorFormat = COLOR_FormatYUV420Flexible;
        }
        if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
            mComponentColorFormat = COLOR_FormatYUV420Flexible;
        }
        if (view.error() != C2_OK) {
            ALOGD("Converter: view.error() = %d", view.error());
            mInitCheck = BAD_VALUE;
            return;
        }
        MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
        const C2PlanarLayout &layout = view.layout();
        if (layout.numPlanes == 0) {
            ALOGD("Converter: 0 planes");
            mInitCheck = BAD_VALUE;
            return;
        }
        memset(mediaImage, 0, sizeof(*mediaImage));
        mAllocatedDepth = layout.planes[0].allocatedDepth;
        uint32_t bitDepth = layout.planes[0].bitDepth;

        // align width and height to support subsampling cleanly
        uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
        uint32_t vStride = align(view.crop().height, 2);

        bool tryWrapping = !copy;

        switch (layout.type) {
            case C2PlanarLayout::TYPE_YUV: {
                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
                if (layout.numPlanes != 3) {
                    ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
                    mInitCheck = BAD_VALUE;
                    return;
                }
                std::optional<int> clientBitDepth = {};
                switch (mClientColorFormat) {
                    case COLOR_FormatYUVP010:
                        clientBitDepth = 10;
                        break;
                    case COLOR_FormatYUV411PackedPlanar:
                    case COLOR_FormatYUV411Planar:
                    case COLOR_FormatYUV420Flexible:
                    case COLOR_FormatYUV420PackedPlanar:
                    case COLOR_FormatYUV420PackedSemiPlanar:
                    case COLOR_FormatYUV420Planar:
                    case COLOR_FormatYUV420SemiPlanar:
                    case COLOR_FormatYUV422Flexible:
                    case COLOR_FormatYUV422PackedPlanar:
                    case COLOR_FormatYUV422PackedSemiPlanar:
                    case COLOR_FormatYUV422Planar:
                    case COLOR_FormatYUV422SemiPlanar:
                    case COLOR_FormatYUV444Flexible:
                    case COLOR_FormatYUV444Interleaved:
                        clientBitDepth = 8;
                        break;
                    default:
                        // no-op; used with optional
                        break;
                }
                // conversion fails if the client bit depth and the component bit depth differ
                if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
                    ALOGD("Bit depth of client: %d and component: %d differs",
                            *clientBitDepth, bitDepth);
                    mInitCheck = BAD_VALUE;
                    return;
                }
                C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
                C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
                C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
                if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
                        || uPlane.channel != C2PlaneInfo::CHANNEL_CB
                        || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
                    ALOGD("Converter: not YUV layout");
                    mInitCheck = BAD_VALUE;
                    return;
                }
                bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
                        && uPlane.rowSampling == 2 && uPlane.colSampling == 2
                        && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
                if (yuv420888) {
                    for (uint32_t i = 0; i < 3; ++i) {
                        const C2PlaneInfo &plane = layout.planes[i];
                        if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
                            yuv420888 = false;
                            break;
                        }
                    }
                    yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
                }
                int32_t copyFormat = mClientColorFormat;
                if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
                    if (uPlane.colInc == 2 && vPlane.colInc == 2
                            && yPlane.rowInc == uPlane.rowInc) {
                        copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
                    } else if (uPlane.colInc == 1 && vPlane.colInc == 1
                            && yPlane.rowInc == uPlane.rowInc * 2) {
                        copyFormat = COLOR_FormatYUV420PackedPlanar;
                    }
                }
                ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
                        "v:{colInc=%d rowInc=%d}",
                        mClientColorFormat,
                        yPlane.colInc, yPlane.rowInc,
                        uPlane.colInc, uPlane.rowInc,
                        vPlane.colInc, vPlane.rowInc);
                switch (copyFormat) {
                    case COLOR_FormatYUV420Flexible:
                    case COLOR_FormatYUV420Planar:
                    case COLOR_FormatYUV420PackedPlanar:
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;

                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                        mediaImage->mPlane[mediaImage->U].mColInc = 1;
                        mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;

                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
                        mediaImage->mPlane[mediaImage->V].mColInc = 1;
                        mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;

                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
                            tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
                                    && yPlane.rowInc == uPlane.rowInc * 2
                                    && view.data()[0] < view.data()[1]
                                    && view.data()[1] < view.data()[2];
                        }
                        break;

                    case COLOR_FormatYUV420SemiPlanar:
                    case COLOR_FormatYUV420PackedSemiPlanar:
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;

                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                        mediaImage->mPlane[mediaImage->U].mColInc = 2;
                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;

                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
                        mediaImage->mPlane[mediaImage->V].mColInc = 2;
                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;

                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
                            tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
                                    && yPlane.rowInc == uPlane.rowInc
                                    && view.data()[0] < view.data()[1]
                                    && view.data()[1] < view.data()[2];
                        }
                        break;

                    case COLOR_FormatYUVP010:
                        // stride is in bytes
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = 2;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;

                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                        mediaImage->mPlane[mediaImage->U].mColInc = 4;
                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;

                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
                        mediaImage->mPlane[mediaImage->V].mColInc = 4;
                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
                        if (tryWrapping) {
                            tryWrapping = yPlane.allocatedDepth == 16
                                    && uPlane.allocatedDepth == 16
                                    && vPlane.allocatedDepth == 16
                                    && yPlane.bitDepth == 10
                                    && uPlane.bitDepth == 10
                                    && vPlane.bitDepth == 10
                                    && yPlane.rightShift == 6
                                    && uPlane.rightShift == 6
                                    && vPlane.rightShift == 6
                                    && yPlane.rowSampling == 1 && yPlane.colSampling == 1
                                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
                                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2
                                    && yPlane.colInc == 2
                                    && uPlane.colInc == 4
                                    && vPlane.colInc == 4
                                    && yPlane.rowInc == uPlane.rowInc
                                    && yPlane.rowInc == vPlane.rowInc;
                        }
                        break;

                    default: {
                        // default to fully planar format --- this will be overridden if wrapping
                        // TODO: keep interleaved format
                        int32_t colInc = divUp(mAllocatedDepth, 8u);
                        int32_t rowInc = stride * colInc / yPlane.colSampling;
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
                        int32_t offset = rowInc * vStride / yPlane.rowSampling;

                        rowInc = stride * colInc / uPlane.colSampling;
                        mediaImage->mPlane[mediaImage->U].mOffset = offset;
                        mediaImage->mPlane[mediaImage->U].mColInc = colInc;
                        mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
                        offset += rowInc * vStride / uPlane.rowSampling;

                        rowInc = stride * colInc / vPlane.colSampling;
                        mediaImage->mPlane[mediaImage->V].mOffset = offset;
                        mediaImage->mPlane[mediaImage->V].mColInc = colInc;
                        mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
                        break;
                    }
                }
                break;
            }

            case C2PlanarLayout::TYPE_YUVA:
                ALOGD("Converter: unrecognized color format "
                        "(client %d component %d) for YUVA layout",
                        mClientColorFormat, mComponentColorFormat);
                mInitCheck = NO_INIT;
                return;
            case C2PlanarLayout::TYPE_RGB:
                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
                // TODO: support MediaImage layout
                switch (mClientColorFormat) {
                    case COLOR_FormatSurface:
                    case COLOR_FormatRGBFlexible:
                    case COLOR_Format24bitBGR888:
                    case COLOR_Format24bitRGB888:
                        ALOGD("Converter: accept color format "
                                "(client %d component %d) for RGB layout",
                                mClientColorFormat, mComponentColorFormat);
                        break;
                    default:
                        ALOGD("Converter: unrecognized color format "
                                "(client %d component %d) for RGB layout",
                                mClientColorFormat, mComponentColorFormat);
                        mInitCheck = BAD_VALUE;
                        return;
                }
                if (layout.numPlanes != 3) {
                    ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
                    mInitCheck = BAD_VALUE;
                    return;
                }
                break;
            case C2PlanarLayout::TYPE_RGBA:
                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
                // TODO: support MediaImage layout
                switch (mClientColorFormat) {
                    case COLOR_FormatSurface:
                    case COLOR_FormatRGBAFlexible:
                    case COLOR_Format32bitABGR8888:
                    case COLOR_Format32bitARGB8888:
                    case COLOR_Format32bitBGRA8888:
                        ALOGD("Converter: accept color format "
                                "(client %d component %d) for RGBA layout",
                                mClientColorFormat, mComponentColorFormat);
                        break;
                    default:
                        ALOGD("Converter: unrecognized color format "
                                "(client %d component %d) for RGBA layout",
                                mClientColorFormat, mComponentColorFormat);
                        mInitCheck = BAD_VALUE;
                        return;
                }
                if (layout.numPlanes != 4) {
                    ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
                    mInitCheck = BAD_VALUE;
                    return;
                }
                break;
            default:
                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
                if (layout.numPlanes == 1) {
                    const C2PlaneInfo &plane = layout.planes[0];
                    if (plane.colInc < 0 || plane.rowInc < 0) {
                        // Copy-only if we have negative colInc/rowInc
                        tryWrapping = false;
                    }
                    mediaImage->mPlane[0].mOffset = 0;
                    mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
                    mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
                    mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
                    mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
                } else {
                    ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
                            mClientColorFormat, mComponentColorFormat);
                    mInitCheck = NO_INIT;
                    return;
                }
                break;
        }
        if (tryWrapping) {
            // try to map directly. check if the planes are near one another
            const uint8_t *minPtr = mView.data()[0];
            const uint8_t *maxPtr = mView.data()[0];
            int32_t planeSize = 0;
            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
                const C2PlaneInfo &plane = layout.planes[i];
                int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
                ssize_t minOffset = plane.minOffset(
                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
                ssize_t maxOffset = plane.maxOffset(
                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
                if (minPtr > mView.data()[i] + minOffset) {
                    minPtr = mView.data()[i] + minOffset;
                }
                if (maxPtr < mView.data()[i] + maxOffset) {
                    maxPtr = mView.data()[i] + maxOffset;
                }
                planeSize += planeStride * divUp(mAllocatedDepth, 8u)
                        * align(mHeight, 64) / plane.rowSampling;
            }

            if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
                // FIXME: this is risky as reading/writing data out of bounds results
                // in undefined behavior, but gralloc does assume a contiguous mapping
                for (uint32_t i = 0; i < layout.numPlanes; ++i) {
                    const C2PlaneInfo &plane = layout.planes[i];
                    mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
                    mediaImage->mPlane[i].mColInc = plane.colInc;
                    mediaImage->mPlane[i].mRowInc = plane.rowInc;
                    mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
                    mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
                }
                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
                ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
            }
        }
        mediaImage->mNumPlanes = layout.numPlanes;
        mediaImage->mWidth = view.crop().width;
        mediaImage->mHeight = view.crop().height;
        mediaImage->mBitDepth = bitDepth;
        mediaImage->mBitDepthAllocated = mAllocatedDepth;

        uint32_t bufferSize = 0;
        for (uint32_t i = 0; i < layout.numPlanes; ++i) {
            const C2PlaneInfo &plane = layout.planes[i];
            if (plane.allocatedDepth < plane.bitDepth
                    || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
                ALOGD("rightShift value of %u unsupported", plane.rightShift);
                mInitCheck = BAD_VALUE;
                return;
            }
            if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
                ALOGD("endianness value of %u unsupported", plane.endianness);
                mInitCheck = BAD_VALUE;
                return;
            }
            if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
                ALOGD("different allocatedDepth/bitDepth per plane unsupported");
                mInitCheck = BAD_VALUE;
                return;
            }
            // stride is in bytes
            bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
        }

        mBackBufferSize = bufferSize;
        mInitCheck = OK;
    }

    status_t initCheck() const { return mInitCheck; }

    uint32_t backBufferSize() const { return mBackBufferSize; }

    /**
     * Wrap C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not mapped
     * in this function --- the caller should use CopyGraphicView2MediaImage() function to copy the
     * data into a backing buffer explicitly.
     *
     * \return media buffer. This is null if wrapping failed.
     */
    sp<ABuffer> wrap() const {
        if (mBackBuffer == nullptr) {
            return mWrapped;
        }
        return nullptr;
    }

    bool setBackBuffer(const sp<ABuffer> &backBuffer) {
        if (backBuffer == nullptr) {
            return false;
        }
        if (backBuffer->capacity() < mBackBufferSize) {
            return false;
        }
        backBuffer->setRange(0, mBackBufferSize);
        mBackBuffer = backBuffer;
        return true;
    }

    /**
     * Copy C2GraphicView to MediaImage2.
     */
    status_t copyToMediaImage() {
        ATRACE_CALL();
        if (mInitCheck != OK) {
            return mInitCheck;
        }
        return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
    }

    const sp<ABuffer> &imageData() const { return mMediaImage; }

private:
    status_t mInitCheck;

    const C2GraphicView mView;
    uint32_t mWidth;
    uint32_t mHeight;
    int32_t mClientColorFormat; ///< SDK color format for MediaImage
    int32_t mComponentColorFormat; ///< SDK color format from component
    sp<ABuffer> mWrapped; ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
    uint32_t mAllocatedDepth;
    uint32_t mBackBufferSize;
    sp<ABuffer> mMediaImage;
    std::function<sp<ABuffer>(size_t)> mAlloc;

    sp<ABuffer> mBackBuffer; ///< backing buffer if we have to copy C2Buffer <=> ABuffer

    MediaImage2 *getMediaImage() {
        return (MediaImage2 *)mMediaImage->base();
    }
};
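
// Illustrative sketch (not part of the original file): the wrap-or-copy flow
// that the Allocate() functions below follow when driving this converter.
// Only the converter's own methods are real; the alloc callback shown here is
// whatever allocator the caller supplies.
//
//     GraphicView2MediaImageConverter converter(view, format, false /* copy */);
//     if (converter.initCheck() != OK) return nullptr;
//     sp<ABuffer> buffer = converter.wrap();          // zero-copy if planes are contiguous
//     if (buffer == nullptr) {
//         buffer = alloc(converter.backBufferSize()); // otherwise allocate a back buffer
//         if (!converter.setBackBuffer(buffer)) return nullptr;
//         // pixel data is then copied via converter.copyToMediaImage()
//     }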

} // namespace

// GraphicBlockBuffer

// static
sp<GraphicBlockBuffer> GraphicBlockBuffer::Allocate(
        const sp<AMessage> &format,
        const std::shared_ptr<C2GraphicBlock> &block,
        std::function<sp<ABuffer>(size_t)> alloc) {
    ATRACE_BEGIN("GraphicBlockBuffer::Allocate block->map()");
    C2GraphicView view(block->map().get());
    ATRACE_END();
    if (view.error() != C2_OK) {
        ALOGD("C2GraphicBlock::map failed: %d", view.error());
        return nullptr;
    }

    GraphicView2MediaImageConverter converter(view, format, false /* copy */);
    if (converter.initCheck() != OK) {
        ALOGD("Converter init failed: %d", converter.initCheck());
        return nullptr;
    }
    bool wrapped = true;
    sp<ABuffer> buffer = converter.wrap();
    if (buffer == nullptr) {
        buffer = alloc(converter.backBufferSize());
        if (!converter.setBackBuffer(buffer)) {
            ALOGD("Converter failed to set back buffer");
            return nullptr;
        }
        wrapped = false;
    }
    return new GraphicBlockBuffer(
            format,
            buffer,
            std::move(view),
            block,
            converter.imageData(),
            wrapped);
}

GraphicBlockBuffer::GraphicBlockBuffer(
        const sp<AMessage> &format,
        const sp<ABuffer> &buffer,
        C2GraphicView &&view,
        const std::shared_ptr<C2GraphicBlock> &block,
        const sp<ABuffer> &imageData,
        bool wrapped)
    : Codec2Buffer(format, buffer),
      mView(view),
      mBlock(block),
      mWrapped(wrapped) {
    setImageData(imageData);
}

std::shared_ptr<C2Buffer> GraphicBlockBuffer::asC2Buffer() {
    ATRACE_CALL();
    uint32_t width = mView.width();
    uint32_t height = mView.height();
    if (!mWrapped) {
        (void)ImageCopy(mView, base(), imageData());
    }
    return C2Buffer::CreateGraphicBuffer(
            mBlock->share(C2Rect(width, height), C2Fence()));
}

// GraphicMetadataBuffer
GraphicMetadataBuffer::GraphicMetadataBuffer(
        const sp<AMessage> &format,
        const std::shared_ptr<C2Allocator> &alloc)
    : Codec2Buffer(format, new ABuffer(sizeof(VideoNativeMetadata))),
      mAlloc(alloc) {
    ((VideoNativeMetadata *)base())->pBuffer = nullptr;
}

std::shared_ptr<C2Buffer> GraphicMetadataBuffer::asC2Buffer() {
#ifdef __LP64__
    static std::once_flag s_checkOnce;
    static bool s_is64bitOk {true};
    std::call_once(s_checkOnce, [&](){
        const std::string abi32list =
                ::android::base::GetProperty("ro.product.cpu.abilist32", "");
        if (!abi32list.empty()) {
            int32_t inputSurfaceSetting =
                    ::android::base::GetIntProperty("debug.stagefright.c2inputsurface", int32_t(0));
            s_is64bitOk = inputSurfaceSetting != 0;
        }
    });

    if (!s_is64bitOk) {
        ALOGE("GraphicMetadataBuffer does not work in 32+64 system if compiled as 64-bit object "
                "when debug.stagefright.c2inputsurface is set to 0");
        return nullptr;
    }
#endif

    VideoNativeMetadata *meta = (VideoNativeMetadata *)base();
    ANativeWindowBuffer *buffer = (ANativeWindowBuffer *)meta->pBuffer;
    if (buffer == nullptr) {
        ALOGD("VideoNativeMetadata contains null buffer");
        return nullptr;
    }

    ALOGV("VideoNativeMetadata: %dx%d", buffer->width, buffer->height);
    C2Handle *handle = WrapNativeCodec2GrallocHandle(
            buffer->handle,
            buffer->width,
            buffer->height,
            buffer->format,
            buffer->usage,
            buffer->stride);
    std::shared_ptr<C2GraphicAllocation> alloc;
    c2_status_t err = mAlloc->priorGraphicAllocation(handle, &alloc);
    if (err != C2_OK) {
        ALOGD("Failed to wrap VideoNativeMetadata into C2GraphicAllocation");
        native_handle_close(handle);
        native_handle_delete(handle);
        return nullptr;
    }
    std::shared_ptr<C2GraphicBlock> block = _C2BlockFactory::CreateGraphicBlock(alloc);

    meta->pBuffer = 0;
    // TODO: wrap this in C2Fence so that the component can wait when it
    //       actually starts processing.
    if (meta->nFenceFd >= 0) {
        sp<Fence> fence(new Fence(meta->nFenceFd));
        fence->waitForever(LOG_TAG);
    }
    return C2Buffer::CreateGraphicBuffer(
            block->share(C2Rect(buffer->width, buffer->height), C2Fence()));
}

// ConstGraphicBlockBuffer

// static
sp<ConstGraphicBlockBuffer> ConstGraphicBlockBuffer::Allocate(
        const sp<AMessage> &format,
        const std::shared_ptr<C2Buffer> &buffer,
        std::function<sp<ABuffer>(size_t)> alloc) {
    if (!buffer
            || buffer->data().type() != C2BufferData::GRAPHIC
            || buffer->data().graphicBlocks().size() != 1u) {
        ALOGD("C2Buffer precond fail");
        return nullptr;
    }
    ATRACE_BEGIN("ConstGraphicBlockBuffer::Allocate block->map()");
    std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
            buffer->data().graphicBlocks()[0].map().get()));
    ATRACE_END();
    std::unique_ptr<const C2GraphicView> holder;

    GraphicView2MediaImageConverter converter(*view, format, false /* copy */);
    if (converter.initCheck() != OK) {
        ALOGD("Converter init failed: %d", converter.initCheck());
        return nullptr;
    }
    bool wrapped = true;
    sp<ABuffer> aBuffer = converter.wrap();
    if (aBuffer == nullptr) {
        aBuffer = alloc(converter.backBufferSize());
        if (!converter.setBackBuffer(aBuffer)) {
            ALOGD("Converter failed to set back buffer");
            return nullptr;
        }
        wrapped = false;
        converter.copyToMediaImage();
        // We don't need the view.
        holder = std::move(view);
    }
    return new ConstGraphicBlockBuffer(
            format,
            aBuffer,
            std::move(view),
            buffer,
            converter.imageData(),
            wrapped);
}

// static
sp<ConstGraphicBlockBuffer> ConstGraphicBlockBuffer::AllocateEmpty(
        const sp<AMessage> &format,
        std::function<sp<ABuffer>(size_t)> alloc) {
    int32_t width, height;
    if (!format->findInt32("width", &width)
            || !format->findInt32("height", &height)) {
        ALOGD("format had no width / height");
        return nullptr;
    }
    int32_t colorFormat = COLOR_FormatYUV420Flexible;
    int32_t bpp = 12; // 8(Y) + 2(U) + 2(V)
    if (format->findInt32(KEY_COLOR_FORMAT, &colorFormat)) {
        if (colorFormat == COLOR_FormatYUVP010) {
            bpp = 24; // 16(Y) + 4(U) + 4(V)
        }
    }
    sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * bpp / 8));
    if (aBuffer == nullptr) {
        ALOGD("%s: failed to allocate buffer", __func__);
        return nullptr;
    }
    return new ConstGraphicBlockBuffer(
            format,
            aBuffer,
            nullptr,
            nullptr,
            nullptr,
            false);
}

ConstGraphicBlockBuffer::ConstGraphicBlockBuffer(
        const sp<AMessage> &format,
        const sp<ABuffer> &aBuffer,
        std::unique_ptr<const C2GraphicView> &&view,
        const std::shared_ptr<C2Buffer> &buffer,
        const sp<ABuffer> &imageData,
        bool wrapped)
    : Codec2Buffer(format, aBuffer),
      mView(std::move(view)),
      mBufferRef(buffer),
      mWrapped(wrapped) {
    setImageData(imageData);
}

std::shared_ptr<C2Buffer> ConstGraphicBlockBuffer::asC2Buffer() {
    return mBufferRef;
}

void ConstGraphicBlockBuffer::clearC2BufferRefs() {
    mView.reset();
    mBufferRef.reset();
}

bool ConstGraphicBlockBuffer::canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
    if (mWrapped || mBufferRef) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: %swrapped ; buffer ref %s",
                mWrapped ? "" : "not ", mBufferRef ? "exists" : "doesn't exist");
        return false;
    }
    if (!buffer) {
        // Nothing to copy, so we can copy by doing nothing.
        return true;
    }
    if (buffer->data().type() != C2BufferData::GRAPHIC) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: buffer precondition unsatisfied");
        return false;
    }
    if (buffer->data().graphicBlocks().size() == 0) {
        return true;
    } else if (buffer->data().graphicBlocks().size() != 1u) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: too many blocks");
        return false;
    }

    ATRACE_BEGIN("ConstGraphicBlockBuffer::canCopy block->map()");
    GraphicView2MediaImageConverter converter(
            buffer->data().graphicBlocks()[0].map().get(),
            // FIXME: format() is not const, but we cannot change it, so do a const cast here
            const_cast<ConstGraphicBlockBuffer *>(this)->format(),
            true /* copy */);
    ATRACE_END();
    if (converter.initCheck() != OK) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: converter init failed: %d", converter.initCheck());
        return false;
    }
    if (converter.backBufferSize() > capacity()) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: insufficient capacity: req %u has %zu",
                converter.backBufferSize(), capacity());
        return false;
    }
    return true;
}

bool ConstGraphicBlockBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    if (!buffer || buffer->data().graphicBlocks().size() == 0) {
        setRange(0, 0);
        return true;
    }

    GraphicView2MediaImageConverter converter(
            buffer->data().graphicBlocks()[0].map().get(), format(), true /* copy */);
    if (converter.initCheck() != OK) {
        ALOGD("ConstGraphicBlockBuffer::copy: converter init failed: %d", converter.initCheck());
        return false;
    }
    sp<ABuffer> aBuffer = new ABuffer(base(), capacity());
    if (!converter.setBackBuffer(aBuffer)) {
        ALOGD("ConstGraphicBlockBuffer::copy: set back buffer failed");
        return false;
    }
    setRange(0, aBuffer->size()); // align size info
    converter.copyToMediaImage();
    setImageData(converter.imageData());
    mBufferRef = buffer;
    return true;
}

// EncryptedLinearBlockBuffer

EncryptedLinearBlockBuffer::EncryptedLinearBlockBuffer(
        const sp<AMessage> &format,
        const std::shared_ptr<C2LinearBlock> &block,
        const sp<IMemory> &memory,
        int32_t heapSeqNum)
    // TODO: Using unsecurePointer() has some associated security pitfalls
    //       (see declaration for details).
    //       Either document why it is safe in this case or address the
    //       issue (e.g. by copying).
    : Codec2Buffer(format, new ABuffer(memory->unsecurePointer(), memory->size())),
      mBlock(block),
      mMemory(memory),
      mHeapSeqNum(heapSeqNum) {
}

std::shared_ptr<C2Buffer> EncryptedLinearBlockBuffer::asC2Buffer() {
    return C2Buffer::CreateLinearBuffer(mBlock->share(offset(), size(), C2Fence()));
}

void EncryptedLinearBlockBuffer::fillSourceBuffer(
        hardware::drm::V1_0::SharedBuffer *source) {
    BufferChannelBase::IMemoryToSharedBuffer(mMemory, mHeapSeqNum, source);
}

void EncryptedLinearBlockBuffer::fillSourceBuffer(
        hardware::cas::native::V1_0::SharedBuffer *source) {
    ssize_t offset;
    size_t size;

    mHidlMemory = hardware::fromHeap(mMemory->getMemory(&offset, &size));
    source->heapBase = *mHidlMemory;
    source->offset = offset;
    source->size = size;
}

bool EncryptedLinearBlockBuffer::copyDecryptedContent(
        const sp<IMemory> &decrypted, size_t length) {
    C2WriteView view = mBlock->map().get();
    if (view.error() != C2_OK) {
        return false;
    }
    if (view.size() < length) {
        return false;
    }
    memcpy(view.data(), decrypted->unsecurePointer(), length);
    return true;
}

bool EncryptedLinearBlockBuffer::copyDecryptedContentFromMemory(size_t length) {
    return copyDecryptedContent(mMemory, length);
}

native_handle_t *EncryptedLinearBlockBuffer::handle() const {
    return const_cast<native_handle_t *>(mBlock->handle());
}

using ::aidl::android::hardware::graphics::common::Cta861_3;
using ::aidl::android::hardware::graphics::common::Dataspace;
using ::aidl::android::hardware::graphics::common::Smpte2086;

using ::android::gralloc4::MetadataType_Cta861_3;
using ::android::gralloc4::MetadataType_Dataspace;
using ::android::gralloc4::MetadataType_Smpte2086;
using ::android::gralloc4::MetadataType_Smpte2094_40;

using ::android::hardware::Return;
using ::android::hardware::hidl_vec;

using Error4 = ::android::hardware::graphics::mapper::V4_0::Error;
using IMapper4 = ::android::hardware::graphics::mapper::V4_0::IMapper;

namespace {

sp<IMapper4> GetMapper4() {
    static ::android::base::NoDestructor<sp<IMapper4>> sMapper(IMapper4::getService());
    return *sMapper;
}

class Gralloc4Buffer {
public:
    Gralloc4Buffer(const C2Handle *const handle) : mBuffer(nullptr) {
        sp<IMapper4> mapper = GetMapper4();
        if (!mapper) {
            return;
        }
        // Unwrap raw buffer handle from the C2Handle
        native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
        if (!nh) {
            return;
        }
        // Import the raw handle so IMapper can use the buffer. The imported
        // handle must be freed when the client is done with the buffer.
        mapper->importBuffer(
                hardware::hidl_handle(nh),
                [&](const Error4 &error, void *buffer) {
                    if (error == Error4::NONE) {
                        mBuffer = buffer;
                    }
                });

        // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
        //         does not clone the fds. Thus we need to delete the handle
        //         without closing it.
        native_handle_delete(nh);
    }

    ~Gralloc4Buffer() {
        sp<IMapper4> mapper = GetMapper4();
        if (mapper && mBuffer) {
            // Free the imported buffer handle. This does not release the
            // underlying buffer itself.
            mapper->freeBuffer(mBuffer);
        }
    }

    void *get() const { return mBuffer; }
    operator bool() const { return (mBuffer != nullptr); }
private:
    void *mBuffer;
};
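
// Illustrative sketch (not part of the original file): Gralloc4Buffer is an
// RAII import of a C2Handle for IMapper 4.0 metadata access, and the
// getter/setter functions below use it roughly as shown. The handle variable
// and callback here are hypothetical placeholders.
//
//     Gralloc4Buffer buffer(handle);              // importBuffer() in the constructor
//     if (GetMapper4() && buffer) {
//         GetMapper4()->get(buffer.get(), MetadataType_Smpte2086, cb);
//     }                                           // freeBuffer() in the destructor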

} // namespace

c2_status_t GetHdrMetadataFromGralloc4Handle(
        const C2Handle *const handle,
        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
    c2_status_t err = C2_OK;
    sp<IMapper4> mapper = GetMapper4();
    Gralloc4Buffer buffer(handle);
    if (!mapper || !buffer) {
        // Gralloc4 not supported; nothing to do
        return err;
    }
    Error4 mapperErr = Error4::NONE;
    if (staticInfo) {
        ALOGV("Grabbing static HDR info from gralloc4 metadata");
        staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
        memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
        (*staticInfo)->maxCll = 0;
        (*staticInfo)->maxFall = 0;
        IMapper4::get_cb cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
            mapperErr = err;
            if (err != Error4::NONE) {
                return;
            }

            std::optional<Smpte2086> smpte2086;
            gralloc4::decodeSmpte2086(vec, &smpte2086);
            if (smpte2086) {
                (*staticInfo)->mastering.red.x = smpte2086->primaryRed.x;
                (*staticInfo)->mastering.red.y = smpte2086->primaryRed.y;
                (*staticInfo)->mastering.green.x = smpte2086->primaryGreen.x;
                (*staticInfo)->mastering.green.y = smpte2086->primaryGreen.y;
                (*staticInfo)->mastering.blue.x = smpte2086->primaryBlue.x;
                (*staticInfo)->mastering.blue.y = smpte2086->primaryBlue.y;
                (*staticInfo)->mastering.white.x = smpte2086->whitePoint.x;
                (*staticInfo)->mastering.white.y = smpte2086->whitePoint.y;

                (*staticInfo)->mastering.maxLuminance = smpte2086->maxLuminance;
                (*staticInfo)->mastering.minLuminance = smpte2086->minLuminance;
            } else {
                mapperErr = Error4::BAD_VALUE;
            }
        };
        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2086, cb);
        if (!ret.isOk()) {
            err = C2_REFUSED;
        } else if (mapperErr != Error4::NONE) {
            err = C2_CORRUPTED;
        }
        cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
            mapperErr = err;
            if (err != Error4::NONE) {
                return;
            }

            std::optional<Cta861_3> cta861_3;
            gralloc4::decodeCta861_3(vec, &cta861_3);
            if (cta861_3) {
                (*staticInfo)->maxCll = cta861_3->maxContentLightLevel;
                (*staticInfo)->maxFall = cta861_3->maxFrameAverageLightLevel;
            } else {
                mapperErr = Error4::BAD_VALUE;
            }
        };
        ret = mapper->get(buffer.get(), MetadataType_Cta861_3, cb);
        if (!ret.isOk()) {
            err = C2_REFUSED;
        } else if (mapperErr != Error4::NONE) {
            err = C2_CORRUPTED;
        }
    }
    if (dynamicInfo) {
        ALOGV("Grabbing dynamic HDR info from gralloc4 metadata");
        dynamicInfo->reset();
        IMapper4::get_cb cb = [&mapperErr, dynamicInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
            mapperErr = err;
            if (err != Error4::NONE) {
                return;
            }
            if (!dynamicInfo) {
                return;
            }
            *dynamicInfo = C2StreamHdrDynamicMetadataInfo::input::AllocShared(
                    vec.size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
            memcpy((*dynamicInfo)->m.data, vec.data(), vec.size());
        };
        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2094_40, cb);
        if (!ret.isOk() || mapperErr != Error4::NONE) {
            dynamicInfo->reset();
        }
    }

    return err;
}

c2_status_t SetMetadataToGralloc4Handle(
        android_dataspace_t dataSpace,
        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
        const C2Handle *const handle) {
    c2_status_t err = C2_OK;
    sp<IMapper4> mapper = GetMapper4();
    Gralloc4Buffer buffer(handle);
    if (!mapper || !buffer) {
        // Gralloc4 not supported; nothing to do
        return err;
    }
    {
        hidl_vec<uint8_t> metadata;
        if (gralloc4::encodeDataspace(static_cast<Dataspace>(dataSpace), &metadata) == OK) {
            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Dataspace, metadata);
            if (!ret.isOk()) {
                err = C2_REFUSED;
            } else if (ret != Error4::NONE) {
                err = C2_CORRUPTED;
            }
        }
    }
    if (staticInfo && *staticInfo) {
        ALOGV("Setting static HDR info as gralloc4 metadata");
        std::optional<Smpte2086> smpte2086 = Smpte2086{
            {staticInfo->mastering.red.x, staticInfo->mastering.red.y},
            {staticInfo->mastering.green.x, staticInfo->mastering.green.y},
            {staticInfo->mastering.blue.x, staticInfo->mastering.blue.y},
            {staticInfo->mastering.white.x, staticInfo->mastering.white.y},
            staticInfo->mastering.maxLuminance,
            staticInfo->mastering.minLuminance,
        };
        hidl_vec<uint8_t> vec;
        if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
                && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
                && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
                && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
                && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
                && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
                && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
                && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
                && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance
                && gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Smpte2086, vec);
            if (!ret.isOk()) {
                err = C2_REFUSED;
            } else if (ret != Error4::NONE) {
                err = C2_CORRUPTED;
            }
        }
        std::optional<Cta861_3> cta861_3 = Cta861_3{
            staticInfo->maxCll,
            staticInfo->maxFall,
        };
        if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel
                && gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Cta861_3, vec);
            if (!ret.isOk()) {
                err = C2_REFUSED;
            } else if (ret != Error4::NONE) {
                err = C2_CORRUPTED;
            }
        }
    }
    if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
        ALOGV("Setting dynamic HDR info as gralloc4 metadata");
        std::optional<IMapper4::MetadataType> metadataType;
        switch (dynamicInfo->m.type_) {
            case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10:
                // TODO
                break;
            case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40:
                metadataType = MetadataType_Smpte2094_40;
                break;
        }

        if (metadataType) {
            std::vector<uint8_t> smpte2094_40;
            smpte2094_40.resize(dynamicInfo->flexCount());
            memcpy(smpte2094_40.data(), dynamicInfo->m.data, dynamicInfo->flexCount());

            hidl_vec<uint8_t> vec;
            if (gralloc4::encodeSmpte2094_40({ smpte2094_40 }, &vec) == OK) {
                Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
                if (!ret.isOk()) {
                    err = C2_REFUSED;
                } else if (ret != Error4::NONE) {
                    err = C2_CORRUPTED;
                }
            }
        } else {
            err = C2_BAD_VALUE;
        }
    }

    return err;
}

} // namespace android