blob: 10e4b79c24f47dfcaa96897677609fae1f4f4ad3 [file] [log] [blame]
Richard Xief2932a02023-10-20 17:37:57 +00001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17// #define LOG_NDEBUG 0
18#define LOG_TAG "C2SoftDav1dDec"
19#include <android-base/properties.h>
20#include <cutils/properties.h>
21#include <thread>
22
23#include <C2Debug.h>
24#include <C2PlatformSupport.h>
25#include <Codec2BufferUtils.h>
26#include <Codec2CommonUtils.h>
27#include <Codec2Mapper.h>
28#include <SimpleC2Interface.h>
Richard Xief2932a02023-10-20 17:37:57 +000029#include <log/log.h>
30#include <media/stagefright/foundation/AUtils.h>
31#include <media/stagefright/foundation/MediaDefs.h>
32#include "C2SoftDav1dDec.h"
33
Richard Xief2932a02023-10-20 17:37:57 +000034namespace android {
35
// The number of threads used for the dav1d decoder. 0 lets the library pick
// its own default; a positive value from the debug property overrides it.
static const int NUM_THREADS_DAV1D_DEFAULT = 0;
static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";

// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;

// Floor for the advertised maximum input buffer size (2 MiB).
constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;

// Advertised output delay (frames); also used to cap dav1d's max_frame_delay.
constexpr uint32_t kOutputDelay = 4;
46
Richard Xief2932a02023-10-20 17:37:57 +000047class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
48 public:
49 explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
50 : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
51 C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
52 noPrivateBuffers();
53 noInputReferences();
54 noOutputReferences();
55 noInputLatency();
56 noTimeStretch();
57
58 addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
59 .withConstValue(new C2ComponentAttributesSetting(
60 C2Component::ATTRIB_IS_TEMPORAL))
61 .build());
62
63 addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
64 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
65 .withFields({
66 C2F(mSize, width).inRange(2, 4096),
67 C2F(mSize, height).inRange(2, 4096),
68 })
69 .withSetter(SizeSetter)
70 .build());
71
72 addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
73 .withDefault(new C2StreamProfileLevelInfo::input(
74 0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
75 .withFields({C2F(mProfileLevel, profile)
76 .oneOf({C2Config::PROFILE_AV1_0,
77 C2Config::PROFILE_AV1_1}),
78 C2F(mProfileLevel, level)
79 .oneOf({
80 C2Config::LEVEL_AV1_2,
81 C2Config::LEVEL_AV1_2_1,
82 C2Config::LEVEL_AV1_2_2,
83 C2Config::LEVEL_AV1_2_3,
84 C2Config::LEVEL_AV1_3,
85 C2Config::LEVEL_AV1_3_1,
86 C2Config::LEVEL_AV1_3_2,
87 C2Config::LEVEL_AV1_3_3,
88 C2Config::LEVEL_AV1_4,
89 C2Config::LEVEL_AV1_4_1,
90 C2Config::LEVEL_AV1_4_2,
91 C2Config::LEVEL_AV1_4_3,
92 C2Config::LEVEL_AV1_5,
93 C2Config::LEVEL_AV1_5_1,
94 C2Config::LEVEL_AV1_5_2,
95 C2Config::LEVEL_AV1_5_3,
96 })})
97 .withSetter(ProfileLevelSetter, mSize)
98 .build());
99
100 mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
101 addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
102 .withDefault(mHdr10PlusInfoInput)
103 .withFields({
104 C2F(mHdr10PlusInfoInput, m.value).any(),
105 })
106 .withSetter(Hdr10PlusInfoInputSetter)
107 .build());
108
109 mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
110 addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
111 .withDefault(mHdr10PlusInfoOutput)
112 .withFields({
113 C2F(mHdr10PlusInfoOutput, m.value).any(),
114 })
115 .withSetter(Hdr10PlusInfoOutputSetter)
116 .build());
117
118 // default static info
119 C2HdrStaticMetadataStruct defaultStaticInfo{};
120 helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
121 addParameter(
122 DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
123 .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
124 .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
125 C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
126 C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
127 C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
128 C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
129 C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
130 C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
131 C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
132 C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
133 C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
134 C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
135 C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
136 .withSetter(HdrStaticInfoSetter)
137 .build());
138
139 addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
140 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
141 .withFields({
142 C2F(mSize, width).inRange(2, 2048, 2),
143 C2F(mSize, height).inRange(2, 2048, 2),
144 })
145 .withSetter(MaxPictureSizeSetter, mSize)
146 .build());
147
148 addParameter(
149 DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
150 .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
151 .withFields({
152 C2F(mMaxInputSize, value).any(),
153 })
154 .calculatedAs(MaxInputSizeSetter, mMaxSize)
155 .build());
156
157 C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
158 std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
159 C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
160 memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
161
162 defaultColorInfo = C2StreamColorInfo::output::AllocShared(
163 {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
164 helper->addStructDescriptors<C2ChromaOffsetStruct>();
165
166 addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
167 .withConstValue(defaultColorInfo)
168 .build());
169
170 addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
171 .withDefault(new C2StreamColorAspectsTuning::output(
172 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
173 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
174 .withFields({C2F(mDefaultColorAspects, range)
175 .inRange(C2Color::RANGE_UNSPECIFIED,
176 C2Color::RANGE_OTHER),
177 C2F(mDefaultColorAspects, primaries)
178 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
179 C2Color::PRIMARIES_OTHER),
180 C2F(mDefaultColorAspects, transfer)
181 .inRange(C2Color::TRANSFER_UNSPECIFIED,
182 C2Color::TRANSFER_OTHER),
183 C2F(mDefaultColorAspects, matrix)
184 .inRange(C2Color::MATRIX_UNSPECIFIED,
185 C2Color::MATRIX_OTHER)})
186 .withSetter(DefaultColorAspectsSetter)
187 .build());
188
189 addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
190 .withDefault(new C2StreamColorAspectsInfo::input(
191 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
192 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
193 .withFields({C2F(mCodedColorAspects, range)
194 .inRange(C2Color::RANGE_UNSPECIFIED,
195 C2Color::RANGE_OTHER),
196 C2F(mCodedColorAspects, primaries)
197 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
198 C2Color::PRIMARIES_OTHER),
199 C2F(mCodedColorAspects, transfer)
200 .inRange(C2Color::TRANSFER_UNSPECIFIED,
201 C2Color::TRANSFER_OTHER),
202 C2F(mCodedColorAspects, matrix)
203 .inRange(C2Color::MATRIX_UNSPECIFIED,
204 C2Color::MATRIX_OTHER)})
205 .withSetter(CodedColorAspectsSetter)
206 .build());
207
208 addParameter(
209 DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
210 .withDefault(new C2StreamColorAspectsInfo::output(
211 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
212 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
213 .withFields(
214 {C2F(mColorAspects, range)
215 .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
216 C2F(mColorAspects, primaries)
217 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
218 C2Color::PRIMARIES_OTHER),
219 C2F(mColorAspects, transfer)
220 .inRange(C2Color::TRANSFER_UNSPECIFIED,
221 C2Color::TRANSFER_OTHER),
222 C2F(mColorAspects, matrix)
223 .inRange(C2Color::MATRIX_UNSPECIFIED,
224 C2Color::MATRIX_OTHER)})
225 .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
226 .build());
227
228 std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
229 if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
230 pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
231 }
232 // If color format surface isn't added to supported formats, there is no way to know
233 // when the color-format is configured to surface. This is necessary to be able to
234 // choose 10-bit format while decoding 10-bit clips in surface mode.
235 pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
236
237 // TODO: support more formats?
238 addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
239 .withDefault(new C2StreamPixelFormatInfo::output(
240 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
241 .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
242 .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
243 .build());
Harish Mahendrakar98d9a242023-12-19 06:42:48 +0000244
245 addParameter(
246 DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
247 .withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
248 .withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
249 .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
250 .build());
Richard Xief2932a02023-10-20 17:37:57 +0000251 }
252
253 static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
254 C2P<C2StreamPictureSizeInfo::output>& me) {
255 (void)mayBlock;
256 C2R res = C2R::Ok();
257 if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
258 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
259 me.set().width = oldMe.v.width;
260 }
261 if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
262 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
263 me.set().height = oldMe.v.height;
264 }
265 return res;
266 }
267
268 static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
269 const C2P<C2StreamPictureSizeInfo::output>& size) {
270 (void)mayBlock;
271 // TODO: get max width/height from the size's field helpers vs.
272 // hardcoding
273 me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
274 me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
275 return C2R::Ok();
276 }
277
278 static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
279 const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
280 (void)mayBlock;
281 // assume compression ratio of 2, but enforce a floor
282 me.set().value =
283 c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
284 kMinInputBufferSize);
285 return C2R::Ok();
286 }
287
288 static C2R DefaultColorAspectsSetter(bool mayBlock,
289 C2P<C2StreamColorAspectsTuning::output>& me) {
290 (void)mayBlock;
291 if (me.v.range > C2Color::RANGE_OTHER) {
292 me.set().range = C2Color::RANGE_OTHER;
293 }
294 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
295 me.set().primaries = C2Color::PRIMARIES_OTHER;
296 }
297 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
298 me.set().transfer = C2Color::TRANSFER_OTHER;
299 }
300 if (me.v.matrix > C2Color::MATRIX_OTHER) {
301 me.set().matrix = C2Color::MATRIX_OTHER;
302 }
303 return C2R::Ok();
304 }
305
306 static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
307 (void)mayBlock;
308 if (me.v.range > C2Color::RANGE_OTHER) {
309 me.set().range = C2Color::RANGE_OTHER;
310 }
311 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
312 me.set().primaries = C2Color::PRIMARIES_OTHER;
313 }
314 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
315 me.set().transfer = C2Color::TRANSFER_OTHER;
316 }
317 if (me.v.matrix > C2Color::MATRIX_OTHER) {
318 me.set().matrix = C2Color::MATRIX_OTHER;
319 }
320 return C2R::Ok();
321 }
322
323 static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
324 const C2P<C2StreamColorAspectsTuning::output>& def,
325 const C2P<C2StreamColorAspectsInfo::input>& coded) {
326 (void)mayBlock;
327 // take default values for all unspecified fields, and coded values for specified ones
328 me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
329 me.set().primaries =
330 coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
331 me.set().transfer =
332 coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
333 me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
334 return C2R::Ok();
335 }
336
337 static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
338 const C2P<C2StreamPictureSizeInfo::output>& size) {
339 (void)mayBlock;
340 (void)size;
341 (void)me; // TODO: validate
342 return C2R::Ok();
343 }
344
345 std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
346 return mDefaultColorAspects;
347 }
348
349 std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
350
351 static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
352 (void)mayBlock;
353 (void)me; // TODO: validate
354 return C2R::Ok();
355 }
356
357 static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
358 (void)mayBlock;
359 (void)me; // TODO: validate
360 return C2R::Ok();
361 }
362
363 // unsafe getters
364 std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
365 return mPixelFormat;
366 }
367
368 static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
369 (void)mayBlock;
370 if (me.v.mastering.red.x > 1) {
371 me.set().mastering.red.x = 1;
372 }
373 if (me.v.mastering.red.y > 1) {
374 me.set().mastering.red.y = 1;
375 }
376 if (me.v.mastering.green.x > 1) {
377 me.set().mastering.green.x = 1;
378 }
379 if (me.v.mastering.green.y > 1) {
380 me.set().mastering.green.y = 1;
381 }
382 if (me.v.mastering.blue.x > 1) {
383 me.set().mastering.blue.x = 1;
384 }
385 if (me.v.mastering.blue.y > 1) {
386 me.set().mastering.blue.y = 1;
387 }
388 if (me.v.mastering.white.x > 1) {
389 me.set().mastering.white.x = 1;
390 }
391 if (me.v.mastering.white.y > 1) {
392 me.set().mastering.white.y = 1;
393 }
394 if (me.v.mastering.maxLuminance > 65535.0) {
395 me.set().mastering.maxLuminance = 65535.0;
396 }
397 if (me.v.mastering.minLuminance > 6.5535) {
398 me.set().mastering.minLuminance = 6.5535;
399 }
400 if (me.v.maxCll > 65535.0) {
401 me.set().maxCll = 65535.0;
402 }
403 if (me.v.maxFall > 65535.0) {
404 me.set().maxFall = 65535.0;
405 }
406 return C2R::Ok();
407 }
408
409 private:
410 std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
411 std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
412 std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
413 std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
414 std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
415 std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
416 std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
417 std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
418 std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
419 std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
420 std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
421 std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
422};
423
424C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
425 const std::shared_ptr<IntfImpl>& intfImpl)
426 : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
427 mIntf(intfImpl) {
428 mTimeStart = mTimeEnd = systemTime();
429}
430
C2SoftDav1dDec::~C2SoftDav1dDec() {
    // Tear down the dav1d context (and dump state, if enabled) on destruction.
    onRelease();
}
434
435c2_status_t C2SoftDav1dDec::onInit() {
436 return initDecoder() ? C2_OK : C2_CORRUPTED;
437}
438
439c2_status_t C2SoftDav1dDec::onStop() {
440 // TODO: b/277797541 - investigate if the decoder needs to be flushed.
441 mSignalledError = false;
442 mSignalledOutputEos = false;
443 return C2_OK;
444}
445
446void C2SoftDav1dDec::onReset() {
447 (void)onStop();
448 c2_status_t err = onFlush_sm();
449 if (err != C2_OK) {
450 ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
451 destroyDecoder();
452 if (!initDecoder()) {
453 ALOGE("Hard reset failed.");
454 }
455 }
456}
457
458void C2SoftDav1dDec::flushDav1d() {
459 if (mDav1dCtx) {
460 Dav1dPicture p;
461
Richard Xief2932a02023-10-20 17:37:57 +0000462 int res = 0;
463 while (true) {
464 memset(&p, 0, sizeof(p));
465
466 if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
467 if (res != DAV1D_ERR(EAGAIN)) {
468 ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
469 break;
470 } else {
471 res = 0;
472 break;
473 }
474 } else {
475 dav1d_picture_unref(&p);
476 }
477 }
478
479 dav1d_flush(mDav1dCtx);
480 }
481}
482
void C2SoftDav1dDec::onRelease() {
    // Releasing the component simply destroys the decoder instance.
    destroyDecoder();
}
486
487c2_status_t C2SoftDav1dDec::onFlush_sm() {
488 flushDav1d();
489
490 mSignalledError = false;
491 mSignalledOutputEos = false;
492
493 return C2_OK;
494}
495
496static int GetCPUCoreCount() {
497 int cpuCoreCount = 1;
498#if defined(_SC_NPROCESSORS_ONLN)
499 cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
500#else
501 // _SC_NPROC_ONLN must be defined...
502 cpuCoreCount = sysconf(_SC_NPROC_ONLN);
503#endif
504 CHECK(cpuCoreCount >= 1);
505 ALOGV("Number of CPU cores: %d", cpuCoreCount);
506 return cpuCoreCount;
507}
508
509bool C2SoftDav1dDec::initDecoder() {
Richard Xief2932a02023-10-20 17:37:57 +0000510#ifdef FILE_DUMP_ENABLE
Suyog Pawar4602c372023-08-17 11:09:23 +0530511 mC2SoftDav1dDump.initDumping();
Richard Xief2932a02023-10-20 17:37:57 +0000512#endif
513 mSignalledError = false;
514 mSignalledOutputEos = false;
515 mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
516 {
517 IntfImpl::Lock lock = mIntf->lock();
518 mPixelFormatInfo = mIntf->getPixelFormat_l();
519 }
520
521 const char* version = dav1d_version();
522
523 Dav1dSettings lib_settings;
524 dav1d_default_settings(&lib_settings);
525 int cpu_count = GetCPUCoreCount();
526 lib_settings.n_threads = std::max(cpu_count / 2, 1); // use up to half the cores by default.
527
528 int32_t numThreads =
529 android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
530 if (numThreads > 0) lib_settings.n_threads = numThreads;
531
Harish Mahendrakar98d9a242023-12-19 06:42:48 +0000532 lib_settings.max_frame_delay = kOutputDelay;
533
Richard Xief2932a02023-10-20 17:37:57 +0000534 int res = 0;
535 if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
536 ALOGE("dav1d_open failed. status: %d.", res);
537 return false;
538 } else {
539 ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
540 }
541
542 return true;
543}
544
void C2SoftDav1dDec::destroyDecoder() {
    if (mDav1dCtx) {
        dav1d_close(&mDav1dCtx);
        mDav1dCtx = nullptr;
        // Reset the frame counters used for input/output bookkeeping.
        mOutputBufferIndex = 0;
        mInputBufferIndex = 0;
    }
#ifdef FILE_DUMP_ENABLE
    // Close any bitstream/yuv dump files regardless of decoder state.
    mC2SoftDav1dDump.destroyDumping();
#endif
}
556
557void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
558 uint32_t flags = 0;
559 if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
560 flags |= C2FrameData::FLAG_END_OF_STREAM;
561 ALOGV("signalling end_of_stream.");
562 }
563 work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
564 work->worklets.front()->output.buffers.clear();
565 work->worklets.front()->output.ordinal = work->input.ordinal;
566 work->workletsProcessed = 1u;
567}
568
// Wraps the decoded graphic block in a C2Buffer, attaches color aspects and
// per-picture HDR metadata, and completes the matching work item — either the
// one passed in (when indices match) or an earlier pending one via finish().
void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
                                const std::shared_ptr<C2GraphicBlock>& block,
                                const Dav1dPicture &img) {
    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
    {
        // Read the shared output color aspects under the interface lock.
        IntfImpl::Lock lock = mIntf->lock();
        buffer->setInfo(mIntf->getColorAspects_l());
    }

    // NOTE(review): 'img' is captured by copy, which shallow-copies the
    // Dav1dPicture struct — confirm the referenced metadata (mastering_display,
    // itut_t35, ...) remains valid until fillWork runs.
    auto fillWork = [buffer, index, img, this](const std::unique_ptr<C2Work>& work) {
        uint32_t flags = 0;
        // Propagate EOS only on the work item that carries the EOS input flag.
        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
            flags |= C2FrameData::FLAG_END_OF_STREAM;
            ALOGV("signalling end_of_stream.");
        }
        // Push HDR static / HDR10+ config updates derived from this picture.
        getHDRStaticParams(&img, work);
        getHDR10PlusInfoData(&img, work);

        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.buffers.push_back(buffer);
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
    };
    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
        fillWork(work);
    } else {
        finish(index, fillWork);
    }
}
600
// Main work loop: maps the input buffer, prepends an OBU temporal delimiter if
// missing, feeds the bitstream to dav1d (draining pictures between retries),
// and handles codec-config / EOS / empty-input work items.
void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
                             const std::shared_ptr<C2BlockPool>& pool) {
    work->result = C2_OK;
    work->workletsProcessed = 0u;
    work->worklets.front()->output.configUpdate.clear();
    work->worklets.front()->output.flags = work->input.flags;
    // Once an error or EOS has been signalled, reject all further work.
    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }

    size_t inOffset = 0u;
    size_t inSize = 0u;
    C2ReadView rView = mDummyReadView;
    if (!work->input.buffers.empty()) {
        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
        inSize = rView.capacity();
        if (inSize && rView.error()) {
            ALOGE("read view map failed %d", rView.error());
            work->result = C2_CORRUPTED;
            return;
        }
    }

    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

    // Codec-config buffers are not sent to the decoder; just complete the work.
    if (codecConfig) {
        fillEmptyWork(work);
        return;
    }

    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
    if (inSize) {
        mInputBufferIndex = in_frameIndex;

        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);

        mTimeStart = systemTime();
        // NOTE(review): 'delay' is only used by the commented-out log below.
        nsecs_t delay = mTimeStart - mTimeEnd;

        // Send the bitstream data (inputBuffer) to dav1d.
        if (mDav1dCtx) {
            int i_ret = 0;

            // Probe for a sequence header; failure here is not fatal (log only).
            Dav1dSequenceHeader seq;
            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
            if (res == 0) {
                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
                      seq.max_height, (long)in_frameIndex);
            }

            // insert OBU TD if it is not present.
            // TODO: b/286852962
            uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
            Dav1dData data;

            // Allocate 2 extra bytes when a temporal delimiter must be prepended.
            uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
                                                      : dav1d_data_create(&data, inSize + 2);
            if (ptr == nullptr) {
                ALOGE("dav1d_data_create failed!");
                i_ret = -1;

            } else {
                // The input frame index doubles as the dav1d timestamp so output
                // pictures can be matched back to their work items.
                data.m.timestamp = in_frameIndex;

                int new_Size;
                if (obu_type != DAV1D_OBU_TD) {
                    new_Size = (int)(inSize + 2);

                    // OBU TD
                    ptr[0] = 0x12;
                    ptr[1] = 0;

                    memcpy(ptr + 2, bitstream, inSize);
                } else {
                    new_Size = (int)(inSize);
                    // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
                    // avoid memcopy operations.
                    memcpy(ptr, bitstream, new_Size);
                }

                // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
                //       "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
                //       inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
                //       ptr[3], ptr[4]);

                // Dump the bitstream data (inputBuffer) if dumping is enabled.
#ifdef FILE_DUMP_ENABLE
                mC2SoftDav1dDump.dumpInput(ptr, new_Size);
#endif

                // NOTE(review): 'b_draining' is never used in this function.
                bool b_draining = false;
                // NOTE(review): shadows the sequence-header 'res' above.
                int res;

                // Feed the data; on EAGAIN, drain an output picture and retry.
                do {
                    res = dav1d_send_data(mDav1dCtx, &data);
                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
                        /* bitstream decoding errors (typically DAV1D_ERR(EINVAL), are assumed
                         * to be recoverable. Other errors returned from this function are
                         * either unexpected, or considered critical failures.
                         */
                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
                        break;
                    }

                    outputBuffer(pool, work);

                } while (res == DAV1D_ERR(EAGAIN));

                // Any bytes left in 'data' indicate the decoder did not consume
                // the whole buffer; release them to avoid a leak.
                if (data.sz > 0) {
                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
                    dav1d_data_unref(&data);
                }
            }

            mTimeEnd = systemTime();
            nsecs_t decodeTime = mTimeEnd - mTimeStart;
            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

            if (i_ret != 0) {
                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
                work->result = C2_CORRUPTED;
                work->workletsProcessed = 1u;
                mSignalledError = true;
                return;
            }
        }
    }

    // EOS drains all remaining pictures; a flag-only (empty) input is
    // completed without output.
    if (end_of_stream) {
        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
        mSignalledOutputEos = true;
    } else if (!inSize) {
        fillEmptyWork(work);
    }
}
739
// Extracts HDR static metadata (mastering display volume and content light
// level) from the picture and, when it differs from the last value sent,
// queues a C2StreamHdrStaticMetadataInfo config update on the work item.
void C2SoftDav1dDec::getHDRStaticParams(const Dav1dPicture* picture,
                                        const std::unique_ptr<C2Work>& work) {
    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
    bool infoPresent = false;

    if (picture != nullptr) {
        if (picture->mastering_display != nullptr) {
            // Chromaticity coordinates are divided by 65536 — presumably 0.16
            // fixed point per the AV1 metadata OBU; confirm against the spec.
            hdrStaticMetadataInfo.mastering.red.x =
                    picture->mastering_display->primaries[0][0] / 65536.0;
            hdrStaticMetadataInfo.mastering.red.y =
                    picture->mastering_display->primaries[0][1] / 65536.0;

            hdrStaticMetadataInfo.mastering.green.x =
                    picture->mastering_display->primaries[1][0] / 65536.0;
            hdrStaticMetadataInfo.mastering.green.y =
                    picture->mastering_display->primaries[1][1] / 65536.0;

            hdrStaticMetadataInfo.mastering.blue.x =
                    picture->mastering_display->primaries[2][0] / 65536.0;
            hdrStaticMetadataInfo.mastering.blue.y =
                    picture->mastering_display->primaries[2][1] / 65536.0;

            hdrStaticMetadataInfo.mastering.white.x =
                    picture->mastering_display->white_point[0] / 65536.0;
            hdrStaticMetadataInfo.mastering.white.y =
                    picture->mastering_display->white_point[1] / 65536.0;

            // Luminance uses different divisors (256 for max, 16384 for min).
            hdrStaticMetadataInfo.mastering.maxLuminance =
                    picture->mastering_display->max_luminance / 256.0;
            hdrStaticMetadataInfo.mastering.minLuminance =
                    picture->mastering_display->min_luminance / 16384.0;

            infoPresent = true;
        }

        if (picture->content_light != nullptr) {
            hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
            hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
            infoPresent = true;
        }
    }

    // if (infoPresent) {
    //   ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
    //   mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
    //   mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
    //   hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
    //   hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
    //   hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
    //   hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
    //   hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
    //   hdrStaticMetadataInfo.maxCll,
    //   hdrStaticMetadataInfo.maxFall,
    //   mOutputBufferIndex);
    // }

    // config if static info has changed
    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
        work->worklets.front()->output.configUpdate.push_back(
                C2Param::Copy(mHdrStaticMetadataInfo));
    }
}
803
// Extracts the ITU-T T.35 payload (HDR10+ dynamic metadata) from the picture
// and, when it differs from the last value sent, queues a
// C2StreamHdr10PlusInfo config update on the work item.
void C2SoftDav1dDec::getHDR10PlusInfoData(const Dav1dPicture* picture,
                                          const std::unique_ptr<C2Work>& work) {
    if (picture != nullptr) {
        if (picture->itut_t35 != nullptr) {
            std::vector<uint8_t> payload;
            size_t payloadSize = picture->itut_t35->payload_size;
            if (payloadSize > 0) {
                // Re-assemble the T.35 message: country code (+ extension byte
                // when the code is 0xFF) followed by the raw payload bytes.
                payload.push_back(picture->itut_t35->country_code);
                if (picture->itut_t35->country_code == 0xFF) {
                    payload.push_back(picture->itut_t35->country_code_extension_byte);
                }
                payload.insert(payload.end(), picture->itut_t35->payload,
                               picture->itut_t35->payload + picture->itut_t35->payload_size);
            }

            std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
                    C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
            if (!hdr10PlusInfo) {
                ALOGE("Hdr10PlusInfo allocation failed");
                mSignalledError = true;
                work->result = C2_NO_MEMORY;
                return;
            }
            memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());

            // ALOGD("Received a hdr10PlusInfo from picture->itut_t32
            // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
            //       picture->itut_t35->payload_size,
            //       picture->itut_t35->country_code,
            //       mOutputBufferIndex);

            // config if hdr10Plus info has changed
            if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
                mHdr10PlusInfo = std::move(hdr10PlusInfo);
                work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
            }
        }
    }
}
843
// Reads the color-aspect (VUI) fields from the picture's sequence header and,
// when they change, maps them to Codec2 color aspects and pushes them to the
// interface via config() (which in turn feeds ColorAspectsSetter).
void C2SoftDav1dDec::getVuiParams(const Dav1dPicture* picture) {
    VuiColorAspects vuiColorAspects;

    if (picture) {
        vuiColorAspects.primaries = picture->seq_hdr->pri;
        vuiColorAspects.transfer = picture->seq_hdr->trc;
        vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
        vuiColorAspects.fullRange = picture->seq_hdr->color_range;

        // ALOGD("Received a vuiColorAspects from dav1d
        //       (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
        //       at mOutputBufferIndex = % d,
        //       out_frameIndex = % ld.",
        //       vuiColorAspects.primaries,
        //       vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
        //       mOutputBufferIndex, picture->m.timestamp);
    }

    // convert vui aspects to C2 values if changed
    if (!(vuiColorAspects == mBitstreamColorAspects)) {
        mBitstreamColorAspects = vuiColorAspects;
        ColorAspects sfAspects;
        C2StreamColorAspectsInfo::input codedAspects = {0u};
        ColorUtils::convertIsoColorAspectsToCodecAspects(
                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
                vuiColorAspects.fullRange, sfAspects);
        // Fall back to UNSPECIFIED for any aspect the mapper cannot translate.
        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
        }
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
    }
}
886
887void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
888 mSignalledError = true;
889 work->result = error;
890 work->workletsProcessed = 1u;
891}
892
893bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
894 if (size > mTmpFrameBufferSize) {
895 mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
896 if (mTmpFrameBuffer == nullptr) {
897 mTmpFrameBufferSize = 0;
898 return false;
899 }
900 mTmpFrameBufferSize = size;
901 }
902 return true;
903}
904
Richard Xief2932a02023-10-20 17:37:57 +0000905bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
906 const std::unique_ptr<C2Work>& work) {
907 if (!(work && pool)) return false;
908 if (mDav1dCtx == nullptr) return false;
909
910 // Get a decoded picture from dav1d if it is enabled.
911 Dav1dPicture img;
912 memset(&img, 0, sizeof(img));
913
914 int res = 0;
Harish Mahendrakar98d9a242023-12-19 06:42:48 +0000915 res = dav1d_get_picture(mDav1dCtx, &img);
Richard Xief2932a02023-10-20 17:37:57 +0000916 if (res == DAV1D_ERR(EAGAIN)) {
Harish Mahendrakar98d9a242023-12-19 06:42:48 +0000917 ALOGV("Not enough data to output a picture.");
Richard Xief2932a02023-10-20 17:37:57 +0000918 return false;
Harish Mahendrakar98d9a242023-12-19 06:42:48 +0000919 } else if (res != 0) {
Richard Xief2932a02023-10-20 17:37:57 +0000920 ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
921 return false;
922 }
923
924 const int width = img.p.w;
925 const int height = img.p.h;
926 if (width != mWidth || height != mHeight) {
927 mWidth = width;
928 mHeight = height;
929
930 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
931 std::vector<std::unique_ptr<C2SettingResult>> failures;
932 c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
933 if (err == C2_OK) {
934 work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
935 } else {
936 ALOGE("Config update size failed");
937 mSignalledError = true;
938 work->result = C2_CORRUPTED;
939 work->workletsProcessed = 1u;
940 return false;
941 }
942 }
943
944 getVuiParams(&img);
Richard Xief2932a02023-10-20 17:37:57 +0000945
946 // out_frameIndex that the decoded picture returns from dav1d.
947 int64_t out_frameIndex = img.m.timestamp;
948
Richard Xief2932a02023-10-20 17:37:57 +0000949 const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;
950
951 int bitdepth = img.p.bpc;
952
953 std::shared_ptr<C2GraphicBlock> block;
954 uint32_t format = HAL_PIXEL_FORMAT_YV12;
955 std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
956 if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
957 IntfImpl::Lock lock = mIntf->lock();
958 codedColorAspects = mIntf->getColorAspects_l();
959 bool allowRGBA1010102 = false;
960 if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
961 codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
962 codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
963 allowRGBA1010102 = true;
964 }
965 format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
Richard Xief2932a02023-10-20 17:37:57 +0000966 }
967
968 if (mHalPixelFormat != format) {
969 C2StreamPixelFormatInfo::output pixelFormat(0u, format);
970 std::vector<std::unique_ptr<C2SettingResult>> failures;
971 c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
972 if (err == C2_OK) {
973 work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
974 } else {
975 ALOGE("Config update pixelFormat failed");
976 mSignalledError = true;
977 work->workletsProcessed = 1u;
978 work->result = C2_CORRUPTED;
979 return UNKNOWN_ERROR;
980 }
981 mHalPixelFormat = format;
982 }
983
984 C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
985
986 // We always create a graphic block that is width aligned to 16 and height
987 // aligned to 2. We set the correct "crop" value of the image in the call to
988 // createGraphicBuffer() by setting the correct image dimensions.
989 c2_status_t err =
990 pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
991
992 if (err != C2_OK) {
993 ALOGE("fetchGraphicBlock for Output failed with status %d", err);
994 work->result = err;
995 return false;
996 }
997
998 C2GraphicView wView = block->map().get();
999
1000 if (wView.error()) {
1001 ALOGE("graphic view map failed %d", wView.error());
1002 work->result = C2_CORRUPTED;
1003 return false;
1004 }
1005
1006 // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
1007 // block->height(), mWidth, mHeight, (int)out_frameIndex);
1008
1009 mOutputBufferIndex = out_frameIndex;
1010
1011 uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
1012 uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
1013 uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
1014
1015 C2PlanarLayout layout = wView.layout();
1016 size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
1017 size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
1018 size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
1019
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301020 CONV_FORMAT_T convFormat;
1021 switch (img.p.layout) {
1022 case DAV1D_PIXEL_LAYOUT_I444:
1023 convFormat = CONV_FORMAT_I444;
1024 break;
1025 case DAV1D_PIXEL_LAYOUT_I422:
1026 convFormat = CONV_FORMAT_I422;
1027 break;
1028 default:
1029 convFormat = CONV_FORMAT_I420;
1030 break;
1031 }
1032
Richard Xief2932a02023-10-20 17:37:57 +00001033 if (bitdepth == 10) {
1034 // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
1035 // decompression to avoid color conversion.
1036 const uint16_t* srcY = (const uint16_t*)img.data[0];
1037 const uint16_t* srcU = (const uint16_t*)img.data[1];
1038 const uint16_t* srcV = (const uint16_t*)img.data[2];
1039 size_t srcYStride = img.stride[0] / 2;
1040 size_t srcUStride = img.stride[1] / 2;
1041 size_t srcVStride = img.stride[1] / 2;
1042
1043 if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301044 if (isMonochrome) {
1045 const size_t tmpSize = mWidth;
1046 const bool needFill = tmpSize > mTmpFrameBufferSize;
1047 if (!allocTmpFrameBuffer(tmpSize)) {
1048 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
1049 setError(work, C2_NO_MEMORY);
1050 return false;
Richard Xief2932a02023-10-20 17:37:57 +00001051 }
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301052 srcU = srcV = mTmpFrameBuffer.get();
1053 srcUStride = srcVStride = 0;
1054 if (needFill) {
1055 std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
1056 }
Richard Xief2932a02023-10-20 17:37:57 +00001057 }
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301058 convertPlanar16ToY410OrRGBA1010102(
1059 dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
1060 dstYStride, mWidth, mHeight,
1061 std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects),
1062 convFormat);
Richard Xief2932a02023-10-20 17:37:57 +00001063 } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
1064 dstYStride /= 2;
1065 dstUStride /= 2;
1066 dstVStride /= 2;
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301067 size_t tmpSize = 0;
Richard Xief2932a02023-10-20 17:37:57 +00001068 if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
1069 (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301070 tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
Richard Xief2932a02023-10-20 17:37:57 +00001071 if (!allocTmpFrameBuffer(tmpSize)) {
1072 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
1073 setError(work, C2_NO_MEMORY);
1074 return false;
1075 }
Richard Xief2932a02023-10-20 17:37:57 +00001076 }
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301077 convertPlanar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV, srcYStride,
1078 srcUStride, srcVStride, dstYStride, dstUStride, dstVStride,
1079 mWidth, mHeight, isMonochrome, convFormat, mTmpFrameBuffer.get(),
1080 tmpSize);
Richard Xief2932a02023-10-20 17:37:57 +00001081 } else {
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301082 size_t tmpSize = 0;
Richard Xief2932a02023-10-20 17:37:57 +00001083 if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301084 tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
Richard Xief2932a02023-10-20 17:37:57 +00001085 if (!allocTmpFrameBuffer(tmpSize)) {
1086 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
1087 setError(work, C2_NO_MEMORY);
1088 return false;
1089 }
Richard Xief2932a02023-10-20 17:37:57 +00001090 }
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301091 convertPlanar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
1092 srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
1093 isMonochrome, convFormat, mTmpFrameBuffer.get(), tmpSize);
Richard Xief2932a02023-10-20 17:37:57 +00001094 }
1095
Richard Xief2932a02023-10-20 17:37:57 +00001096 // if(mOutputBufferIndex % 100 == 0)
1097 ALOGV("output a 10bit picture %dx%d from dav1d "
1098 "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
1099 mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
1100
Suyog Pawar4602c372023-08-17 11:09:23 +05301101 // Dump the output buffer if dumping is enabled (debug only).
1102#ifdef FILE_DUMP_ENABLE
1103 mC2SoftDav1dDump.dumpOutput<uint16_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
1104 mWidth, mHeight);
Richard Xief2932a02023-10-20 17:37:57 +00001105#endif
1106 } else {
1107 const uint8_t* srcY = (const uint8_t*)img.data[0];
1108 const uint8_t* srcU = (const uint8_t*)img.data[1];
1109 const uint8_t* srcV = (const uint8_t*)img.data[2];
1110
1111 size_t srcYStride = img.stride[0];
1112 size_t srcUStride = img.stride[1];
1113 size_t srcVStride = img.stride[1];
1114
Richard Xief2932a02023-10-20 17:37:57 +00001115 // if(mOutputBufferIndex % 100 == 0)
1116 ALOGV("output a 8bit picture %dx%d from dav1d "
1117 "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
1118 mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
1119
Suyog Pawar4602c372023-08-17 11:09:23 +05301120 // Dump the output buffer is dumping is enabled (debug only)
1121#ifdef FILE_DUMP_ENABLE
1122 mC2SoftDav1dDump.dumpOutput<uint8_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
1123 mWidth, mHeight);
Richard Xief2932a02023-10-20 17:37:57 +00001124#endif
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301125 convertPlanar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
1126 dstYStride, dstUStride, dstVStride, mWidth, mHeight, isMonochrome,
1127 convFormat);
Richard Xief2932a02023-10-20 17:37:57 +00001128 }
1129
Harish Mahendrakar98d9a242023-12-19 06:42:48 +00001130 finishWork(out_frameIndex, work, std::move(block), img);
Richard Xief2932a02023-10-20 17:37:57 +00001131 dav1d_picture_unref(&img);
Richard Xief2932a02023-10-20 17:37:57 +00001132 block = nullptr;
1133 return true;
1134}
1135
1136c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
1137 const std::shared_ptr<C2BlockPool>& pool,
1138 const std::unique_ptr<C2Work>& work) {
1139 if (drainMode == NO_DRAIN) {
1140 ALOGW("drain with NO_DRAIN: no-op");
1141 return C2_OK;
1142 }
1143 if (drainMode == DRAIN_CHAIN) {
1144 ALOGW("DRAIN_CHAIN not supported");
1145 return C2_OMITTED;
1146 }
1147
1148 while (outputBuffer(pool, work)) {
1149 }
1150
1151 if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
1152 fillEmptyWork(work);
1153 }
1154
1155 return C2_OK;
1156}
1157
// Public drain entry point: delegates to drainInternal() with no work item.
c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
    return drainInternal(drainMode, pool, nullptr);
}
1161
// Factory the Codec2 framework uses (via CreateCodec2Factory() below) to
// instantiate the software dav1d AV1 decoder component and its interface.
class C2SoftDav1dFactory : public C2ComponentFactory {
  public:
    // Caches the platform store's parameter reflector for interface creation.
    C2SoftDav1dFactory()
        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
                  GetCodec2PlatformComponentStore()->getParamReflector())) {}

    // Creates a full decoder component; |deleter| is supplied by the
    // framework and destroys the component when the last reference drops.
    virtual c2_status_t createComponent(c2_node_id_t id,
                                        std::shared_ptr<C2Component>* const component,
                                        std::function<void(C2Component*)> deleter) override {
        *component = std::shared_ptr<C2Component>(
                new C2SoftDav1dDec(COMPONENT_NAME, id,
                                   std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
                deleter);
        return C2_OK;
    }

    // Creates only the parameter interface (no decoder resources allocated).
    virtual c2_status_t createInterface(
            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
            std::function<void(C2ComponentInterface*)> deleter) override {
        *interface = std::shared_ptr<C2ComponentInterface>(
                new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
                        COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
                deleter);
        return C2_OK;
    }

    virtual ~C2SoftDav1dFactory() override = default;

  private:
    // Reflector shared by all interfaces created by this factory.
    std::shared_ptr<C2ReflectorHelper> mHelper;
};
1193
1194} // namespace android
1195
// Entry point looked up by the Codec2 component loader after dlopen(); returns
// a heap-allocated factory that the caller releases via DestroyCodec2Factory().
// cfi_canonical_jump_table makes the exported symbol's address canonical so
// indirect calls from other modules pass Control Flow Integrity checks.
__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
    ALOGV("in %s", __func__);
    return new ::android::C2SoftDav1dFactory();
}
1200
// Counterpart to CreateCodec2Factory(): releases a factory created above.
__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
        ::C2ComponentFactory* factory) {
    ALOGV("in %s", __func__);
    delete factory;
}