/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "C2SoftDav1dDec"
#include <android-base/properties.h>
#include <cutils/properties.h>
#include <thread>

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include "C2SoftDav1dDec.h"

namespace android {

// Flag to enable dumping the bitstream and the decoded pictures to files.
static const bool ENABLE_DUMPING_FILES_DEFAULT = false;
static const char ENABLE_DUMPING_FILES_PROPERTY[] = "debug.dav1d.enabledumping";

// The number of frames to dump to a file.
static const int NUM_FRAMES_TO_DUMP_DEFAULT = INT_MAX;
static const char NUM_FRAMES_TO_DUMP_PROPERTY[] = "debug.dav1d.numframestodump";

// The number of threads used for the dav1d decoder.
static const int NUM_THREADS_DAV1D_DEFAULT = 0;
static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";
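
// For example, on debuggable builds these properties can be set at runtime:
//   adb shell setprop debug.dav1d.enabledumping true
//   adb shell setprop debug.dav1d.numframestodump 300
//   adb shell setprop debug.dav1d.numthreads 4
// The dump files themselves are only written when the component is built with
// FILE_DUMP_ENABLE defined.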

// The codec name is set and passed in as a compile flag from Android.bp.
constexpr char COMPONENT_NAME[] = CODECNAME;

constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;

53class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
54 public:
55 explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
56 : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
57 C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
58 noPrivateBuffers();
59 noInputReferences();
60 noOutputReferences();
61 noInputLatency();
62 noTimeStretch();
63
64 addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
65 .withConstValue(new C2ComponentAttributesSetting(
66 C2Component::ATTRIB_IS_TEMPORAL))
67 .build());
68
69 addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
70 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
71 .withFields({
72 C2F(mSize, width).inRange(2, 4096),
73 C2F(mSize, height).inRange(2, 4096),
74 })
75 .withSetter(SizeSetter)
76 .build());
77
78 addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
79 .withDefault(new C2StreamProfileLevelInfo::input(
80 0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
81 .withFields({C2F(mProfileLevel, profile)
82 .oneOf({C2Config::PROFILE_AV1_0,
83 C2Config::PROFILE_AV1_1}),
84 C2F(mProfileLevel, level)
85 .oneOf({
86 C2Config::LEVEL_AV1_2,
87 C2Config::LEVEL_AV1_2_1,
88 C2Config::LEVEL_AV1_2_2,
89 C2Config::LEVEL_AV1_2_3,
90 C2Config::LEVEL_AV1_3,
91 C2Config::LEVEL_AV1_3_1,
92 C2Config::LEVEL_AV1_3_2,
93 C2Config::LEVEL_AV1_3_3,
94 C2Config::LEVEL_AV1_4,
95 C2Config::LEVEL_AV1_4_1,
96 C2Config::LEVEL_AV1_4_2,
97 C2Config::LEVEL_AV1_4_3,
98 C2Config::LEVEL_AV1_5,
99 C2Config::LEVEL_AV1_5_1,
100 C2Config::LEVEL_AV1_5_2,
101 C2Config::LEVEL_AV1_5_3,
102 })})
103 .withSetter(ProfileLevelSetter, mSize)
104 .build());
105
106 mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
107 addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
108 .withDefault(mHdr10PlusInfoInput)
109 .withFields({
110 C2F(mHdr10PlusInfoInput, m.value).any(),
111 })
112 .withSetter(Hdr10PlusInfoInputSetter)
113 .build());
114
115 mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
116 addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
117 .withDefault(mHdr10PlusInfoOutput)
118 .withFields({
119 C2F(mHdr10PlusInfoOutput, m.value).any(),
120 })
121 .withSetter(Hdr10PlusInfoOutputSetter)
122 .build());
123
124 // default static info
125 C2HdrStaticMetadataStruct defaultStaticInfo{};
126 helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
127 addParameter(
128 DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
129 .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
130 .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
131 C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
132 C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
133 C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
134 C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
135 C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
                             C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
                             C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
138 C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
139 C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
140 C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
141 C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
142 .withSetter(HdrStaticInfoSetter)
143 .build());
144
145 addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
146 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
147 .withFields({
148 C2F(mSize, width).inRange(2, 2048, 2),
149 C2F(mSize, height).inRange(2, 2048, 2),
150 })
151 .withSetter(MaxPictureSizeSetter, mSize)
152 .build());
153
154 addParameter(
155 DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
156 .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
157 .withFields({
158 C2F(mMaxInputSize, value).any(),
159 })
160 .calculatedAs(MaxInputSizeSetter, mMaxSize)
161 .build());
162
163 C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
164 std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
165 C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
166 memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
167
168 defaultColorInfo = C2StreamColorInfo::output::AllocShared(
169 {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
170 helper->addStructDescriptors<C2ChromaOffsetStruct>();
171
172 addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
173 .withConstValue(defaultColorInfo)
174 .build());
175
176 addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
177 .withDefault(new C2StreamColorAspectsTuning::output(
178 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
179 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
180 .withFields({C2F(mDefaultColorAspects, range)
181 .inRange(C2Color::RANGE_UNSPECIFIED,
182 C2Color::RANGE_OTHER),
183 C2F(mDefaultColorAspects, primaries)
184 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
185 C2Color::PRIMARIES_OTHER),
186 C2F(mDefaultColorAspects, transfer)
187 .inRange(C2Color::TRANSFER_UNSPECIFIED,
188 C2Color::TRANSFER_OTHER),
189 C2F(mDefaultColorAspects, matrix)
190 .inRange(C2Color::MATRIX_UNSPECIFIED,
191 C2Color::MATRIX_OTHER)})
192 .withSetter(DefaultColorAspectsSetter)
193 .build());
194
195 addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
196 .withDefault(new C2StreamColorAspectsInfo::input(
197 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
198 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
199 .withFields({C2F(mCodedColorAspects, range)
200 .inRange(C2Color::RANGE_UNSPECIFIED,
201 C2Color::RANGE_OTHER),
202 C2F(mCodedColorAspects, primaries)
203 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
204 C2Color::PRIMARIES_OTHER),
205 C2F(mCodedColorAspects, transfer)
206 .inRange(C2Color::TRANSFER_UNSPECIFIED,
207 C2Color::TRANSFER_OTHER),
208 C2F(mCodedColorAspects, matrix)
209 .inRange(C2Color::MATRIX_UNSPECIFIED,
210 C2Color::MATRIX_OTHER)})
211 .withSetter(CodedColorAspectsSetter)
212 .build());
213
214 addParameter(
215 DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
216 .withDefault(new C2StreamColorAspectsInfo::output(
217 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
218 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
219 .withFields(
220 {C2F(mColorAspects, range)
221 .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
222 C2F(mColorAspects, primaries)
223 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
224 C2Color::PRIMARIES_OTHER),
225 C2F(mColorAspects, transfer)
226 .inRange(C2Color::TRANSFER_UNSPECIFIED,
227 C2Color::TRANSFER_OTHER),
228 C2F(mColorAspects, matrix)
229 .inRange(C2Color::MATRIX_UNSPECIFIED,
230 C2Color::MATRIX_OTHER)})
231 .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
232 .build());
233
234 std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
235 if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
236 pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
237 }
238 // If color format surface isn't added to supported formats, there is no way to know
239 // when the color-format is configured to surface. This is necessary to be able to
240 // choose 10-bit format while decoding 10-bit clips in surface mode.
241 pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
242
243 // TODO: support more formats?
244 addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
245 .withDefault(new C2StreamPixelFormatInfo::output(
246 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
247 .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
248 .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
249 .build());
250 }
251
252 static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
253 C2P<C2StreamPictureSizeInfo::output>& me) {
254 (void)mayBlock;
255 C2R res = C2R::Ok();
256 if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
257 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
258 me.set().width = oldMe.v.width;
259 }
260 if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
261 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
262 me.set().height = oldMe.v.height;
263 }
264 return res;
265 }
266
267 static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
268 const C2P<C2StreamPictureSizeInfo::output>& size) {
269 (void)mayBlock;
270 // TODO: get max width/height from the size's field helpers vs.
271 // hardcoding
272 me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
273 me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
274 return C2R::Ok();
275 }
276
277 static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
278 const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
279 (void)mayBlock;
280 // assume compression ratio of 2, but enforce a floor
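        // A 64x64 superblock of 8-bit 4:2:0 content is 64 * 64 * 3 / 2 = 6144 bytes raw,
        // so the assumed 2:1 compression ratio yields the 3072 bytes-per-superblock factor.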
281 me.set().value =
282 c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
283 kMinInputBufferSize);
284 return C2R::Ok();
285 }
286
287 static C2R DefaultColorAspectsSetter(bool mayBlock,
288 C2P<C2StreamColorAspectsTuning::output>& me) {
289 (void)mayBlock;
290 if (me.v.range > C2Color::RANGE_OTHER) {
291 me.set().range = C2Color::RANGE_OTHER;
292 }
293 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
294 me.set().primaries = C2Color::PRIMARIES_OTHER;
295 }
296 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
297 me.set().transfer = C2Color::TRANSFER_OTHER;
298 }
299 if (me.v.matrix > C2Color::MATRIX_OTHER) {
300 me.set().matrix = C2Color::MATRIX_OTHER;
301 }
302 return C2R::Ok();
303 }
304
305 static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
306 (void)mayBlock;
307 if (me.v.range > C2Color::RANGE_OTHER) {
308 me.set().range = C2Color::RANGE_OTHER;
309 }
310 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
311 me.set().primaries = C2Color::PRIMARIES_OTHER;
312 }
313 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
314 me.set().transfer = C2Color::TRANSFER_OTHER;
315 }
316 if (me.v.matrix > C2Color::MATRIX_OTHER) {
317 me.set().matrix = C2Color::MATRIX_OTHER;
318 }
319 return C2R::Ok();
320 }
321
322 static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
323 const C2P<C2StreamColorAspectsTuning::output>& def,
324 const C2P<C2StreamColorAspectsInfo::input>& coded) {
325 (void)mayBlock;
326 // take default values for all unspecified fields, and coded values for specified ones
327 me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
328 me.set().primaries =
329 coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
330 me.set().transfer =
331 coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
332 me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
333 return C2R::Ok();
334 }
335
336 static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
337 const C2P<C2StreamPictureSizeInfo::output>& size) {
338 (void)mayBlock;
339 (void)size;
340 (void)me; // TODO: validate
341 return C2R::Ok();
342 }
343
344 std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
345 return mDefaultColorAspects;
346 }
347
348 std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
349
350 static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
351 (void)mayBlock;
352 (void)me; // TODO: validate
353 return C2R::Ok();
354 }
355
356 static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
357 (void)mayBlock;
358 (void)me; // TODO: validate
359 return C2R::Ok();
360 }
361
362 // unsafe getters
363 std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
364 return mPixelFormat;
365 }
366
367 static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
368 (void)mayBlock;
369 if (me.v.mastering.red.x > 1) {
370 me.set().mastering.red.x = 1;
371 }
372 if (me.v.mastering.red.y > 1) {
373 me.set().mastering.red.y = 1;
374 }
375 if (me.v.mastering.green.x > 1) {
376 me.set().mastering.green.x = 1;
377 }
378 if (me.v.mastering.green.y > 1) {
379 me.set().mastering.green.y = 1;
380 }
381 if (me.v.mastering.blue.x > 1) {
382 me.set().mastering.blue.x = 1;
383 }
384 if (me.v.mastering.blue.y > 1) {
385 me.set().mastering.blue.y = 1;
386 }
387 if (me.v.mastering.white.x > 1) {
388 me.set().mastering.white.x = 1;
389 }
390 if (me.v.mastering.white.y > 1) {
391 me.set().mastering.white.y = 1;
392 }
393 if (me.v.mastering.maxLuminance > 65535.0) {
394 me.set().mastering.maxLuminance = 65535.0;
395 }
396 if (me.v.mastering.minLuminance > 6.5535) {
397 me.set().mastering.minLuminance = 6.5535;
398 }
399 if (me.v.maxCll > 65535.0) {
400 me.set().maxCll = 65535.0;
401 }
402 if (me.v.maxFall > 65535.0) {
403 me.set().maxFall = 65535.0;
404 }
405 return C2R::Ok();
406 }
407
408 private:
409 std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
410 std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
411 std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
412 std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
413 std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
414 std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
415 std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
416 std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
417 std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
418 std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
419 std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
420 std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
421};
422
423C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
424 const std::shared_ptr<IntfImpl>& intfImpl)
425 : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
426 mIntf(intfImpl) {
427 mTimeStart = mTimeEnd = systemTime();
428}
429
430C2SoftDav1dDec::~C2SoftDav1dDec() {
431 onRelease();
432}
433
434c2_status_t C2SoftDav1dDec::onInit() {
435 return initDecoder() ? C2_OK : C2_CORRUPTED;
436}
437
438c2_status_t C2SoftDav1dDec::onStop() {
439 // TODO: b/277797541 - investigate if the decoder needs to be flushed.
440 mSignalledError = false;
441 mSignalledOutputEos = false;
442 return C2_OK;
443}
444
445void C2SoftDav1dDec::onReset() {
446 (void)onStop();
447 c2_status_t err = onFlush_sm();
448 if (err != C2_OK) {
449 ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
450 destroyDecoder();
451 if (!initDecoder()) {
452 ALOGE("Hard reset failed.");
453 }
454 }
455}
456
457void C2SoftDav1dDec::flushDav1d() {
458 if (mDav1dCtx) {
459 Dav1dPicture p;
460
461 while (mDecodedPictures.size() > 0) {
462 p = mDecodedPictures.front();
463 mDecodedPictures.pop_front();
464
465 dav1d_picture_unref(&p);
466 }
467
468 int res = 0;
469 while (true) {
470 memset(&p, 0, sizeof(p));
471
472 if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
473 if (res != DAV1D_ERR(EAGAIN)) {
474 ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
475 break;
476 } else {
477 res = 0;
478 break;
479 }
480 } else {
481 dav1d_picture_unref(&p);
482 }
483 }
484
485 dav1d_flush(mDav1dCtx);
486 }
487}
488
489void C2SoftDav1dDec::onRelease() {
490 destroyDecoder();
491}
492
493c2_status_t C2SoftDav1dDec::onFlush_sm() {
494 flushDav1d();
495
496 mSignalledError = false;
497 mSignalledOutputEos = false;
498
499 return C2_OK;
500}
501
502static int GetCPUCoreCount() {
503 int cpuCoreCount = 1;
504#if defined(_SC_NPROCESSORS_ONLN)
505 cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
506#else
507 // _SC_NPROC_ONLN must be defined...
508 cpuCoreCount = sysconf(_SC_NPROC_ONLN);
509#endif
510 CHECK(cpuCoreCount >= 1);
511 ALOGV("Number of CPU cores: %d", cpuCoreCount);
512 return cpuCoreCount;
513}
514
515bool C2SoftDav1dDec::initDecoder() {
516 nsecs_t now = systemTime();
517#ifdef FILE_DUMP_ENABLE
518 snprintf(mInDataFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_DATA_DUMP_EXT);
519 snprintf(mInSizeFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_SIZE_DUMP_EXT);
520 snprintf(mDav1dOutYuvFileName, 256, "%s_%" PRId64 "dx.%s", DUMP_FILE_PATH, now,
521 OUTPUT_YUV_DUMP_EXT);
522
523 bool enableDumping = android::base::GetBoolProperty(ENABLE_DUMPING_FILES_PROPERTY,
524 ENABLE_DUMPING_FILES_DEFAULT);
525
526 num_frames_to_dump =
527 android::base::GetIntProperty(NUM_FRAMES_TO_DUMP_PROPERTY, NUM_FRAMES_TO_DUMP_DEFAULT);
528
529 if (enableDumping) {
530 ALOGD("enableDumping = %d, num_frames_to_dump = %d", enableDumping, num_frames_to_dump);
531
532 mInDataFile = fopen(mInDataFileName, "wb");
533 if (mInDataFile == nullptr) {
534 ALOGD("Could not open file %s", mInDataFileName);
535 }
536
537 mInSizeFile = fopen(mInSizeFileName, "wb");
538 if (mInSizeFile == nullptr) {
539 ALOGD("Could not open file %s", mInSizeFileName);
540 }
541
542 mDav1dOutYuvFile = fopen(mDav1dOutYuvFileName, "wb");
543 if (mDav1dOutYuvFile == nullptr) {
544 ALOGD("Could not open file %s", mDav1dOutYuvFileName);
545 }
546 }
547#endif
548 mSignalledError = false;
549 mSignalledOutputEos = false;
550 mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
551 {
552 IntfImpl::Lock lock = mIntf->lock();
553 mPixelFormatInfo = mIntf->getPixelFormat_l();
554 }
555
556 const char* version = dav1d_version();
557
558 Dav1dSettings lib_settings;
559 dav1d_default_settings(&lib_settings);
560 int cpu_count = GetCPUCoreCount();
561 lib_settings.n_threads = std::max(cpu_count / 2, 1); // use up to half the cores by default.
562
563 int32_t numThreads =
564 android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
565 if (numThreads > 0) lib_settings.n_threads = numThreads;
566
567 int res = 0;
568 if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
569 ALOGE("dav1d_open failed. status: %d.", res);
570 return false;
571 } else {
572 ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
573 }
574
575 return true;
576}
577
578void C2SoftDav1dDec::destroyDecoder() {
579 if (mDav1dCtx) {
580 Dav1dPicture p;
581 while (mDecodedPictures.size() > 0) {
582 memset(&p, 0, sizeof(p));
583 p = mDecodedPictures.front();
584 mDecodedPictures.pop_front();
585
586 dav1d_picture_unref(&p);
587 }
588
589 dav1d_close(&mDav1dCtx);
590 mDav1dCtx = nullptr;
591 mOutputBufferIndex = 0;
592 mInputBufferIndex = 0;
593 }
594#ifdef FILE_DUMP_ENABLE
595 if (mInDataFile != nullptr) {
596 fclose(mInDataFile);
597 mInDataFile = nullptr;
598 }
599
600 if (mInSizeFile != nullptr) {
601 fclose(mInSizeFile);
602 mInSizeFile = nullptr;
603 }
604
605 if (mDav1dOutYuvFile != nullptr) {
606 fclose(mDav1dOutYuvFile);
607 mDav1dOutYuvFile = nullptr;
608 }
609#endif
610}
611
612void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
613 uint32_t flags = 0;
614 if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
615 flags |= C2FrameData::FLAG_END_OF_STREAM;
616 ALOGV("signalling end_of_stream.");
617 }
618 work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
619 work->worklets.front()->output.buffers.clear();
620 work->worklets.front()->output.ordinal = work->input.ordinal;
621 work->workletsProcessed = 1u;
622}
623
624void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
625 const std::shared_ptr<C2GraphicBlock>& block) {
626 std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
627 {
628 IntfImpl::Lock lock = mIntf->lock();
629 buffer->setInfo(mIntf->getColorAspects_l());
630 }
631 auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
632 uint32_t flags = 0;
633 if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
634 (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
635 flags |= C2FrameData::FLAG_END_OF_STREAM;
636 ALOGV("signalling end_of_stream.");
637 }
638 work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
639 work->worklets.front()->output.buffers.clear();
640 work->worklets.front()->output.buffers.push_back(buffer);
641 work->worklets.front()->output.ordinal = work->input.ordinal;
642 work->workletsProcessed = 1u;
643 };
644 if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
645 fillWork(work);
646 } else {
647 finish(index, fillWork);
648 }
649}
650
651void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
652 const std::shared_ptr<C2BlockPool>& pool) {
653 work->result = C2_OK;
654 work->workletsProcessed = 0u;
655 work->worklets.front()->output.configUpdate.clear();
656 work->worklets.front()->output.flags = work->input.flags;
657 if (mSignalledError || mSignalledOutputEos) {
658 work->result = C2_BAD_VALUE;
659 return;
660 }
661
662 size_t inOffset = 0u;
663 size_t inSize = 0u;
664 C2ReadView rView = mDummyReadView;
665 if (!work->input.buffers.empty()) {
666 rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
667 inSize = rView.capacity();
668 if (inSize && rView.error()) {
669 ALOGE("read view map failed %d", rView.error());
670 work->result = C2_CORRUPTED;
671 return;
672 }
673 }
674
675 bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
676 bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
677
678 if (codecConfig) {
679 fillEmptyWork(work);
680 return;
681 }
682
683 int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
684 if (inSize) {
685 mInputBufferIndex = in_frameIndex;
686
687 uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
688
689 mTimeStart = systemTime();
690 nsecs_t delay = mTimeStart - mTimeEnd;
691
692 // Send the bitstream data (inputBuffer) to dav1d.
693 if (mDav1dCtx) {
694 int i_ret = 0;
695
696 Dav1dSequenceHeader seq;
697 int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
698 if (res == 0) {
699 ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
700 seq.max_height, (long)in_frameIndex);
701 }
702
703 // insert OBU TD if it is not present.
704 // TODO: b/286852962
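            // A temporal delimiter (TD) OBU marks the start of a temporal unit; one is
            // prepended here when the access unit does not already begin with it. 0x12 is
            // the OBU header byte for obu_type == OBU_TEMPORAL_DELIMITER (2) with
            // obu_has_size_field set, and the following 0 is its (empty) payload size.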
705 uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
706 Dav1dData data;
707
708 uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
709 : dav1d_data_create(&data, inSize + 2);
710 if (ptr == nullptr) {
711 ALOGE("dav1d_data_create failed!");
712 i_ret = -1;
713
714 } else {
715 data.m.timestamp = in_frameIndex;
716
717 int new_Size;
718 if (obu_type != DAV1D_OBU_TD) {
719 new_Size = (int)(inSize + 2);
720
721 // OBU TD
722 ptr[0] = 0x12;
723 ptr[1] = 0;
724
725 memcpy(ptr + 2, bitstream, inSize);
726 } else {
727 new_Size = (int)(inSize);
728 // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
729 // avoid memcopy operations.
730 memcpy(ptr, bitstream, new_Size);
731 }
732
733 // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
734 // "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
735 // inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
736 // ptr[3], ptr[4]);
737
738 // Dump the bitstream data (inputBuffer) if dumping is enabled.
739#ifdef FILE_DUMP_ENABLE
740 if (mInDataFile) {
741 int ret = fwrite(ptr, 1, new_Size, mInDataFile);
742
743 if (ret != new_Size) {
744 ALOGE("Error in fwrite %s, requested %d, returned %d", mInDataFileName,
745 new_Size, ret);
746 }
747 }
748
749 // Dump the size per inputBuffer if dumping is enabled.
750 if (mInSizeFile) {
751 int ret = fwrite(&new_Size, 1, 4, mInSizeFile);
752
753 if (ret != 4) {
754 ALOGE("Error in fwrite %s, requested %d, returned %d", mInSizeFileName, 4,
755 ret);
756 }
757 }
758#endif
759
760 bool b_draining = false;
761 int res;
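                // Feed the access unit to dav1d and, in the nested loop below, pull out any
                // pictures that are already decodable so the decoder's output queue does not
                // fill up and block dav1d_send_data.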
762
763 do {
764 res = dav1d_send_data(mDav1dCtx, &data);
765 if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
766 ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
                        /* Bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
                         * to be recoverable. Other errors returned from this function are
                         * either unexpected or considered critical failures.
                         */
771 i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
772 break;
773 }
774
775 bool b_output_error = false;
776
777 do {
778 Dav1dPicture img;
779 memset(&img, 0, sizeof(img));
780
781 res = dav1d_get_picture(mDav1dCtx, &img);
782 if (res == 0) {
783 mDecodedPictures.push_back(img);
784
785 if (!end_of_stream) break;
786 } else if (res == DAV1D_ERR(EAGAIN)) {
787 /* the decoder needs more data to be able to output something.
788 * if there is more data pending, continue the loop below or
789 * otherwise break */
790 if (data.sz != 0) res = 0;
791 break;
792 } else {
793 ALOGE("warning! Decoder error %d!", res);
794 b_output_error = true;
795 break;
796 }
797 } while (res == 0);
798
799 if (b_output_error) break;
800
801 /* on drain, we must ignore the 1st EAGAIN */
802 if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
803 (end_of_stream)) {
804 b_draining = true;
805 res = 0;
806 }
807 } while (res == 0 && ((data.sz != 0) || b_draining));
808
809 if (data.sz > 0) {
810 ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
811 dav1d_data_unref(&data);
812 }
813 }
814
815 mTimeEnd = systemTime();
816 nsecs_t decodeTime = mTimeEnd - mTimeStart;
817 // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
818
819 if (i_ret != 0) {
820 ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
821 work->result = C2_CORRUPTED;
822 work->workletsProcessed = 1u;
823 mSignalledError = true;
824 return;
825 }
826 }
827 }
828
829 (void)outputBuffer(pool, work);
830
831 if (end_of_stream) {
832 drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
833 mSignalledOutputEos = true;
834 } else if (!inSize) {
835 fillEmptyWork(work);
836 }
837}
838
839void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
840 const std::unique_ptr<C2Work>& work) {
841 C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
842 bool infoPresent = false;
843
844 if (picture != nullptr) {
845 if (picture->mastering_display != nullptr) {
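            // The AV1 mastering display metadata is fixed-point: the primaries and white
            // point are 0.16 values (divide by 65536), max_luminance is 24.8 (divide by 256)
            // and min_luminance is 18.14 (divide by 16384).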
846 hdrStaticMetadataInfo.mastering.red.x =
847 picture->mastering_display->primaries[0][0] / 65536.0;
848 hdrStaticMetadataInfo.mastering.red.y =
849 picture->mastering_display->primaries[0][1] / 65536.0;
850
851 hdrStaticMetadataInfo.mastering.green.x =
852 picture->mastering_display->primaries[1][0] / 65536.0;
853 hdrStaticMetadataInfo.mastering.green.y =
854 picture->mastering_display->primaries[1][1] / 65536.0;
855
856 hdrStaticMetadataInfo.mastering.blue.x =
857 picture->mastering_display->primaries[2][0] / 65536.0;
858 hdrStaticMetadataInfo.mastering.blue.y =
859 picture->mastering_display->primaries[2][1] / 65536.0;
860
861 hdrStaticMetadataInfo.mastering.white.x =
862 picture->mastering_display->white_point[0] / 65536.0;
863 hdrStaticMetadataInfo.mastering.white.y =
864 picture->mastering_display->white_point[1] / 65536.0;
865
866 hdrStaticMetadataInfo.mastering.maxLuminance =
867 picture->mastering_display->max_luminance / 256.0;
868 hdrStaticMetadataInfo.mastering.minLuminance =
869 picture->mastering_display->min_luminance / 16384.0;
870
871 infoPresent = true;
872 }
873
874 if (picture->content_light != nullptr) {
875 hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
876 hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
877 infoPresent = true;
878 }
879 }
880
881 // if (infoPresent) {
882 // ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
883 // mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
884 // mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
885 // hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
886 // hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
887 // hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
888 // hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
889 // hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
890 // hdrStaticMetadataInfo.maxCll,
891 // hdrStaticMetadataInfo.maxFall,
892 // mOutputBufferIndex);
893 // }
894
895 // config if static info has changed
896 if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
897 mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
898 work->worklets.front()->output.configUpdate.push_back(
899 C2Param::Copy(mHdrStaticMetadataInfo));
900 }
901}
902
903void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
904 const std::unique_ptr<C2Work>& work) {
905 if (picture != nullptr) {
906 if (picture->itut_t35 != nullptr) {
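            // Reassemble the full ITU-T T.35 message that dav1d splits into separate fields:
            // country_code, the extension byte when the code is 0xFF, then the raw payload.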
907 std::vector<uint8_t> payload;
908 size_t payloadSize = picture->itut_t35->payload_size;
909 if (payloadSize > 0) {
910 payload.push_back(picture->itut_t35->country_code);
911 if (picture->itut_t35->country_code == 0xFF) {
912 payload.push_back(picture->itut_t35->country_code_extension_byte);
913 }
914 payload.insert(payload.end(), picture->itut_t35->payload,
915 picture->itut_t35->payload + picture->itut_t35->payload_size);
916 }
917
918 std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
919 C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
920 if (!hdr10PlusInfo) {
921 ALOGE("Hdr10PlusInfo allocation failed");
922 mSignalledError = true;
923 work->result = C2_NO_MEMORY;
924 return;
925 }
926 memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
927
928 // ALOGD("Received a hdr10PlusInfo from picture->itut_t32
929 // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
930 // picture->itut_t35->payload_size,
931 // picture->itut_t35->country_code,
932 // mOutputBufferIndex);
933
934 // config if hdr10Plus info has changed
935 if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
936 mHdr10PlusInfo = std::move(hdr10PlusInfo);
937 work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
938 }
939 }
940 }
941}
942
943void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
944 VuiColorAspects vuiColorAspects;
945
946 if (picture) {
947 vuiColorAspects.primaries = picture->seq_hdr->pri;
948 vuiColorAspects.transfer = picture->seq_hdr->trc;
949 vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
950 vuiColorAspects.fullRange = picture->seq_hdr->color_range;
951
952 // ALOGD("Received a vuiColorAspects from dav1d
953 // (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
954 // at mOutputBufferIndex = % d,
955 // out_frameIndex = % ld.",
956 // vuiColorAspects.primaries,
957 // vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
958 // mOutputBufferIndex, picture->m.timestamp);
959 }
960
961 // convert vui aspects to C2 values if changed
962 if (!(vuiColorAspects == mBitstreamColorAspects)) {
963 mBitstreamColorAspects = vuiColorAspects;
964 ColorAspects sfAspects;
965 C2StreamColorAspectsInfo::input codedAspects = {0u};
966 ColorUtils::convertIsoColorAspectsToCodecAspects(
967 vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
968 vuiColorAspects.fullRange, sfAspects);
969 if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
970 codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
971 }
972 if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
973 codedAspects.range = C2Color::RANGE_UNSPECIFIED;
974 }
975 if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
976 codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
977 }
978 if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
979 codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
980 }
981 std::vector<std::unique_ptr<C2SettingResult>> failures;
982 mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
983 }
984}
985
986void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
987 mSignalledError = true;
988 work->result = error;
989 work->workletsProcessed = 1u;
990}
991
992bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
993 if (size > mTmpFrameBufferSize) {
994 mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
995 if (mTmpFrameBuffer == nullptr) {
996 mTmpFrameBufferSize = 0;
997 return false;
998 }
999 mTmpFrameBufferSize = size;
1000 }
1001 return true;
1002}
1003
1004#ifdef FILE_DUMP_ENABLE
1005void C2SoftDav1dDec::writeDav1dOutYuvFile(const Dav1dPicture& p) {
1006 if (mDav1dOutYuvFile != NULL) {
1007 uint8_t* ptr;
1008 const int hbd = p.p.bpc > 8;
1009
1010 ptr = (uint8_t*)p.data[0];
1011 for (int y = 0; y < p.p.h; y++) {
1012 int iSize = p.p.w << hbd;
1013 int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
1014 if (ret != iSize) {
1015 ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName, iSize,
1016 ret);
1017 break;
1018 }
1019
1020 ptr += p.stride[0];
1021 }
1022
1023 if (p.p.layout != DAV1D_PIXEL_LAYOUT_I400) {
1024 // u/v
1025 const int ss_ver = p.p.layout == DAV1D_PIXEL_LAYOUT_I420;
1026 const int ss_hor = p.p.layout != DAV1D_PIXEL_LAYOUT_I444;
1027 const int cw = (p.p.w + ss_hor) >> ss_hor;
1028 const int ch = (p.p.h + ss_ver) >> ss_ver;
1029 for (int pl = 1; pl <= 2; pl++) {
1030 ptr = (uint8_t*)p.data[pl];
1031 for (int y = 0; y < ch; y++) {
1032 int iSize = cw << hbd;
1033 int ret = fwrite(ptr, 1, cw << hbd, mDav1dOutYuvFile);
1034 if (ret != iSize) {
1035 ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName,
1036 iSize, ret);
1037 break;
1038 }
1039 ptr += p.stride[1];
1040 }
1041 }
1042 }
1043 }
1044}
1045#endif
1046
1047bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
1048 const std::unique_ptr<C2Work>& work) {
1049 if (!(work && pool)) return false;
1050 if (mDav1dCtx == nullptr) return false;
1051
1052 // Get a decoded picture from dav1d if it is enabled.
1053 Dav1dPicture img;
1054 memset(&img, 0, sizeof(img));
1055
1056 int res = 0;
1057 if (mDecodedPictures.size() > 0) {
1058 img = mDecodedPictures.front();
1059 mDecodedPictures.pop_front();
1060 // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
1061 // outputBuffer.",img.m.timestamp,img.m.timestamp);
1062 } else {
1063 res = dav1d_get_picture(mDav1dCtx, &img);
1064 if (res == 0) {
1065 // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
1066 // outputBuffer.",img.m.timestamp,img.m.timestamp);
1067 } else {
1068 ALOGE("failed to get a picture from dav1d for outputBuffer.");
1069 }
1070 }
1071
1072 if (res == DAV1D_ERR(EAGAIN)) {
1073 ALOGD("Not enough data to output a picture.");
1074 return false;
1075 }
1076 if (res != 0) {
1077 ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
1078 return false;
1079 }
1080
1081 const int width = img.p.w;
1082 const int height = img.p.h;
1083 if (width != mWidth || height != mHeight) {
1084 mWidth = width;
1085 mHeight = height;
1086
1087 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
1088 std::vector<std::unique_ptr<C2SettingResult>> failures;
1089 c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
1090 if (err == C2_OK) {
1091 work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
1092 } else {
1093 ALOGE("Config update size failed");
1094 mSignalledError = true;
1095 work->result = C2_CORRUPTED;
1096 work->workletsProcessed = 1u;
1097 return false;
1098 }
1099 }
1100
1101 getVuiParams(&img);
1102 getHDRStaticParams(&img, work);
1103 getHDR10PlusInfoData(&img, work);
1104
    // out_frameIndex that the decoded picture returns from dav1d.
    int64_t out_frameIndex = img.m.timestamp;

    const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;

    int bitdepth = img.p.bpc;

    std::shared_ptr<C2GraphicBlock> block;
    uint32_t format = HAL_PIXEL_FORMAT_YV12;
    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
    if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
        IntfImpl::Lock lock = mIntf->lock();
        codedColorAspects = mIntf->getColorAspects_l();
        bool allowRGBA1010102 = false;
        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
            allowRGBA1010102 = true;
        }
        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
    }

1127 if (mHalPixelFormat != format) {
1128 C2StreamPixelFormatInfo::output pixelFormat(0u, format);
1129 std::vector<std::unique_ptr<C2SettingResult>> failures;
1130 c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
1131 if (err == C2_OK) {
1132 work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
1133 } else {
1134 ALOGE("Config update pixelFormat failed");
1135 mSignalledError = true;
1136 work->workletsProcessed = 1u;
1137 work->result = C2_CORRUPTED;
            return false;
1139 }
1140 mHalPixelFormat = format;
1141 }
1142
1143 C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
1144
1145 // We always create a graphic block that is width aligned to 16 and height
1146 // aligned to 2. We set the correct "crop" value of the image in the call to
1147 // createGraphicBuffer() by setting the correct image dimensions.
1148 c2_status_t err =
1149 pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
1150
1151 if (err != C2_OK) {
1152 ALOGE("fetchGraphicBlock for Output failed with status %d", err);
1153 work->result = err;
1154 return false;
1155 }
1156
1157 C2GraphicView wView = block->map().get();
1158
1159 if (wView.error()) {
1160 ALOGE("graphic view map failed %d", wView.error());
1161 work->result = C2_CORRUPTED;
1162 return false;
1163 }
1164
1165 // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
1166 // block->height(), mWidth, mHeight, (int)out_frameIndex);
1167
1168 mOutputBufferIndex = out_frameIndex;
1169
1170 uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
1171 uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
1172 uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
1173
1174 C2PlanarLayout layout = wView.layout();
1175 size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
1176 size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
1177 size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
1178
    CONV_FORMAT_T convFormat;
    switch (img.p.layout) {
        case DAV1D_PIXEL_LAYOUT_I444:
            convFormat = CONV_FORMAT_I444;
            break;
        case DAV1D_PIXEL_LAYOUT_I422:
            convFormat = CONV_FORMAT_I422;
            break;
        default:
            convFormat = CONV_FORMAT_I420;
            break;
    }
1191
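    // dav1d returns planar I400/I420/I422/I444 frames at 8 or 10 bpc; the code below
    // converts them into the configured HAL pixel format (YV12, P010 or RGBA1010102).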
    if (bitdepth == 10) {
        // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
        // decompression to avoid color conversion.
        const uint16_t* srcY = (const uint16_t*)img.data[0];
        const uint16_t* srcU = (const uint16_t*)img.data[1];
        const uint16_t* srcV = (const uint16_t*)img.data[2];
        size_t srcYStride = img.stride[0] / 2;
        size_t srcUStride = img.stride[1] / 2;
        size_t srcVStride = img.stride[1] / 2;

        if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
            if (isMonochrome) {
                const size_t tmpSize = mWidth;
                const bool needFill = tmpSize > mTmpFrameBufferSize;
                if (!allocTmpFrameBuffer(tmpSize)) {
                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
                srcU = srcV = mTmpFrameBuffer.get();
                srcUStride = srcVStride = 0;
                if (needFill) {
                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
                }
            }
            convertPlanar16ToY410OrRGBA1010102(
                    dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                    dstYStride, mWidth, mHeight,
                    std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects),
                    convFormat);
        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
            dstYStride /= 2;
            dstUStride /= 2;
            dstVStride /= 2;
            size_t tmpSize = 0;
            if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
                (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
                if (!allocTmpFrameBuffer(tmpSize)) {
                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
            }
            convertPlanar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV, srcYStride,
                                  srcUStride, srcVStride, dstYStride, dstUStride, dstVStride,
                                  mWidth, mHeight, isMonochrome, convFormat, mTmpFrameBuffer.get(),
                                  tmpSize);
        } else {
            size_t tmpSize = 0;
            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
                if (!allocTmpFrameBuffer(tmpSize)) {
                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
            }
            convertPlanar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
                                  srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
                                  isMonochrome, convFormat, mTmpFrameBuffer.get(), tmpSize);
        }
1254
1255 // Dump the output buffer if dumping is enabled (debug only).
1256#ifdef FILE_DUMP_ENABLE
1257 FILE* fp_out = mDav1dOutYuvFile;
1258
1259 // if(mOutputBufferIndex % 100 == 0)
1260 ALOGV("output a 10bit picture %dx%d from dav1d "
1261 "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
1262 mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
1263
1264 if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
1265 for (int i = 0; i < mHeight; i++) {
1266 int ret = fwrite((uint8_t*)srcY + i * srcYStride * 2, 1, mWidth * 2, fp_out);
1267 if (ret != mWidth * 2) {
1268 ALOGE("Error in fwrite, requested %d, returned %d", mWidth * 2, ret);
1269 break;
1270 }
1271 }
1272
1273 for (int i = 0; i < mHeight / 2; i++) {
1274 int ret = fwrite((uint8_t*)srcU + i * srcUStride * 2, 1, mWidth, fp_out);
1275 if (ret != mWidth) {
1276 ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
1277 break;
1278 }
1279 }
1280
1281 for (int i = 0; i < mHeight / 2; i++) {
1282 int ret = fwrite((uint8_t*)srcV + i * srcVStride * 2, 1, mWidth, fp_out);
1283 if (ret != mWidth) {
1284 ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
1285 break;
1286 }
1287 }
1288 }
1289#endif
1290 } else {
1291 const uint8_t* srcY = (const uint8_t*)img.data[0];
1292 const uint8_t* srcU = (const uint8_t*)img.data[1];
1293 const uint8_t* srcV = (const uint8_t*)img.data[2];
1294
1295 size_t srcYStride = img.stride[0];
1296 size_t srcUStride = img.stride[1];
1297 size_t srcVStride = img.stride[1];
1298
        // Dump the output buffer if dumping is enabled (debug only).
1300#ifdef FILE_DUMP_ENABLE
1301 FILE* fp_out = mDav1dOutYuvFile;
1302 // if(mOutputBufferIndex % 100 == 0)
1303 ALOGV("output a 8bit picture %dx%d from dav1d "
1304 "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
1305 mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
1306
1307 if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
1308 for (int i = 0; i < mHeight; i++) {
1309 int ret = fwrite((uint8_t*)srcY + i * srcYStride, 1, mWidth, fp_out);
1310 if (ret != mWidth) {
1311 ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
1312 break;
1313 }
1314 }
1315
1316 for (int i = 0; i < mHeight / 2; i++) {
1317 int ret = fwrite((uint8_t*)srcU + i * srcUStride, 1, mWidth / 2, fp_out);
1318 if (ret != mWidth / 2) {
1319 ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
1320 break;
1321 }
1322 }
1323
1324 for (int i = 0; i < mHeight / 2; i++) {
1325 int ret = fwrite((uint8_t*)srcV + i * srcVStride, 1, mWidth / 2, fp_out);
1326 if (ret != mWidth / 2) {
1327 ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
1328 break;
1329 }
1330 }
1331 }
1332#endif
        convertPlanar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                             dstYStride, dstUStride, dstVStride, mWidth, mHeight, isMonochrome,
                             convFormat);
    }
1337
1338 dav1d_picture_unref(&img);
1339
1340 finishWork(out_frameIndex, work, std::move(block));
1341 block = nullptr;
1342 return true;
1343}
1344
1345c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
1346 const std::shared_ptr<C2BlockPool>& pool,
1347 const std::unique_ptr<C2Work>& work) {
1348 if (drainMode == NO_DRAIN) {
1349 ALOGW("drain with NO_DRAIN: no-op");
1350 return C2_OK;
1351 }
1352 if (drainMode == DRAIN_CHAIN) {
1353 ALOGW("DRAIN_CHAIN not supported");
1354 return C2_OMITTED;
1355 }
1356
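    // Keep pulling decoded pictures (queued ones first, then directly from dav1d) until
    // outputBuffer() reports that nothing more can be produced.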
1357 while (outputBuffer(pool, work)) {
1358 }
1359
1360 if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
1361 fillEmptyWork(work);
1362 }
1363
1364 return C2_OK;
1365}
1366
1367c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
1368 return drainInternal(drainMode, pool, nullptr);
1369}
1370
1371class C2SoftDav1dFactory : public C2ComponentFactory {
1372 public:
1373 C2SoftDav1dFactory()
1374 : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
1375 GetCodec2PlatformComponentStore()->getParamReflector())) {}
1376
1377 virtual c2_status_t createComponent(c2_node_id_t id,
1378 std::shared_ptr<C2Component>* const component,
1379 std::function<void(C2Component*)> deleter) override {
1380 *component = std::shared_ptr<C2Component>(
1381 new C2SoftDav1dDec(COMPONENT_NAME, id,
1382 std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
1383 deleter);
1384 return C2_OK;
1385 }
1386
1387 virtual c2_status_t createInterface(
1388 c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
1389 std::function<void(C2ComponentInterface*)> deleter) override {
1390 *interface = std::shared_ptr<C2ComponentInterface>(
1391 new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
1392 COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
1393 deleter);
1394 return C2_OK;
1395 }
1396
1397 virtual ~C2SoftDav1dFactory() override = default;
1398
1399 private:
1400 std::shared_ptr<C2ReflectorHelper> mHelper;
1401};
1402
1403} // namespace android
1404
1405__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
1406 ALOGV("in %s", __func__);
1407 return new ::android::C2SoftDav1dFactory();
1408}
1409
1410__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
1411 ::C2ComponentFactory* factory) {
1412 ALOGV("in %s", __func__);
1413 delete factory;
1414}