blob: f5112f0b6e6dd62985eafda0d30beee9a542512c [file] [log] [blame]
Richard Xief2932a02023-10-20 17:37:57 +00001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17// #define LOG_NDEBUG 0
18#define LOG_TAG "C2SoftDav1dDec"
#include <android-base/properties.h>
#include <cutils/properties.h>
#include <log/log.h>

#include <new>
#include <thread>

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>

#include "C2SoftDav1dDec.h"
33
Richard Xief2932a02023-10-20 17:37:57 +000034namespace android {
35
Richard Xief2932a02023-10-20 17:37:57 +000036// The number of threads used for the dav1d decoder.
37static const int NUM_THREADS_DAV1D_DEFAULT = 0;
38static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";
39
40// codecname set and passed in as a compile flag from Android.bp
41constexpr char COMPONENT_NAME[] = CODECNAME;
42
43constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
44
45class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
46 public:
47 explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
48 : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
49 C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
50 noPrivateBuffers();
51 noInputReferences();
52 noOutputReferences();
53 noInputLatency();
54 noTimeStretch();
55
56 addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
57 .withConstValue(new C2ComponentAttributesSetting(
58 C2Component::ATTRIB_IS_TEMPORAL))
59 .build());
60
61 addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
62 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
63 .withFields({
64 C2F(mSize, width).inRange(2, 4096),
65 C2F(mSize, height).inRange(2, 4096),
66 })
67 .withSetter(SizeSetter)
68 .build());
69
70 addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
71 .withDefault(new C2StreamProfileLevelInfo::input(
72 0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
73 .withFields({C2F(mProfileLevel, profile)
74 .oneOf({C2Config::PROFILE_AV1_0,
75 C2Config::PROFILE_AV1_1}),
76 C2F(mProfileLevel, level)
77 .oneOf({
78 C2Config::LEVEL_AV1_2,
79 C2Config::LEVEL_AV1_2_1,
80 C2Config::LEVEL_AV1_2_2,
81 C2Config::LEVEL_AV1_2_3,
82 C2Config::LEVEL_AV1_3,
83 C2Config::LEVEL_AV1_3_1,
84 C2Config::LEVEL_AV1_3_2,
85 C2Config::LEVEL_AV1_3_3,
86 C2Config::LEVEL_AV1_4,
87 C2Config::LEVEL_AV1_4_1,
88 C2Config::LEVEL_AV1_4_2,
89 C2Config::LEVEL_AV1_4_3,
90 C2Config::LEVEL_AV1_5,
91 C2Config::LEVEL_AV1_5_1,
92 C2Config::LEVEL_AV1_5_2,
93 C2Config::LEVEL_AV1_5_3,
94 })})
95 .withSetter(ProfileLevelSetter, mSize)
96 .build());
97
98 mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
99 addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
100 .withDefault(mHdr10PlusInfoInput)
101 .withFields({
102 C2F(mHdr10PlusInfoInput, m.value).any(),
103 })
104 .withSetter(Hdr10PlusInfoInputSetter)
105 .build());
106
107 mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
108 addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
109 .withDefault(mHdr10PlusInfoOutput)
110 .withFields({
111 C2F(mHdr10PlusInfoOutput, m.value).any(),
112 })
113 .withSetter(Hdr10PlusInfoOutputSetter)
114 .build());
115
116 // default static info
117 C2HdrStaticMetadataStruct defaultStaticInfo{};
118 helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
119 addParameter(
120 DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
121 .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
122 .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
123 C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
124 C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
125 C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
126 C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
127 C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
128 C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
129 C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
130 C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
131 C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
132 C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
133 C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
134 .withSetter(HdrStaticInfoSetter)
135 .build());
136
137 addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
138 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
139 .withFields({
140 C2F(mSize, width).inRange(2, 2048, 2),
141 C2F(mSize, height).inRange(2, 2048, 2),
142 })
143 .withSetter(MaxPictureSizeSetter, mSize)
144 .build());
145
146 addParameter(
147 DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
148 .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
149 .withFields({
150 C2F(mMaxInputSize, value).any(),
151 })
152 .calculatedAs(MaxInputSizeSetter, mMaxSize)
153 .build());
154
155 C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
156 std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
157 C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
158 memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
159
160 defaultColorInfo = C2StreamColorInfo::output::AllocShared(
161 {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
162 helper->addStructDescriptors<C2ChromaOffsetStruct>();
163
164 addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
165 .withConstValue(defaultColorInfo)
166 .build());
167
168 addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
169 .withDefault(new C2StreamColorAspectsTuning::output(
170 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
171 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
172 .withFields({C2F(mDefaultColorAspects, range)
173 .inRange(C2Color::RANGE_UNSPECIFIED,
174 C2Color::RANGE_OTHER),
175 C2F(mDefaultColorAspects, primaries)
176 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
177 C2Color::PRIMARIES_OTHER),
178 C2F(mDefaultColorAspects, transfer)
179 .inRange(C2Color::TRANSFER_UNSPECIFIED,
180 C2Color::TRANSFER_OTHER),
181 C2F(mDefaultColorAspects, matrix)
182 .inRange(C2Color::MATRIX_UNSPECIFIED,
183 C2Color::MATRIX_OTHER)})
184 .withSetter(DefaultColorAspectsSetter)
185 .build());
186
187 addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
188 .withDefault(new C2StreamColorAspectsInfo::input(
189 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
190 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
191 .withFields({C2F(mCodedColorAspects, range)
192 .inRange(C2Color::RANGE_UNSPECIFIED,
193 C2Color::RANGE_OTHER),
194 C2F(mCodedColorAspects, primaries)
195 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
196 C2Color::PRIMARIES_OTHER),
197 C2F(mCodedColorAspects, transfer)
198 .inRange(C2Color::TRANSFER_UNSPECIFIED,
199 C2Color::TRANSFER_OTHER),
200 C2F(mCodedColorAspects, matrix)
201 .inRange(C2Color::MATRIX_UNSPECIFIED,
202 C2Color::MATRIX_OTHER)})
203 .withSetter(CodedColorAspectsSetter)
204 .build());
205
206 addParameter(
207 DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
208 .withDefault(new C2StreamColorAspectsInfo::output(
209 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
210 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
211 .withFields(
212 {C2F(mColorAspects, range)
213 .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
214 C2F(mColorAspects, primaries)
215 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
216 C2Color::PRIMARIES_OTHER),
217 C2F(mColorAspects, transfer)
218 .inRange(C2Color::TRANSFER_UNSPECIFIED,
219 C2Color::TRANSFER_OTHER),
220 C2F(mColorAspects, matrix)
221 .inRange(C2Color::MATRIX_UNSPECIFIED,
222 C2Color::MATRIX_OTHER)})
223 .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
224 .build());
225
226 std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
227 if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
228 pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
229 }
230 // If color format surface isn't added to supported formats, there is no way to know
231 // when the color-format is configured to surface. This is necessary to be able to
232 // choose 10-bit format while decoding 10-bit clips in surface mode.
233 pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
234
235 // TODO: support more formats?
236 addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
237 .withDefault(new C2StreamPixelFormatInfo::output(
238 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
239 .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
240 .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
241 .build());
242 }
243
244 static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
245 C2P<C2StreamPictureSizeInfo::output>& me) {
246 (void)mayBlock;
247 C2R res = C2R::Ok();
248 if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
249 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
250 me.set().width = oldMe.v.width;
251 }
252 if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
253 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
254 me.set().height = oldMe.v.height;
255 }
256 return res;
257 }
258
259 static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
260 const C2P<C2StreamPictureSizeInfo::output>& size) {
261 (void)mayBlock;
262 // TODO: get max width/height from the size's field helpers vs.
263 // hardcoding
264 me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
265 me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
266 return C2R::Ok();
267 }
268
269 static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
270 const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
271 (void)mayBlock;
272 // assume compression ratio of 2, but enforce a floor
273 me.set().value =
274 c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
275 kMinInputBufferSize);
276 return C2R::Ok();
277 }
278
279 static C2R DefaultColorAspectsSetter(bool mayBlock,
280 C2P<C2StreamColorAspectsTuning::output>& me) {
281 (void)mayBlock;
282 if (me.v.range > C2Color::RANGE_OTHER) {
283 me.set().range = C2Color::RANGE_OTHER;
284 }
285 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
286 me.set().primaries = C2Color::PRIMARIES_OTHER;
287 }
288 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
289 me.set().transfer = C2Color::TRANSFER_OTHER;
290 }
291 if (me.v.matrix > C2Color::MATRIX_OTHER) {
292 me.set().matrix = C2Color::MATRIX_OTHER;
293 }
294 return C2R::Ok();
295 }
296
297 static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
298 (void)mayBlock;
299 if (me.v.range > C2Color::RANGE_OTHER) {
300 me.set().range = C2Color::RANGE_OTHER;
301 }
302 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
303 me.set().primaries = C2Color::PRIMARIES_OTHER;
304 }
305 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
306 me.set().transfer = C2Color::TRANSFER_OTHER;
307 }
308 if (me.v.matrix > C2Color::MATRIX_OTHER) {
309 me.set().matrix = C2Color::MATRIX_OTHER;
310 }
311 return C2R::Ok();
312 }
313
314 static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
315 const C2P<C2StreamColorAspectsTuning::output>& def,
316 const C2P<C2StreamColorAspectsInfo::input>& coded) {
317 (void)mayBlock;
318 // take default values for all unspecified fields, and coded values for specified ones
319 me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
320 me.set().primaries =
321 coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
322 me.set().transfer =
323 coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
324 me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
325 return C2R::Ok();
326 }
327
328 static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
329 const C2P<C2StreamPictureSizeInfo::output>& size) {
330 (void)mayBlock;
331 (void)size;
332 (void)me; // TODO: validate
333 return C2R::Ok();
334 }
335
336 std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
337 return mDefaultColorAspects;
338 }
339
340 std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
341
342 static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
343 (void)mayBlock;
344 (void)me; // TODO: validate
345 return C2R::Ok();
346 }
347
348 static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
349 (void)mayBlock;
350 (void)me; // TODO: validate
351 return C2R::Ok();
352 }
353
354 // unsafe getters
355 std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
356 return mPixelFormat;
357 }
358
359 static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
360 (void)mayBlock;
361 if (me.v.mastering.red.x > 1) {
362 me.set().mastering.red.x = 1;
363 }
364 if (me.v.mastering.red.y > 1) {
365 me.set().mastering.red.y = 1;
366 }
367 if (me.v.mastering.green.x > 1) {
368 me.set().mastering.green.x = 1;
369 }
370 if (me.v.mastering.green.y > 1) {
371 me.set().mastering.green.y = 1;
372 }
373 if (me.v.mastering.blue.x > 1) {
374 me.set().mastering.blue.x = 1;
375 }
376 if (me.v.mastering.blue.y > 1) {
377 me.set().mastering.blue.y = 1;
378 }
379 if (me.v.mastering.white.x > 1) {
380 me.set().mastering.white.x = 1;
381 }
382 if (me.v.mastering.white.y > 1) {
383 me.set().mastering.white.y = 1;
384 }
385 if (me.v.mastering.maxLuminance > 65535.0) {
386 me.set().mastering.maxLuminance = 65535.0;
387 }
388 if (me.v.mastering.minLuminance > 6.5535) {
389 me.set().mastering.minLuminance = 6.5535;
390 }
391 if (me.v.maxCll > 65535.0) {
392 me.set().maxCll = 65535.0;
393 }
394 if (me.v.maxFall > 65535.0) {
395 me.set().maxFall = 65535.0;
396 }
397 return C2R::Ok();
398 }
399
400 private:
401 std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
402 std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
403 std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
404 std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
405 std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
406 std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
407 std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
408 std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
409 std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
410 std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
411 std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
412 std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
413};
414
415C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
416 const std::shared_ptr<IntfImpl>& intfImpl)
417 : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
418 mIntf(intfImpl) {
419 mTimeStart = mTimeEnd = systemTime();
420}
421
// Destructor: releases the dav1d context (and any dump state) via onRelease().
C2SoftDav1dDec::~C2SoftDav1dDec() {
    onRelease();
}
425
426c2_status_t C2SoftDav1dDec::onInit() {
427 return initDecoder() ? C2_OK : C2_CORRUPTED;
428}
429
430c2_status_t C2SoftDav1dDec::onStop() {
431 // TODO: b/277797541 - investigate if the decoder needs to be flushed.
432 mSignalledError = false;
433 mSignalledOutputEos = false;
434 return C2_OK;
435}
436
437void C2SoftDav1dDec::onReset() {
438 (void)onStop();
439 c2_status_t err = onFlush_sm();
440 if (err != C2_OK) {
441 ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
442 destroyDecoder();
443 if (!initDecoder()) {
444 ALOGE("Hard reset failed.");
445 }
446 }
447}
448
// Discards all decoded-but-undelivered output: unrefs every queued picture,
// drains anything still inside dav1d, then resets the decoder's internal
// state with dav1d_flush(). No-op if the decoder was never opened.
void C2SoftDav1dDec::flushDav1d() {
    if (mDav1dCtx) {
        Dav1dPicture p;

        // Drop pictures we already pulled out of dav1d but haven't output yet.
        while (mDecodedPictures.size() > 0) {
            p = mDecodedPictures.front();
            mDecodedPictures.pop_front();

            dav1d_picture_unref(&p);
        }

        // Pull and discard any pictures still pending inside the decoder.
        // EAGAIN means "nothing more to output" and is not an error here.
        int res = 0;
        while (true) {
            memset(&p, 0, sizeof(p));

            if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
                if (res != DAV1D_ERR(EAGAIN)) {
                    ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
                    break;
                } else {
                    res = 0;
                    break;
                }
            } else {
                dav1d_picture_unref(&p);
            }
        }

        dav1d_flush(mDav1dCtx);
    }
}
480
// Release: frees the dav1d context and all queued pictures.
void C2SoftDav1dDec::onRelease() {
    destroyDecoder();
}
484
485c2_status_t C2SoftDav1dDec::onFlush_sm() {
486 flushDav1d();
487
488 mSignalledError = false;
489 mSignalledOutputEos = false;
490
491 return C2_OK;
492}
493
494static int GetCPUCoreCount() {
495 int cpuCoreCount = 1;
496#if defined(_SC_NPROCESSORS_ONLN)
497 cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
498#else
499 // _SC_NPROC_ONLN must be defined...
500 cpuCoreCount = sysconf(_SC_NPROC_ONLN);
501#endif
502 CHECK(cpuCoreCount >= 1);
503 ALOGV("Number of CPU cores: %d", cpuCoreCount);
504 return cpuCoreCount;
505}
506
// Opens a fresh dav1d decoder instance. Thread count defaults to half the
// online cores (at least 1) and can be overridden via the
// debug.dav1d.numthreads system property. Returns false if dav1d_open fails.
bool C2SoftDav1dDec::initDecoder() {
#ifdef FILE_DUMP_ENABLE
    mC2SoftDav1dDump.initDumping();
#endif
    mSignalledError = false;
    mSignalledOutputEos = false;
    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
    {
        // Snapshot the configured pixel format under the interface lock.
        IntfImpl::Lock lock = mIntf->lock();
        mPixelFormatInfo = mIntf->getPixelFormat_l();
    }

    const char* version = dav1d_version();

    Dav1dSettings lib_settings;
    dav1d_default_settings(&lib_settings);
    int cpu_count = GetCPUCoreCount();
    lib_settings.n_threads = std::max(cpu_count / 2, 1);  // use up to half the cores by default.

    // Property override takes precedence when set to a positive value.
    int32_t numThreads =
            android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
    if (numThreads > 0) lib_settings.n_threads = numThreads;

    int res = 0;
    if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
        ALOGE("dav1d_open failed. status: %d.", res);
        return false;
    } else {
        ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
    }

    return true;
}
540
// Tears down the dav1d context: unrefs every queued output picture, closes
// the decoder and resets the buffer-index counters. Safe to call repeatedly.
void C2SoftDav1dDec::destroyDecoder() {
    if (mDav1dCtx) {
        Dav1dPicture p;
        while (mDecodedPictures.size() > 0) {
            memset(&p, 0, sizeof(p));
            p = mDecodedPictures.front();
            mDecodedPictures.pop_front();

            dav1d_picture_unref(&p);
        }

        dav1d_close(&mDav1dCtx);
        mDav1dCtx = nullptr;
        mOutputBufferIndex = 0;
        mInputBufferIndex = 0;
    }
#ifdef FILE_DUMP_ENABLE
    mC2SoftDav1dDump.destroyDumping();
#endif
}
561
562void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
563 uint32_t flags = 0;
564 if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
565 flags |= C2FrameData::FLAG_END_OF_STREAM;
566 ALOGV("signalling end_of_stream.");
567 }
568 work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
569 work->worklets.front()->output.buffers.clear();
570 work->worklets.front()->output.ordinal = work->input.ordinal;
571 work->workletsProcessed = 1u;
572}
573
// Attaches |block| (the decoded picture) plus current color aspects to the
// work item identified by |index|. If |work| itself is that item it is filled
// directly; otherwise the framework's finish() looks the pending item up.
void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
                                const std::shared_ptr<C2GraphicBlock>& block) {
    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
    {
        // Read the merged color aspects under the interface lock.
        IntfImpl::Lock lock = mIntf->lock();
        buffer->setInfo(mIntf->getColorAspects_l());
    }
    // Captures |buffer| by value so it stays alive until the worklet is filled.
    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
        uint32_t flags = 0;
        // Only signal EOS on the work item that actually carries the EOS flag.
        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
            flags |= C2FrameData::FLAG_END_OF_STREAM;
            ALOGV("signalling end_of_stream.");
        }
        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.buffers.push_back(buffer);
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
    };
    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
        fillWork(work);
    } else {
        finish(index, fillWork);
    }
}
600
// Main work loop entry: maps the input buffer, feeds the access unit to
// dav1d (prepending a temporal-delimiter OBU if the stream lacks one),
// collects any decoded pictures into mDecodedPictures, then emits output
// via outputBuffer()/drainInternal().
void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
                             const std::shared_ptr<C2BlockPool>& pool) {
    work->result = C2_OK;
    work->workletsProcessed = 0u;
    work->worklets.front()->output.configUpdate.clear();
    work->worklets.front()->output.flags = work->input.flags;
    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }

    size_t inOffset = 0u;
    size_t inSize = 0u;
    C2ReadView rView = mDummyReadView;
    if (!work->input.buffers.empty()) {
        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
        inSize = rView.capacity();
        if (inSize && rView.error()) {
            ALOGE("read view map failed %d", rView.error());
            work->result = C2_CORRUPTED;
            return;
        }
    }

    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

    // Codec-config buffers are not sent to dav1d; complete them empty.
    if (codecConfig) {
        fillEmptyWork(work);
        return;
    }

    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
    if (inSize) {
        mInputBufferIndex = in_frameIndex;

        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);

        mTimeStart = systemTime();
        nsecs_t delay = mTimeStart - mTimeEnd;  // used only by the debug log below

        // Send the bitstream data (inputBuffer) to dav1d.
        if (mDav1dCtx) {
            int i_ret = 0;

            // A successful parse here is informational only (logs new stream
            // dimensions); decode proceeds either way.
            Dav1dSequenceHeader seq;
            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
            if (res == 0) {
                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
                      seq.max_height, (long)in_frameIndex);
            }

            // insert OBU TD if it is not present.
            // TODO: b/286852962
            uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
            Dav1dData data;

            // Allocate 2 extra bytes when we need to prepend a TD OBU.
            uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
                                                      : dav1d_data_create(&data, inSize + 2);
            if (ptr == nullptr) {
                ALOGE("dav1d_data_create failed!");
                i_ret = -1;

            } else {
                // The frame index is carried through dav1d as the timestamp.
                data.m.timestamp = in_frameIndex;

                int new_Size;
                if (obu_type != DAV1D_OBU_TD) {
                    new_Size = (int)(inSize + 2);

                    // OBU TD
                    ptr[0] = 0x12;
                    ptr[1] = 0;

                    memcpy(ptr + 2, bitstream, inSize);
                } else {
                    new_Size = (int)(inSize);
                    // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
                    // avoid memcopy operations.
                    memcpy(ptr, bitstream, new_Size);
                }

                // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
                //       "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
                //       inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
                //       ptr[3], ptr[4]);

                // Dump the bitstream data (inputBuffer) if dumping is enabled.
#ifdef FILE_DUMP_ENABLE
                mC2SoftDav1dDump.dumpInput(ptr, new_Size);
#endif

                bool b_draining = false;
                int res;  // NOTE: intentionally shadows the outer parse result

                // Outer loop: push data until dav1d consumed it all (and, on
                // EOS, until the decoder is fully drained).
                do {
                    res = dav1d_send_data(mDav1dCtx, &data);
                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
                        /* bitstream decoding errors (typically DAV1D_ERR(EINVAL), are assumed
                         * to be recoverable. Other errors returned from this function are
                         * either unexpected, or considered critical failures.
                         */
                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
                        break;
                    }

                    bool b_output_error = false;

                    // Inner loop: pull out any pictures that became ready.
                    do {
                        Dav1dPicture img;
                        memset(&img, 0, sizeof(img));

                        res = dav1d_get_picture(mDav1dCtx, &img);
                        if (res == 0) {
                            mDecodedPictures.push_back(img);

                            if (!end_of_stream) break;
                        } else if (res == DAV1D_ERR(EAGAIN)) {
                            /* the decoder needs more data to be able to output something.
                             * if there is more data pending, continue the loop below or
                             * otherwise break */
                            if (data.sz != 0) res = 0;
                            break;
                        } else {
                            ALOGE("warning! Decoder error %d!", res);
                            b_output_error = true;
                            break;
                        }
                    } while (res == 0);

                    if (b_output_error) break;

                    /* on drain, we must ignore the 1st EAGAIN */
                    if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
                        (end_of_stream)) {
                        b_draining = true;
                        res = 0;
                    }
                } while (res == 0 && ((data.sz != 0) || b_draining));

                // dav1d should have consumed everything; release leftovers.
                if (data.sz > 0) {
                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
                    dav1d_data_unref(&data);
                }
            }

            mTimeEnd = systemTime();
            nsecs_t decodeTime = mTimeEnd - mTimeStart;  // used only by the debug log below
            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

            if (i_ret != 0) {
                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
                work->result = C2_CORRUPTED;
                work->workletsProcessed = 1u;
                mSignalledError = true;
                return;
            }
        }
    }

    (void)outputBuffer(pool, work);

    if (end_of_stream) {
        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
        mSignalledOutputEos = true;
    } else if (!inSize) {
        fillEmptyWork(work);
    }
}
771
772void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
773 const std::unique_ptr<C2Work>& work) {
774 C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
775 bool infoPresent = false;
776
777 if (picture != nullptr) {
778 if (picture->mastering_display != nullptr) {
779 hdrStaticMetadataInfo.mastering.red.x =
780 picture->mastering_display->primaries[0][0] / 65536.0;
781 hdrStaticMetadataInfo.mastering.red.y =
782 picture->mastering_display->primaries[0][1] / 65536.0;
783
784 hdrStaticMetadataInfo.mastering.green.x =
785 picture->mastering_display->primaries[1][0] / 65536.0;
786 hdrStaticMetadataInfo.mastering.green.y =
787 picture->mastering_display->primaries[1][1] / 65536.0;
788
789 hdrStaticMetadataInfo.mastering.blue.x =
790 picture->mastering_display->primaries[2][0] / 65536.0;
791 hdrStaticMetadataInfo.mastering.blue.y =
792 picture->mastering_display->primaries[2][1] / 65536.0;
793
794 hdrStaticMetadataInfo.mastering.white.x =
795 picture->mastering_display->white_point[0] / 65536.0;
796 hdrStaticMetadataInfo.mastering.white.y =
797 picture->mastering_display->white_point[1] / 65536.0;
798
799 hdrStaticMetadataInfo.mastering.maxLuminance =
800 picture->mastering_display->max_luminance / 256.0;
801 hdrStaticMetadataInfo.mastering.minLuminance =
802 picture->mastering_display->min_luminance / 16384.0;
803
804 infoPresent = true;
805 }
806
807 if (picture->content_light != nullptr) {
808 hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
809 hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
810 infoPresent = true;
811 }
812 }
813
814 // if (infoPresent) {
815 // ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
816 // mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
817 // mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
818 // hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
819 // hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
820 // hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
821 // hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
822 // hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
823 // hdrStaticMetadataInfo.maxCll,
824 // hdrStaticMetadataInfo.maxFall,
825 // mOutputBufferIndex);
826 // }
827
828 // config if static info has changed
829 if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
830 mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
831 work->worklets.front()->output.configUpdate.push_back(
832 C2Param::Copy(mHdrStaticMetadataInfo));
833 }
834}
835
836void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
837 const std::unique_ptr<C2Work>& work) {
838 if (picture != nullptr) {
839 if (picture->itut_t35 != nullptr) {
840 std::vector<uint8_t> payload;
841 size_t payloadSize = picture->itut_t35->payload_size;
842 if (payloadSize > 0) {
843 payload.push_back(picture->itut_t35->country_code);
844 if (picture->itut_t35->country_code == 0xFF) {
845 payload.push_back(picture->itut_t35->country_code_extension_byte);
846 }
847 payload.insert(payload.end(), picture->itut_t35->payload,
848 picture->itut_t35->payload + picture->itut_t35->payload_size);
849 }
850
851 std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
852 C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
853 if (!hdr10PlusInfo) {
854 ALOGE("Hdr10PlusInfo allocation failed");
855 mSignalledError = true;
856 work->result = C2_NO_MEMORY;
857 return;
858 }
859 memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
860
861 // ALOGD("Received a hdr10PlusInfo from picture->itut_t32
862 // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
863 // picture->itut_t35->payload_size,
864 // picture->itut_t35->country_code,
865 // mOutputBufferIndex);
866
867 // config if hdr10Plus info has changed
868 if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
869 mHdr10PlusInfo = std::move(hdr10PlusInfo);
870 work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
871 }
872 }
873 }
874}
875
876void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
877 VuiColorAspects vuiColorAspects;
878
879 if (picture) {
880 vuiColorAspects.primaries = picture->seq_hdr->pri;
881 vuiColorAspects.transfer = picture->seq_hdr->trc;
882 vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
883 vuiColorAspects.fullRange = picture->seq_hdr->color_range;
884
885 // ALOGD("Received a vuiColorAspects from dav1d
886 // (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
887 // at mOutputBufferIndex = % d,
888 // out_frameIndex = % ld.",
889 // vuiColorAspects.primaries,
890 // vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
891 // mOutputBufferIndex, picture->m.timestamp);
892 }
893
894 // convert vui aspects to C2 values if changed
895 if (!(vuiColorAspects == mBitstreamColorAspects)) {
896 mBitstreamColorAspects = vuiColorAspects;
897 ColorAspects sfAspects;
898 C2StreamColorAspectsInfo::input codedAspects = {0u};
899 ColorUtils::convertIsoColorAspectsToCodecAspects(
900 vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
901 vuiColorAspects.fullRange, sfAspects);
902 if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
903 codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
904 }
905 if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
906 codedAspects.range = C2Color::RANGE_UNSPECIFIED;
907 }
908 if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
909 codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
910 }
911 if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
912 codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
913 }
914 std::vector<std::unique_ptr<C2SettingResult>> failures;
915 mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
916 }
917}
918
919void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
920 mSignalledError = true;
921 work->result = error;
922 work->workletsProcessed = 1u;
923}
924
925bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
926 if (size > mTmpFrameBufferSize) {
927 mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
928 if (mTmpFrameBuffer == nullptr) {
929 mTmpFrameBufferSize = 0;
930 return false;
931 }
932 mTmpFrameBufferSize = size;
933 }
934 return true;
935}
936
Richard Xief2932a02023-10-20 17:37:57 +0000937bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
938 const std::unique_ptr<C2Work>& work) {
939 if (!(work && pool)) return false;
940 if (mDav1dCtx == nullptr) return false;
941
942 // Get a decoded picture from dav1d if it is enabled.
943 Dav1dPicture img;
944 memset(&img, 0, sizeof(img));
945
946 int res = 0;
947 if (mDecodedPictures.size() > 0) {
948 img = mDecodedPictures.front();
949 mDecodedPictures.pop_front();
950 // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
951 // outputBuffer.",img.m.timestamp,img.m.timestamp);
952 } else {
953 res = dav1d_get_picture(mDav1dCtx, &img);
954 if (res == 0) {
955 // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
956 // outputBuffer.",img.m.timestamp,img.m.timestamp);
957 } else {
958 ALOGE("failed to get a picture from dav1d for outputBuffer.");
959 }
960 }
961
962 if (res == DAV1D_ERR(EAGAIN)) {
963 ALOGD("Not enough data to output a picture.");
964 return false;
965 }
966 if (res != 0) {
967 ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
968 return false;
969 }
970
971 const int width = img.p.w;
972 const int height = img.p.h;
973 if (width != mWidth || height != mHeight) {
974 mWidth = width;
975 mHeight = height;
976
977 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
978 std::vector<std::unique_ptr<C2SettingResult>> failures;
979 c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
980 if (err == C2_OK) {
981 work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
982 } else {
983 ALOGE("Config update size failed");
984 mSignalledError = true;
985 work->result = C2_CORRUPTED;
986 work->workletsProcessed = 1u;
987 return false;
988 }
989 }
990
991 getVuiParams(&img);
992 getHDRStaticParams(&img, work);
993 getHDR10PlusInfoData(&img, work);
994
995 // out_frameIndex that the decoded picture returns from dav1d.
996 int64_t out_frameIndex = img.m.timestamp;
997
Richard Xief2932a02023-10-20 17:37:57 +0000998 const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;
999
1000 int bitdepth = img.p.bpc;
1001
1002 std::shared_ptr<C2GraphicBlock> block;
1003 uint32_t format = HAL_PIXEL_FORMAT_YV12;
1004 std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
1005 if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
1006 IntfImpl::Lock lock = mIntf->lock();
1007 codedColorAspects = mIntf->getColorAspects_l();
1008 bool allowRGBA1010102 = false;
1009 if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
1010 codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
1011 codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
1012 allowRGBA1010102 = true;
1013 }
1014 format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
Richard Xief2932a02023-10-20 17:37:57 +00001015 }
1016
1017 if (mHalPixelFormat != format) {
1018 C2StreamPixelFormatInfo::output pixelFormat(0u, format);
1019 std::vector<std::unique_ptr<C2SettingResult>> failures;
1020 c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
1021 if (err == C2_OK) {
1022 work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
1023 } else {
1024 ALOGE("Config update pixelFormat failed");
1025 mSignalledError = true;
1026 work->workletsProcessed = 1u;
1027 work->result = C2_CORRUPTED;
1028 return UNKNOWN_ERROR;
1029 }
1030 mHalPixelFormat = format;
1031 }
1032
1033 C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
1034
1035 // We always create a graphic block that is width aligned to 16 and height
1036 // aligned to 2. We set the correct "crop" value of the image in the call to
1037 // createGraphicBuffer() by setting the correct image dimensions.
1038 c2_status_t err =
1039 pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
1040
1041 if (err != C2_OK) {
1042 ALOGE("fetchGraphicBlock for Output failed with status %d", err);
1043 work->result = err;
1044 return false;
1045 }
1046
1047 C2GraphicView wView = block->map().get();
1048
1049 if (wView.error()) {
1050 ALOGE("graphic view map failed %d", wView.error());
1051 work->result = C2_CORRUPTED;
1052 return false;
1053 }
1054
1055 // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
1056 // block->height(), mWidth, mHeight, (int)out_frameIndex);
1057
1058 mOutputBufferIndex = out_frameIndex;
1059
1060 uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
1061 uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
1062 uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
1063
1064 C2PlanarLayout layout = wView.layout();
1065 size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
1066 size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
1067 size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
1068
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301069 CONV_FORMAT_T convFormat;
1070 switch (img.p.layout) {
1071 case DAV1D_PIXEL_LAYOUT_I444:
1072 convFormat = CONV_FORMAT_I444;
1073 break;
1074 case DAV1D_PIXEL_LAYOUT_I422:
1075 convFormat = CONV_FORMAT_I422;
1076 break;
1077 default:
1078 convFormat = CONV_FORMAT_I420;
1079 break;
1080 }
1081
Richard Xief2932a02023-10-20 17:37:57 +00001082 if (bitdepth == 10) {
1083 // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
1084 // decompression to avoid color conversion.
1085 const uint16_t* srcY = (const uint16_t*)img.data[0];
1086 const uint16_t* srcU = (const uint16_t*)img.data[1];
1087 const uint16_t* srcV = (const uint16_t*)img.data[2];
1088 size_t srcYStride = img.stride[0] / 2;
1089 size_t srcUStride = img.stride[1] / 2;
1090 size_t srcVStride = img.stride[1] / 2;
1091
1092 if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301093 if (isMonochrome) {
1094 const size_t tmpSize = mWidth;
1095 const bool needFill = tmpSize > mTmpFrameBufferSize;
1096 if (!allocTmpFrameBuffer(tmpSize)) {
1097 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
1098 setError(work, C2_NO_MEMORY);
1099 return false;
Richard Xief2932a02023-10-20 17:37:57 +00001100 }
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301101 srcU = srcV = mTmpFrameBuffer.get();
1102 srcUStride = srcVStride = 0;
1103 if (needFill) {
1104 std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
1105 }
Richard Xief2932a02023-10-20 17:37:57 +00001106 }
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301107 convertPlanar16ToY410OrRGBA1010102(
1108 dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
1109 dstYStride, mWidth, mHeight,
1110 std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects),
1111 convFormat);
Richard Xief2932a02023-10-20 17:37:57 +00001112 } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
1113 dstYStride /= 2;
1114 dstUStride /= 2;
1115 dstVStride /= 2;
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301116 size_t tmpSize = 0;
Richard Xief2932a02023-10-20 17:37:57 +00001117 if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
1118 (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301119 tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
Richard Xief2932a02023-10-20 17:37:57 +00001120 if (!allocTmpFrameBuffer(tmpSize)) {
1121 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
1122 setError(work, C2_NO_MEMORY);
1123 return false;
1124 }
Richard Xief2932a02023-10-20 17:37:57 +00001125 }
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301126 convertPlanar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV, srcYStride,
1127 srcUStride, srcVStride, dstYStride, dstUStride, dstVStride,
1128 mWidth, mHeight, isMonochrome, convFormat, mTmpFrameBuffer.get(),
1129 tmpSize);
Richard Xief2932a02023-10-20 17:37:57 +00001130 } else {
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301131 size_t tmpSize = 0;
Richard Xief2932a02023-10-20 17:37:57 +00001132 if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301133 tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
Richard Xief2932a02023-10-20 17:37:57 +00001134 if (!allocTmpFrameBuffer(tmpSize)) {
1135 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
1136 setError(work, C2_NO_MEMORY);
1137 return false;
1138 }
Richard Xief2932a02023-10-20 17:37:57 +00001139 }
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301140 convertPlanar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
1141 srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
1142 isMonochrome, convFormat, mTmpFrameBuffer.get(), tmpSize);
Richard Xief2932a02023-10-20 17:37:57 +00001143 }
1144
Richard Xief2932a02023-10-20 17:37:57 +00001145 // if(mOutputBufferIndex % 100 == 0)
1146 ALOGV("output a 10bit picture %dx%d from dav1d "
1147 "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
1148 mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
1149
Suyog Pawar4602c372023-08-17 11:09:23 +05301150 // Dump the output buffer if dumping is enabled (debug only).
1151#ifdef FILE_DUMP_ENABLE
1152 mC2SoftDav1dDump.dumpOutput<uint16_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
1153 mWidth, mHeight);
Richard Xief2932a02023-10-20 17:37:57 +00001154#endif
1155 } else {
1156 const uint8_t* srcY = (const uint8_t*)img.data[0];
1157 const uint8_t* srcU = (const uint8_t*)img.data[1];
1158 const uint8_t* srcV = (const uint8_t*)img.data[2];
1159
1160 size_t srcYStride = img.stride[0];
1161 size_t srcUStride = img.stride[1];
1162 size_t srcVStride = img.stride[1];
1163
Richard Xief2932a02023-10-20 17:37:57 +00001164 // if(mOutputBufferIndex % 100 == 0)
1165 ALOGV("output a 8bit picture %dx%d from dav1d "
1166 "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
1167 mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
1168
Suyog Pawar4602c372023-08-17 11:09:23 +05301169 // Dump the output buffer is dumping is enabled (debug only)
1170#ifdef FILE_DUMP_ENABLE
1171 mC2SoftDav1dDump.dumpOutput<uint8_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
1172 mWidth, mHeight);
Richard Xief2932a02023-10-20 17:37:57 +00001173#endif
Harish Mahendrakar29351ea2023-08-24 17:18:56 +05301174 convertPlanar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
1175 dstYStride, dstUStride, dstVStride, mWidth, mHeight, isMonochrome,
1176 convFormat);
Richard Xief2932a02023-10-20 17:37:57 +00001177 }
1178
1179 dav1d_picture_unref(&img);
1180
1181 finishWork(out_frameIndex, work, std::move(block));
1182 block = nullptr;
1183 return true;
1184}
1185
1186c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
1187 const std::shared_ptr<C2BlockPool>& pool,
1188 const std::unique_ptr<C2Work>& work) {
1189 if (drainMode == NO_DRAIN) {
1190 ALOGW("drain with NO_DRAIN: no-op");
1191 return C2_OK;
1192 }
1193 if (drainMode == DRAIN_CHAIN) {
1194 ALOGW("DRAIN_CHAIN not supported");
1195 return C2_OMITTED;
1196 }
1197
1198 while (outputBuffer(pool, work)) {
1199 }
1200
1201 if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
1202 fillEmptyWork(work);
1203 }
1204
1205 return C2_OK;
1206}
1207
// Public drain entry point: drains decoder output with no associated work item
// (drainInternal tolerates a null work pointer).
c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
    return drainInternal(drainMode, pool, nullptr);
}
1211
1212class C2SoftDav1dFactory : public C2ComponentFactory {
1213 public:
1214 C2SoftDav1dFactory()
1215 : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
1216 GetCodec2PlatformComponentStore()->getParamReflector())) {}
1217
1218 virtual c2_status_t createComponent(c2_node_id_t id,
1219 std::shared_ptr<C2Component>* const component,
1220 std::function<void(C2Component*)> deleter) override {
1221 *component = std::shared_ptr<C2Component>(
1222 new C2SoftDav1dDec(COMPONENT_NAME, id,
1223 std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
1224 deleter);
1225 return C2_OK;
1226 }
1227
1228 virtual c2_status_t createInterface(
1229 c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
1230 std::function<void(C2ComponentInterface*)> deleter) override {
1231 *interface = std::shared_ptr<C2ComponentInterface>(
1232 new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
1233 COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
1234 deleter);
1235 return C2_OK;
1236 }
1237
1238 virtual ~C2SoftDav1dFactory() override = default;
1239
1240 private:
1241 std::shared_ptr<C2ReflectorHelper> mHelper;
1242};
1243
1244} // namespace android
1245
// Entry point looked up by the codec service to instantiate this component
// factory; the caller owns the returned pointer (see DestroyCodec2Factory).
__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
    ALOGV("in %s", __func__);
    return new ::android::C2SoftDav1dFactory();
}
1250
// Matching destruction entry point for factories returned by
// CreateCodec2Factory().
__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
        ::C2ComponentFactory* factory) {
    ALOGV("in %s", __func__);
    delete factory;
}