/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "C2SoftDav1dDec"
#include <android-base/properties.h>
#include <cutils/properties.h>
#include <thread>

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include "C2SoftDav1dDec.h"

namespace android {

// The number of threads used for the dav1d decoder.
static const int NUM_THREADS_DAV1D_DEFAULT = 0;
static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";

// The codec name is set and passed in as a compile flag from Android.bp.
constexpr char COMPONENT_NAME[] = CODECNAME;

constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;

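// Default number of frames the decoder may hold before producing output. This is reported to
// the framework as the output delay and is also used as dav1d's max frame delay, unless
// low-latency mode reduces it to 1 (see ActualOutputDelaySetter and initDecoder below).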
constexpr uint32_t kOutputDelay = 4;

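// IntfImpl declares and wires up the Codec2 parameters exposed by this component: picture
// size, profile/level, HDR metadata, color aspects, pixel format, low-latency mode and the
// resulting output delay.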
class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
  public:
    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
        noPrivateBuffers();
        noInputReferences();
        noOutputReferences();
        noInputLatency();
        noTimeStretch();

        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                             .withConstValue(new C2ComponentAttributesSetting(
                                     C2Component::ATTRIB_IS_TEMPORAL))
                             .build());

        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                             .withFields({
                                     C2F(mSize, width).inRange(2, 4096),
                                     C2F(mSize, height).inRange(2, 4096),
                             })
                             .withSetter(SizeSetter)
                             .build());

        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                             .withDefault(new C2StreamProfileLevelInfo::input(
                                     0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
                             .withFields({C2F(mProfileLevel, profile)
                                                  .oneOf({C2Config::PROFILE_AV1_0,
                                                          C2Config::PROFILE_AV1_1}),
                                          C2F(mProfileLevel, level)
                                                  .oneOf({
                                                          C2Config::LEVEL_AV1_2,
                                                          C2Config::LEVEL_AV1_2_1,
                                                          C2Config::LEVEL_AV1_2_2,
                                                          C2Config::LEVEL_AV1_2_3,
                                                          C2Config::LEVEL_AV1_3,
                                                          C2Config::LEVEL_AV1_3_1,
                                                          C2Config::LEVEL_AV1_3_2,
                                                          C2Config::LEVEL_AV1_3_3,
                                                          C2Config::LEVEL_AV1_4,
                                                          C2Config::LEVEL_AV1_4_1,
                                                          C2Config::LEVEL_AV1_4_2,
                                                          C2Config::LEVEL_AV1_4_3,
                                                          C2Config::LEVEL_AV1_5,
                                                          C2Config::LEVEL_AV1_5_1,
                                                          C2Config::LEVEL_AV1_5_2,
                                                          C2Config::LEVEL_AV1_5_3,
                                                  })})
                             .withSetter(ProfileLevelSetter, mSize)
                             .build());

        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
                             .withDefault(mHdr10PlusInfoInput)
                             .withFields({
                                     C2F(mHdr10PlusInfoInput, m.value).any(),
                             })
                             .withSetter(Hdr10PlusInfoInputSetter)
                             .build());

        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
                             .withDefault(mHdr10PlusInfoOutput)
                             .withFields({
                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
                             })
                             .withSetter(Hdr10PlusInfoOutputSetter)
                             .build());

        // default static info
        C2HdrStaticMetadataStruct defaultStaticInfo{};
        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
        addParameter(
                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
                        .withSetter(HdrStaticInfoSetter)
                        .build());

        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
                             .withFields({
                                     C2F(mSize, width).inRange(2, 2048, 2),
                                     C2F(mSize, height).inRange(2, 2048, 2),
                             })
                             .withSetter(MaxPictureSizeSetter, mSize)
                             .build());

        addParameter(
                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
                        .withFields({
                                C2F(mMaxInputSize, value).any(),
                        })
                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
                        .build());

        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));

        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
        helper->addStructDescriptors<C2ChromaOffsetStruct>();

        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
                             .withConstValue(defaultColorInfo)
                             .build());

        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
                             .withDefault(new C2StreamColorAspectsTuning::output(
                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                             .withFields({C2F(mDefaultColorAspects, range)
                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
                                                           C2Color::RANGE_OTHER),
                                          C2F(mDefaultColorAspects, primaries)
                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                                           C2Color::PRIMARIES_OTHER),
                                          C2F(mDefaultColorAspects, transfer)
                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                                           C2Color::TRANSFER_OTHER),
                                          C2F(mDefaultColorAspects, matrix)
                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
                                                           C2Color::MATRIX_OTHER)})
                             .withSetter(DefaultColorAspectsSetter)
                             .build());

        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
                             .withDefault(new C2StreamColorAspectsInfo::input(
                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                             .withFields({C2F(mCodedColorAspects, range)
                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
                                                           C2Color::RANGE_OTHER),
                                          C2F(mCodedColorAspects, primaries)
                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                                           C2Color::PRIMARIES_OTHER),
                                          C2F(mCodedColorAspects, transfer)
                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                                           C2Color::TRANSFER_OTHER),
                                          C2F(mCodedColorAspects, matrix)
                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
                                                           C2Color::MATRIX_OTHER)})
                             .withSetter(CodedColorAspectsSetter)
                             .build());

        addParameter(
                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
                        .withDefault(new C2StreamColorAspectsInfo::output(
                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                        .withFields(
                                {C2F(mColorAspects, range)
                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                                 C2F(mColorAspects, primaries)
                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                                  C2Color::PRIMARIES_OTHER),
                                 C2F(mColorAspects, transfer)
                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                                  C2Color::TRANSFER_OTHER),
                                 C2F(mColorAspects, matrix)
                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
                                                  C2Color::MATRIX_OTHER)})
                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
                        .build());

        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
        }
        // If color format surface isn't added to supported formats, there is no way to know
        // when the color-format is configured to surface. This is necessary to be able to
        // choose 10-bit format while decoding 10-bit clips in surface mode.
        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

        // TODO: support more formats?
        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
                             .withDefault(new C2StreamPixelFormatInfo::output(
                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
                             .build());

        addParameter(
                DefineParam(mLowLatencyMode, C2_PARAMKEY_LOW_LATENCY_MODE)
                        .withDefault(new C2GlobalLowLatencyModeTuning(0))
                        .withFields({C2F(mLowLatencyMode, value).oneOf({0, 1})})
                        .withSetter(Setter<decltype(*mLowLatencyMode)>::StrictValueWithNoDeps)
                        .build());

        addParameter(
                DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
                        .withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
                        .withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
                        .withSetter(ActualOutputDelaySetter, mLowLatencyMode)
                        .build());
    }

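    // Rejects unsupported picture dimensions, keeping the previous width/height for any field
    // that fails validation.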
    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
                          C2P<C2StreamPictureSizeInfo::output>& me) {
        (void)mayBlock;
        C2R res = C2R::Ok();
        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
            me.set().width = oldMe.v.width;
        }
        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
            me.set().height = oldMe.v.height;
        }
        return res;
    }

    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
        (void)mayBlock;
        // TODO: get max width/height from the size's field helpers vs.
        // hardcoding
        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
        return C2R::Ok();
    }

    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
        (void)mayBlock;
        // Assume a compression ratio of 2 (3072 bytes per 64x64 block is half the raw 8-bit
        // 4:2:0 size), but enforce a floor of kMinInputBufferSize.
        me.set().value =
                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
                       kMinInputBufferSize);
        return C2R::Ok();
    }

    static C2R DefaultColorAspectsSetter(bool mayBlock,
                                         C2P<C2StreamColorAspectsTuning::output>& me) {
        (void)mayBlock;
        if (me.v.range > C2Color::RANGE_OTHER) {
            me.set().range = C2Color::RANGE_OTHER;
        }
        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
            me.set().primaries = C2Color::PRIMARIES_OTHER;
        }
        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
            me.set().transfer = C2Color::TRANSFER_OTHER;
        }
        if (me.v.matrix > C2Color::MATRIX_OTHER) {
            me.set().matrix = C2Color::MATRIX_OTHER;
        }
        return C2R::Ok();
    }

    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
        (void)mayBlock;
        if (me.v.range > C2Color::RANGE_OTHER) {
            me.set().range = C2Color::RANGE_OTHER;
        }
        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
            me.set().primaries = C2Color::PRIMARIES_OTHER;
        }
        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
            me.set().transfer = C2Color::TRANSFER_OTHER;
        }
        if (me.v.matrix > C2Color::MATRIX_OTHER) {
            me.set().matrix = C2Color::MATRIX_OTHER;
        }
        return C2R::Ok();
    }

    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
                                  const C2P<C2StreamColorAspectsTuning::output>& def,
                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
        (void)mayBlock;
        // take default values for all unspecified fields, and coded values for specified ones
        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
        me.set().primaries =
                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
        me.set().transfer =
                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
        return C2R::Ok();
    }

    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
        (void)mayBlock;
        (void)size;
        (void)me;  // TODO: validate
        return C2R::Ok();
    }

    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
        return mDefaultColorAspects;
    }

    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }

    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
        (void)mayBlock;
        (void)me;  // TODO: validate
        return C2R::Ok();
    }

    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
        (void)mayBlock;
        (void)me;  // TODO: validate
        return C2R::Ok();
    }

    // unsafe getters
    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
        return mPixelFormat;
    }

    std::shared_ptr<C2PortActualDelayTuning::output> getActualOutputDelay_l() const {
        return mActualOutputDelay;
    }

    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
        (void)mayBlock;
        if (me.v.mastering.red.x > 1) {
            me.set().mastering.red.x = 1;
        }
        if (me.v.mastering.red.y > 1) {
            me.set().mastering.red.y = 1;
        }
        if (me.v.mastering.green.x > 1) {
            me.set().mastering.green.x = 1;
        }
        if (me.v.mastering.green.y > 1) {
            me.set().mastering.green.y = 1;
        }
        if (me.v.mastering.blue.x > 1) {
            me.set().mastering.blue.x = 1;
        }
        if (me.v.mastering.blue.y > 1) {
            me.set().mastering.blue.y = 1;
        }
        if (me.v.mastering.white.x > 1) {
            me.set().mastering.white.x = 1;
        }
        if (me.v.mastering.white.y > 1) {
            me.set().mastering.white.y = 1;
        }
        if (me.v.mastering.maxLuminance > 65535.0) {
            me.set().mastering.maxLuminance = 65535.0;
        }
        if (me.v.mastering.minLuminance > 6.5535) {
            me.set().mastering.minLuminance = 6.5535;
        }
        if (me.v.maxCll > 65535.0) {
            me.set().maxCll = 65535.0;
        }
        if (me.v.maxFall > 65535.0) {
            me.set().maxFall = 65535.0;
        }
        return C2R::Ok();
    }

    static C2R ActualOutputDelaySetter(bool mayBlock, C2P<C2PortActualDelayTuning::output>& me,
                                       const C2P<C2GlobalLowLatencyModeTuning>& lowLatencyMode) {
        (void)mayBlock;
        me.set().value = lowLatencyMode.v.value ? 1 : kOutputDelay;
        return C2R::Ok();
    }

  private:
    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
    std::shared_ptr<C2GlobalLowLatencyModeTuning> mLowLatencyMode;
};

C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
                               const std::shared_ptr<IntfImpl>& intfImpl)
    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl) {
    mTimeStart = mTimeEnd = systemTime();
}

C2SoftDav1dDec::~C2SoftDav1dDec() {
    onRelease();
}

c2_status_t C2SoftDav1dDec::onInit() {
    return initDecoder() ? C2_OK : C2_CORRUPTED;
}

c2_status_t C2SoftDav1dDec::onStop() {
    // TODO: b/277797541 - investigate if the decoder needs to be flushed.
    mSignalledError = false;
    mSignalledOutputEos = false;
    return C2_OK;
}

void C2SoftDav1dDec::onReset() {
    (void)onStop();
    c2_status_t err = onFlush_sm();
    if (err != C2_OK) {
        ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
        destroyDecoder();
        if (!initDecoder()) {
            ALOGE("Hard reset failed.");
        }
    }
}

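// Drains any pictures still queued inside dav1d, then resets its internal decoding state.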
void C2SoftDav1dDec::flushDav1d() {
    if (mDav1dCtx) {
        Dav1dPicture p;

        int res = 0;
        while (true) {
            memset(&p, 0, sizeof(p));

            if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
                if (res != DAV1D_ERR(EAGAIN)) {
                    ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
                    break;
                } else {
                    res = 0;
                    break;
                }
            } else {
                dav1d_picture_unref(&p);
            }
        }

        dav1d_flush(mDav1dCtx);
    }
}

void C2SoftDav1dDec::onRelease() {
    destroyDecoder();
}

c2_status_t C2SoftDav1dDec::onFlush_sm() {
    flushDav1d();

    mSignalledError = false;
    mSignalledOutputEos = false;

    return C2_OK;
}

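// Returns the number of online CPU cores; used below to pick a default dav1d thread count.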
static int GetCPUCoreCount() {
    int cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
    // _SC_NPROC_ONLN must be defined...
    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
    CHECK(cpuCoreCount >= 1);
    ALOGV("Number of CPU cores: %d", cpuCoreCount);
    return cpuCoreCount;
}

bool C2SoftDav1dDec::initDecoder() {
#ifdef FILE_DUMP_ENABLE
    mC2SoftDav1dDump.initDumping();
#endif
    mSignalledError = false;
    mSignalledOutputEos = false;
    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
    {
        IntfImpl::Lock lock = mIntf->lock();
        mPixelFormatInfo = mIntf->getPixelFormat_l();
        mActualOutputDelayInfo = mIntf->getActualOutputDelay_l();
    }

    const char* version = dav1d_version();

    Dav1dSettings lib_settings;
    dav1d_default_settings(&lib_settings);
    int cpu_count = GetCPUCoreCount();
    lib_settings.n_threads = std::max(cpu_count / 2, 1);  // use up to half the cores by default.

    int32_t numThreads =
            android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
    if (numThreads > 0) lib_settings.n_threads = numThreads;

    lib_settings.max_frame_delay = mActualOutputDelayInfo->value;

    int res = 0;
    if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
        ALOGE("dav1d_open failed. status: %d.", res);
        return false;
    } else {
        ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
    }

    return true;
}

void C2SoftDav1dDec::destroyDecoder() {
    if (mDav1dCtx) {
        dav1d_close(&mDav1dCtx);
        mDav1dCtx = nullptr;
        mOutputBufferIndex = 0;
        mInputBufferIndex = 0;
    }
#ifdef FILE_DUMP_ENABLE
    mC2SoftDav1dDump.destroyDumping();
#endif
}

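// Completes a work item with no output buffer, propagating an end-of-stream flag if present.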
void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
    uint32_t flags = 0;
    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
        flags |= C2FrameData::FLAG_END_OF_STREAM;
        ALOGV("signalling end_of_stream.");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
}

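// Wraps the converted graphic block in a C2Buffer, attaches color aspects and per-picture HDR
// metadata, and completes the work item that matches |index| (in-band or out of band).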
void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
                                const std::shared_ptr<C2GraphicBlock>& block,
                                const Dav1dPicture& img) {
    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
    {
        IntfImpl::Lock lock = mIntf->lock();
        buffer->setInfo(mIntf->getColorAspects_l());
    }

    auto fillWork = [buffer, index, img, this](const std::unique_ptr<C2Work>& work) {
        uint32_t flags = 0;
        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
            flags |= C2FrameData::FLAG_END_OF_STREAM;
            ALOGV("signalling end_of_stream.");
        }
        getHDRStaticParams(&img, work);
        getHDR10PlusInfoData(&img, work);

        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.buffers.push_back(buffer);
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
    };
    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
        fillWork(work);
    } else {
        finish(index, fillWork);
    }
}

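// Consumes one input work item: parses any sequence header to track size changes, feeds the
// access unit to dav1d (prepending a temporal-delimiter OBU when needed), and emits whatever
// pictures the decoder has ready.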
void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
                             const std::shared_ptr<C2BlockPool>& pool) {
    work->result = C2_OK;
    work->workletsProcessed = 0u;
    work->worklets.front()->output.configUpdate.clear();
    work->worklets.front()->output.flags = work->input.flags;
    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }

    size_t inOffset = 0u;
    size_t inSize = 0u;
    C2ReadView rView = mDummyReadView;
    if (!work->input.buffers.empty()) {
        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
        inSize = rView.capacity();
        if (inSize && rView.error()) {
            ALOGE("read view map failed %d", rView.error());
            work->result = C2_CORRUPTED;
            return;
        }
    }

    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

    if (codecConfig) {
        fillEmptyWork(work);
        return;
    }

    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
    if (inSize) {
        mInputBufferIndex = in_frameIndex;

        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);

        mTimeStart = systemTime();
        nsecs_t delay = mTimeStart - mTimeEnd;

        // Send the bitstream data (inputBuffer) to dav1d.
        if (mDav1dCtx) {
            int i_ret = 0;

            Dav1dSequenceHeader seq;
            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
            if (res == 0) {
                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
                      seq.max_height, (long)in_frameIndex);
                if (seq.max_width != mWidth || seq.max_height != mHeight) {
                    drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
                    mWidth = seq.max_width;
                    mHeight = seq.max_height;

                    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
                    std::vector<std::unique_ptr<C2SettingResult>> failures;
                    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
                    if (err == C2_OK) {
                        work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
                    } else {
                        ALOGE("Config update size failed");
                        mSignalledError = true;
                        work->result = C2_CORRUPTED;
                        work->workletsProcessed = 1u;
                        return;
                    }
                }
            }

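            // Each input buffer is sent to dav1d as one temporal unit; if it does not already
            // begin with a temporal-delimiter (TD) OBU, a two-byte TD OBU (0x12, 0x00) is
            // prepended below before the payload is copied.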
            // insert OBU TD if it is not present.
            // TODO: b/286852962
            uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
            Dav1dData data;

            uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
                                                      : dav1d_data_create(&data, inSize + 2);
            if (ptr == nullptr) {
                ALOGE("dav1d_data_create failed!");
                i_ret = -1;

            } else {
                data.m.timestamp = in_frameIndex;

                int new_Size;
                if (obu_type != DAV1D_OBU_TD) {
                    new_Size = (int)(inSize + 2);

                    // OBU TD
                    ptr[0] = 0x12;
                    ptr[1] = 0;

                    memcpy(ptr + 2, bitstream, inSize);
                } else {
                    new_Size = (int)(inSize);
                    // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
                    // avoid memcpy operations.
                    memcpy(ptr, bitstream, new_Size);
                }

                // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
                //       "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
                //       inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
                //       ptr[3], ptr[4]);

                // Dump the bitstream data (inputBuffer) if dumping is enabled.
#ifdef FILE_DUMP_ENABLE
                mC2SoftDav1dDump.dumpInput(ptr, new_Size);
#endif

                bool b_draining = false;
                int res;

                do {
                    res = dav1d_send_data(mDav1dCtx, &data);
                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
                        /* Bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
                         * to be recoverable. Other errors returned from this function are
                         * either unexpected, or considered critical failures.
                         */
                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
                        break;
                    }

                    outputBuffer(pool, work);

                } while (res == DAV1D_ERR(EAGAIN));

                if (data.sz > 0) {
                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
                    dav1d_data_unref(&data);
                }
            }

            mTimeEnd = systemTime();
            nsecs_t decodeTime = mTimeEnd - mTimeStart;
            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

            if (i_ret != 0) {
                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
                work->result = C2_CORRUPTED;
                work->workletsProcessed = 1u;
                mSignalledError = true;
                return;
            }
        }
    }

    if (end_of_stream) {
        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
        mSignalledOutputEos = true;
    } else if (!inSize) {
        fillEmptyWork(work);
    }
}

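// Converts dav1d's mastering-display and content-light-level metadata into
// C2StreamHdrStaticMetadataInfo and pushes a config update whenever it changes.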
void C2SoftDav1dDec::getHDRStaticParams(const Dav1dPicture* picture,
                                        const std::unique_ptr<C2Work>& work) {
    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
    bool infoPresent = false;

    if (picture != nullptr) {
        if (picture->mastering_display != nullptr) {
            hdrStaticMetadataInfo.mastering.red.x =
                    picture->mastering_display->primaries[0][0] / 65536.0;
            hdrStaticMetadataInfo.mastering.red.y =
                    picture->mastering_display->primaries[0][1] / 65536.0;

            hdrStaticMetadataInfo.mastering.green.x =
                    picture->mastering_display->primaries[1][0] / 65536.0;
            hdrStaticMetadataInfo.mastering.green.y =
                    picture->mastering_display->primaries[1][1] / 65536.0;

            hdrStaticMetadataInfo.mastering.blue.x =
                    picture->mastering_display->primaries[2][0] / 65536.0;
            hdrStaticMetadataInfo.mastering.blue.y =
                    picture->mastering_display->primaries[2][1] / 65536.0;

            hdrStaticMetadataInfo.mastering.white.x =
                    picture->mastering_display->white_point[0] / 65536.0;
            hdrStaticMetadataInfo.mastering.white.y =
                    picture->mastering_display->white_point[1] / 65536.0;

            hdrStaticMetadataInfo.mastering.maxLuminance =
                    picture->mastering_display->max_luminance / 256.0;
            hdrStaticMetadataInfo.mastering.minLuminance =
                    picture->mastering_display->min_luminance / 16384.0;

            infoPresent = true;
        }

        if (picture->content_light != nullptr) {
            hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
            hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
            infoPresent = true;
        }
    }

    // if (infoPresent) {
    //   ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
    //   mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
    //   mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
    //   hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
    //   hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
    //   hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
    //   hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
    //   hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
    //   hdrStaticMetadataInfo.maxCll,
    //   hdrStaticMetadataInfo.maxFall,
    //   mOutputBufferIndex);
    // }

    // config if static info has changed
    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
        work->worklets.front()->output.configUpdate.push_back(
                C2Param::Copy(mHdrStaticMetadataInfo));
    }
}

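// Extracts ITU-T T.35 payloads (HDR10+ dynamic metadata) from the picture and forwards them as
// a C2StreamHdr10PlusInfo config update when the payload differs from the previous one.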
void C2SoftDav1dDec::getHDR10PlusInfoData(const Dav1dPicture* picture,
                                          const std::unique_ptr<C2Work>& work) {
    if (picture != nullptr) {
        if (picture->itut_t35 != nullptr) {
            std::vector<uint8_t> payload;
            size_t payloadSize = picture->itut_t35->payload_size;
            if (payloadSize > 0) {
                payload.push_back(picture->itut_t35->country_code);
                if (picture->itut_t35->country_code == 0xFF) {
                    payload.push_back(picture->itut_t35->country_code_extension_byte);
                }
                payload.insert(payload.end(), picture->itut_t35->payload,
                               picture->itut_t35->payload + picture->itut_t35->payload_size);
            }

            std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
                    C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
            if (!hdr10PlusInfo) {
                ALOGE("Hdr10PlusInfo allocation failed");
                mSignalledError = true;
                work->result = C2_NO_MEMORY;
                return;
            }
            memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());

            // ALOGD("Received a hdr10PlusInfo from picture->itut_t32
            // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
            // picture->itut_t35->payload_size,
            // picture->itut_t35->country_code,
            // mOutputBufferIndex);

            // config if hdr10Plus info has changed
            if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
                mHdr10PlusInfo = std::move(hdr10PlusInfo);
                work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
            }
        }
    }
}

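// Maps the bitstream's color-description fields (primaries, transfer, matrix, range) to Codec2
// color aspects and reconfigures the interface when they change.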
void C2SoftDav1dDec::getVuiParams(const Dav1dPicture* picture) {
    VuiColorAspects vuiColorAspects;

    if (picture) {
        vuiColorAspects.primaries = picture->seq_hdr->pri;
        vuiColorAspects.transfer = picture->seq_hdr->trc;
        vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
        vuiColorAspects.fullRange = picture->seq_hdr->color_range;

        // ALOGD("Received a vuiColorAspects from dav1d
        //       (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
        //       at mOutputBufferIndex = % d,
        //       out_frameIndex = % ld.",
        //       vuiColorAspects.primaries,
        //       vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
        //       mOutputBufferIndex, picture->m.timestamp);
    }

    // convert vui aspects to C2 values if changed
    if (!(vuiColorAspects == mBitstreamColorAspects)) {
        mBitstreamColorAspects = vuiColorAspects;
        ColorAspects sfAspects;
        C2StreamColorAspectsInfo::input codedAspects = {0u};
        ColorUtils::convertIsoColorAspectsToCodecAspects(
                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
                vuiColorAspects.fullRange, sfAspects);
        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
        }
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
    }
}

void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
    mSignalledError = true;
    work->result = error;
    work->workletsProcessed = 1u;
}

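// Grows the temporary conversion buffer so it holds at least |size| 16-bit samples; the
// existing contents are not preserved.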
bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
    if (size > mTmpFrameBufferSize) {
        mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
        if (mTmpFrameBuffer == nullptr) {
            mTmpFrameBufferSize = 0;
            return false;
        }
        mTmpFrameBufferSize = size;
    }
    return true;
}

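// Fetches one decoded picture from dav1d (if any is ready), converts it into a graphic block in
// the negotiated pixel format, and finishes the matching work item. Returns true if a picture
// was output.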
bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
                                  const std::unique_ptr<C2Work>& work) {
    if (!(work && pool)) return false;
    if (mDav1dCtx == nullptr) return false;

    // Get a decoded picture from dav1d if it is enabled.
    Dav1dPicture img;
    memset(&img, 0, sizeof(img));

    int res = 0;
    res = dav1d_get_picture(mDav1dCtx, &img);
    if (res == DAV1D_ERR(EAGAIN)) {
        ALOGV("Not enough data to output a picture.");
        return false;
    } else if (res != 0) {
        ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
        return false;
    }

    getVuiParams(&img);

    // out_frameIndex that the decoded picture returns from dav1d.
    int64_t out_frameIndex = img.m.timestamp;

    const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;

    int bitdepth = img.p.bpc;

    std::shared_ptr<C2GraphicBlock> block;
    uint32_t format = HAL_PIXEL_FORMAT_YV12;
    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
    if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
        IntfImpl::Lock lock = mIntf->lock();
        codedColorAspects = mIntf->getColorAspects_l();
        bool allowRGBA1010102 = false;
        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
            allowRGBA1010102 = true;
        }
        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
    }

    if (mHalPixelFormat != format) {
        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
        if (err == C2_OK) {
            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
        } else {
            ALOGE("Config update pixelFormat failed");
            mSignalledError = true;
            work->workletsProcessed = 1u;
            work->result = C2_CORRUPTED;
            return false;
        }
        mHalPixelFormat = format;
    }

    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

    // We always create a graphic block that is width aligned to 16 and height
    // aligned to 2. We set the correct "crop" value of the image in the call to
    // createGraphicBuffer() by setting the correct image dimensions.
    c2_status_t err =
            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);

    if (err != C2_OK) {
        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
        work->result = err;
        return false;
    }

    C2GraphicView wView = block->map().get();

    if (wView.error()) {
        ALOGE("graphic view map failed %d", wView.error());
        work->result = C2_CORRUPTED;
        return false;
    }

    // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
    //       block->height(), mWidth, mHeight, (int)out_frameIndex);

    mOutputBufferIndex = out_frameIndex;

    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);

    C2PlanarLayout layout = wView.layout();
    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;

    CONV_FORMAT_T convFormat;
    switch (img.p.layout) {
        case DAV1D_PIXEL_LAYOUT_I444:
            convFormat = CONV_FORMAT_I444;
            break;
        case DAV1D_PIXEL_LAYOUT_I422:
            convFormat = CONV_FORMAT_I422;
            break;
        default:
            convFormat = CONV_FORMAT_I420;
            break;
    }

    if (bitdepth == 10) {
        // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
        // decompression to avoid color conversion.
        const uint16_t* srcY = (const uint16_t*)img.data[0];
        const uint16_t* srcU = (const uint16_t*)img.data[1];
        const uint16_t* srcV = (const uint16_t*)img.data[2];
        size_t srcYStride = img.stride[0] / 2;
        size_t srcUStride = img.stride[1] / 2;
        size_t srcVStride = img.stride[1] / 2;

        if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
            if (isMonochrome) {
                const size_t tmpSize = mWidth;
                const bool needFill = tmpSize > mTmpFrameBufferSize;
                if (!allocTmpFrameBuffer(tmpSize)) {
                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
                srcU = srcV = mTmpFrameBuffer.get();
                srcUStride = srcVStride = 0;
                if (needFill) {
                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
                }
            }
            convertPlanar16ToY410OrRGBA1010102(
                    dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                    dstYStride, mWidth, mHeight,
                    std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects),
                    convFormat);
        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
            dstYStride /= 2;
            dstUStride /= 2;
            dstVStride /= 2;
            size_t tmpSize = 0;
            if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
                (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
                if (!allocTmpFrameBuffer(tmpSize)) {
                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
            }
            convertPlanar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV, srcYStride,
                                  srcUStride, srcVStride, dstYStride, dstUStride, dstVStride,
                                  mWidth, mHeight, isMonochrome, convFormat, mTmpFrameBuffer.get(),
                                  tmpSize);
        } else {
            size_t tmpSize = 0;
            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
                if (!allocTmpFrameBuffer(tmpSize)) {
                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
            }
            convertPlanar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
                                  srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
                                  isMonochrome, convFormat, mTmpFrameBuffer.get(), tmpSize);
        }

        // if(mOutputBufferIndex % 100 == 0)
        ALOGV("output a 10bit picture %dx%d from dav1d "
              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);

        // Dump the output buffer if dumping is enabled (debug only).
#ifdef FILE_DUMP_ENABLE
        mC2SoftDav1dDump.dumpOutput<uint16_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                                              mWidth, mHeight);
#endif
    } else {
        const uint8_t* srcY = (const uint8_t*)img.data[0];
        const uint8_t* srcU = (const uint8_t*)img.data[1];
        const uint8_t* srcV = (const uint8_t*)img.data[2];

        size_t srcYStride = img.stride[0];
        size_t srcUStride = img.stride[1];
        size_t srcVStride = img.stride[1];

        // if(mOutputBufferIndex % 100 == 0)
        ALOGV("output a 8bit picture %dx%d from dav1d "
              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);

        // Dump the output buffer if dumping is enabled (debug only).
#ifdef FILE_DUMP_ENABLE
        mC2SoftDav1dDump.dumpOutput<uint8_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                                             mWidth, mHeight);
#endif
        convertPlanar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                             dstYStride, dstUStride, dstVStride, mWidth, mHeight, isMonochrome,
                             convFormat);
    }

    finishWork(out_frameIndex, work, std::move(block), img);
    dav1d_picture_unref(&img);
    block = nullptr;
    return true;
}

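// Pulls every remaining picture out of dav1d; for an EOS drain, completes the final work item
// empty if no picture was produced for it.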
c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
                                          const std::shared_ptr<C2BlockPool>& pool,
                                          const std::unique_ptr<C2Work>& work) {
    if (drainMode == NO_DRAIN) {
        ALOGW("drain with NO_DRAIN: no-op");
        return C2_OK;
    }
    if (drainMode == DRAIN_CHAIN) {
        ALOGW("DRAIN_CHAIN not supported");
        return C2_OMITTED;
    }

    while (outputBuffer(pool, work)) {
    }

    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
        fillEmptyWork(work);
    }

    return C2_OK;
}

c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
    return drainInternal(drainMode, pool, nullptr);
}

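// Factory exported to the Codec2 platform store through CreateCodec2Factory(); it creates the
// software dav1d component and its standalone interface.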
class C2SoftDav1dFactory : public C2ComponentFactory {
  public:
    C2SoftDav1dFactory()
        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
                  GetCodec2PlatformComponentStore()->getParamReflector())) {}

    virtual c2_status_t createComponent(c2_node_id_t id,
                                        std::shared_ptr<C2Component>* const component,
                                        std::function<void(C2Component*)> deleter) override {
        *component = std::shared_ptr<C2Component>(
                new C2SoftDav1dDec(COMPONENT_NAME, id,
                                   std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
                deleter);
        return C2_OK;
    }

    virtual c2_status_t createInterface(
            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
            std::function<void(C2ComponentInterface*)> deleter) override {
        *interface = std::shared_ptr<C2ComponentInterface>(
                new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
                        COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
                deleter);
        return C2_OK;
    }

    virtual ~C2SoftDav1dFactory() override = default;

  private:
    std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
    ALOGV("in %s", __func__);
    return new ::android::C2SoftDav1dFactory();
}

__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
        ::C2ComponentFactory* factory) {
    ALOGV("in %s", __func__);
    delete factory;
}