/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftGav1Dec"
#include "C2SoftGav1Dec.h"

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <libyuv.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>

// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
#if LIBYUV_VERSION >= 1780
#include <algorithm>
#define HAVE_LIBYUV_I410_I210_TO_AB30 1
#else
#define HAVE_LIBYUV_I410_I210_TO_AB30 0
#endif

namespace android {

// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;

constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;

class C2SoftGav1Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
  explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
      : SimpleInterface<void>::BaseParams(
            helper, COMPONENT_NAME, C2Component::KIND_DECODER,
            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
    noPrivateBuffers();  // TODO: account for our buffers here.
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();

    addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                     .withConstValue(new C2ComponentAttributesSetting(
                         C2Component::ATTRIB_IS_TEMPORAL))
                     .build());

    addParameter(
        DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 4096),
                C2F(mSize, height).inRange(2, 4096),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                     .withDefault(new C2StreamProfileLevelInfo::input(
                         0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
                     .withFields({C2F(mProfileLevel, profile)
                                      .oneOf({C2Config::PROFILE_AV1_0,
                                              C2Config::PROFILE_AV1_1}),
                                  C2F(mProfileLevel, level)
                                      .oneOf({
                                          C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
                                          C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
                                          C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
                                          C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
                                          C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
                                          C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
                                          C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
                                          C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
                                      })})
                     .withSetter(ProfileLevelSetter, mSize)
                     .build());

    mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoInput)
            .withFields({
                C2F(mHdr10PlusInfoInput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoInputSetter)
            .build());

    mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoOutput)
            .withFields({
                C2F(mHdr10PlusInfoOutput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoOutputSetter)
            .build());

    // default static info
    C2HdrStaticMetadataStruct defaultStaticInfo{};
    helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
    addParameter(
        DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
            .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
            .withFields({
                C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
                C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
                C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
                C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)
            })
            .withSetter(HdrStaticInfoSetter)
            .build());

    addParameter(
        DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
            .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, 2),
                C2F(mSize, height).inRange(2, 2048, 2),
            })
            .withSetter(MaxPictureSizeSetter, mSize)
            .build());

    addParameter(DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                     .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
                     .withFields({
                         C2F(mMaxInputSize, value).any(),
                     })
                     .calculatedAs(MaxInputSizeSetter, mMaxSize)
                     .build());

    C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
    std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
        C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
                                               C2Color::YUV_420);
    memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));

    defaultColorInfo = C2StreamColorInfo::output::AllocShared(
        {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
        C2Color::YUV_420);
    helper->addStructDescriptors<C2ChromaOffsetStruct>();

    addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
                     .withConstValue(defaultColorInfo)
                     .build());

    addParameter(
        DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsTuning::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields(
                {C2F(mDefaultColorAspects, range)
                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                 C2F(mDefaultColorAspects, primaries)
                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                              C2Color::PRIMARIES_OTHER),
                 C2F(mDefaultColorAspects, transfer)
                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
                              C2Color::TRANSFER_OTHER),
                 C2F(mDefaultColorAspects, matrix)
                     .inRange(C2Color::MATRIX_UNSPECIFIED,
                              C2Color::MATRIX_OTHER)})
            .withSetter(DefaultColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::input(
                0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mCodedColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mCodedColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mCodedColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mCodedColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(CodedColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
            .build());

    std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
    if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
      pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
    }
    // If the surface color format (IMPLEMENTATION_DEFINED) isn't listed among the
    // supported formats, there is no way to know when the color format is configured
    // to surface mode. That knowledge is needed to choose a 10-bit format while
    // decoding 10-bit clips in surface mode.
    pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

    // TODO: support more formats?
    addParameter(
        DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
            .withDefault(new C2StreamPixelFormatInfo::output(
                0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
            .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
            .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
            .build());
  }

  static C2R SizeSetter(bool mayBlock,
                        const C2P<C2StreamPictureSizeInfo::output> &oldMe,
                        C2P<C2StreamPictureSizeInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
      me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
      me.set().height = oldMe.v.height;
    }
    return res;
  }

  static C2R MaxPictureSizeSetter(
      bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    // TODO: get max width/height from the size's field helpers vs.
    // hardcoding
    me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
    me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
    return C2R::Ok();
  }

  static C2R MaxInputSizeSetter(
      bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
      const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
    (void)mayBlock;
    // assume compression ratio of 2, but enforce a floor
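    // ((w + 63) / 64) * ((h + 63) / 64) counts 64x64 superblocks; 3072 bytes per
    // superblock is half of its raw 8-bit 4:2:0 size (64 * 64 * 3 / 2 = 6144 bytes),
    // which is the assumed 2:1 compression ratio mentioned above.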
    me.set().value = c2_max((((maxSize.v.width + 63) / 64)
                    * ((maxSize.v.height + 63) / 64) * 3072), kMinInputBufferSize);
    return C2R::Ok();
  }

  static C2R DefaultColorAspectsSetter(
      bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
                                const C2P<C2StreamColorAspectsTuning::output> &def,
                                const C2P<C2StreamColorAspectsInfo::input> &coded) {
    (void)mayBlock;
    // take default values for all unspecified fields, and coded values for specified ones
    me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
    me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
        ? def.v.primaries : coded.v.primaries;
    me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
        ? def.v.transfer : coded.v.transfer;
    me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
    return C2R::Ok();
  }

  static C2R ProfileLevelSetter(
      bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    (void)size;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  std::shared_ptr<C2StreamColorAspectsTuning::output>
  getDefaultColorAspects_l() {
    return mDefaultColorAspects;
  }

  std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
    return mColorAspects;
  }

  static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
                                      C2P<C2StreamHdr10PlusInfo::input> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  static C2R Hdr10PlusInfoOutputSetter(bool mayBlock,
                                       C2P<C2StreamHdr10PlusInfo::output> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  // unsafe getters
  std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }

  static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output> &me) {
    (void)mayBlock;
    if (me.v.mastering.red.x > 1) {
      me.set().mastering.red.x = 1;
    }
    if (me.v.mastering.red.y > 1) {
      me.set().mastering.red.y = 1;
    }
    if (me.v.mastering.green.x > 1) {
      me.set().mastering.green.x = 1;
    }
    if (me.v.mastering.green.y > 1) {
      me.set().mastering.green.y = 1;
    }
    if (me.v.mastering.blue.x > 1) {
      me.set().mastering.blue.x = 1;
    }
    if (me.v.mastering.blue.y > 1) {
      me.set().mastering.blue.y = 1;
    }
    if (me.v.mastering.white.x > 1) {
      me.set().mastering.white.x = 1;
    }
    if (me.v.mastering.white.y > 1) {
      me.set().mastering.white.y = 1;
    }
    if (me.v.mastering.maxLuminance > 65535.0) {
      me.set().mastering.maxLuminance = 65535.0;
    }
    if (me.v.mastering.minLuminance > 6.5535) {
      me.set().mastering.minLuminance = 6.5535;
    }
    if (me.v.maxCll > 65535.0) {
      me.set().maxCll = 65535.0;
    }
    if (me.v.maxFall > 65535.0) {
      me.set().maxFall = 65535.0;
    }
    return C2R::Ok();
  }

 private:
  std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
  std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
  std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
  std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
  std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
  std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
  std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
  std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
  std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
  std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
};

C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
                             const std::shared_ptr<IntfImpl> &intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecCtx(nullptr) {
  mTimeStart = mTimeEnd = systemTime();
}

C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }

c2_status_t C2SoftGav1Dec::onInit() {
  return initDecoder() ? C2_OK : C2_CORRUPTED;
}

c2_status_t C2SoftGav1Dec::onStop() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  return C2_OK;
}

void C2SoftGav1Dec::onReset() {
  (void)onStop();
  c2_status_t err = onFlush_sm();
  if (err != C2_OK) {
    ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
    destroyDecoder();
    if (!initDecoder()) {
      ALOGE("Hard reset failed.");
    }
  }
}

void C2SoftGav1Dec::onRelease() { destroyDecoder(); }

c2_status_t C2SoftGav1Dec::onFlush_sm() {
  Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  // Dequeue frame (if any) that was enqueued previously.
  const libgav1::DecoderBuffer *buffer;
  status = mCodecCtx->DequeueFrame(&buffer);
  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("Failed to dequeue frame after flushing the av1 decoder. status: %d",
          status);
    return C2_CORRUPTED;
  }

  mSignalledError = false;
  mSignalledOutputEos = false;

  return C2_OK;
}

static int GetCPUCoreCount() {
  int cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
  cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
  // _SC_NPROC_ONLN must be defined...
  cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
  CHECK(cpuCoreCount >= 1);
  ALOGV("Number of CPU cores: %d", cpuCoreCount);
  return cpuCoreCount;
}

bool C2SoftGav1Dec::initDecoder() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
  {
    IntfImpl::Lock lock = mIntf->lock();
    mPixelFormatInfo = mIntf->getPixelFormat_l();
  }
  mCodecCtx.reset(new libgav1::Decoder());

  if (mCodecCtx == nullptr) {
    ALOGE("mCodecCtx is null");
    return false;
  }

  libgav1::DecoderSettings settings = {};
  settings.threads = GetCPUCoreCount();

  ALOGV("Using libgav1 AV1 software decoder.");
  Libgav1StatusCode status = mCodecCtx->Init(&settings);
  if (status != kLibgav1StatusOk) {
    ALOGE("av1 decoder failed to initialize. status: %d.", status);
    return false;
  }

  return true;
}

void C2SoftGav1Dec::destroyDecoder() { mCodecCtx = nullptr; }

void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
  uint32_t flags = 0;
  if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
    flags |= C2FrameData::FLAG_END_OF_STREAM;
    ALOGV("signalling eos");
  }
  work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
  work->worklets.front()->output.buffers.clear();
  work->worklets.front()->output.ordinal = work->input.ordinal;
  work->workletsProcessed = 1u;
}

void C2SoftGav1Dec::finishWork(uint64_t index,
                               const std::unique_ptr<C2Work> &work,
                               const std::shared_ptr<C2GraphicBlock> &block) {
  std::shared_ptr<C2Buffer> buffer =
      createGraphicBuffer(block, C2Rect(mWidth, mHeight));
  {
    IntfImpl::Lock lock = mIntf->lock();
    buffer->setInfo(mIntf->getColorAspects_l());
  }
  auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
        (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
      flags |= C2FrameData::FLAG_END_OF_STREAM;
      ALOGV("signalling eos");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.buffers.push_back(buffer);
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
  };
  if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
    fillWork(work);
  } else {
    finish(index, fillWork);
  }
}

void C2SoftGav1Dec::process(const std::unique_ptr<C2Work> &work,
                            const std::shared_ptr<C2BlockPool> &pool) {
  work->result = C2_OK;
  work->workletsProcessed = 0u;
  work->worklets.front()->output.configUpdate.clear();
  work->worklets.front()->output.flags = work->input.flags;
  if (mSignalledError || mSignalledOutputEos) {
    work->result = C2_BAD_VALUE;
    return;
  }

  size_t inOffset = 0u;
  size_t inSize = 0u;
  C2ReadView rView = mDummyReadView;
  if (!work->input.buffers.empty()) {
    rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
    inSize = rView.capacity();
    if (inSize && rView.error()) {
      ALOGE("read view map failed %d", rView.error());
      work->result = C2_CORRUPTED;
      return;
    }
  }

  bool codecConfig =
      ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
  bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

  ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", inSize,
        (int)work->input.ordinal.timestamp.peeku(),
        (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);

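  // Codec config buffers are not queued to the decoder; the work item is simply
  // completed and returned.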
  if (codecConfig) {
    fillEmptyWork(work);
    return;
  }

  int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
  if (inSize) {
    uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);

    mTimeStart = systemTime();
    nsecs_t delay = mTimeStart - mTimeEnd;

    const Libgav1StatusCode status =
        mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
                                /*buffer_private_data=*/nullptr);

    mTimeEnd = systemTime();
    nsecs_t decodeTime = mTimeEnd - mTimeStart;
    ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

    if (status != kLibgav1StatusOk) {
      ALOGE("av1 decoder failed to decode frame. status: %d.", status);
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      mSignalledError = true;
      return;
    }

  }

  (void)outputBuffer(pool, work);

  if (eos) {
    drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
    mSignalledOutputEos = true;
  } else if (!inSize) {
    fillEmptyWork(work);
  }
}

void C2SoftGav1Dec::getHDRStaticParams(const libgav1::DecoderBuffer *buffer,
                                       const std::unique_ptr<C2Work> &work) {
  C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
  bool infoPresent = false;
  if (buffer->has_hdr_mdcv) {
    // hdr_mdcv.primary_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.red.x = buffer->hdr_mdcv.primary_chromaticity_x[0] / 65536.0;
    hdrStaticMetadataInfo.mastering.red.y = buffer->hdr_mdcv.primary_chromaticity_y[0] / 65536.0;

    hdrStaticMetadataInfo.mastering.green.x = buffer->hdr_mdcv.primary_chromaticity_x[1] / 65536.0;
    hdrStaticMetadataInfo.mastering.green.y = buffer->hdr_mdcv.primary_chromaticity_y[1] / 65536.0;

    hdrStaticMetadataInfo.mastering.blue.x = buffer->hdr_mdcv.primary_chromaticity_x[2] / 65536.0;
    hdrStaticMetadataInfo.mastering.blue.y = buffer->hdr_mdcv.primary_chromaticity_y[2] / 65536.0;

    // hdr_mdcv.white_point_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.white.x = buffer->hdr_mdcv.white_point_chromaticity_x / 65536.0;
    hdrStaticMetadataInfo.mastering.white.y = buffer->hdr_mdcv.white_point_chromaticity_y / 65536.0;

    // hdr_mdcv.luminance_max is in 24.8 fixed-point format.
    hdrStaticMetadataInfo.mastering.maxLuminance = buffer->hdr_mdcv.luminance_max / 256.0;
    // hdr_mdcv.luminance_min is in 18.14 format.
    hdrStaticMetadataInfo.mastering.minLuminance = buffer->hdr_mdcv.luminance_min / 16384.0;
    infoPresent = true;
  }

  if (buffer->has_hdr_cll) {
    hdrStaticMetadataInfo.maxCll = buffer->hdr_cll.max_cll;
    hdrStaticMetadataInfo.maxFall = buffer->hdr_cll.max_fall;
    infoPresent = true;
  }
  // config if static info has changed
  if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
    mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
    work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(mHdrStaticMetadataInfo));
  }
}

void C2SoftGav1Dec::getHDR10PlusInfoData(const libgav1::DecoderBuffer *buffer,
                                         const std::unique_ptr<C2Work> &work) {
  if (buffer->has_itut_t35) {
    std::vector<uint8_t> payload;
    size_t payloadSize = buffer->itut_t35.payload_size;
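    // Reassemble the ITU-T T.35 message: country code, optional extension byte
    // (present when the country code is 0xFF), then the raw payload bytes.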
    if (payloadSize > 0) {
      payload.push_back(buffer->itut_t35.country_code);
      if (buffer->itut_t35.country_code == 0xFF) {
        payload.push_back(buffer->itut_t35.country_code_extension_byte);
      }
      payload.insert(payload.end(), buffer->itut_t35.payload_bytes,
                     buffer->itut_t35.payload_bytes + buffer->itut_t35.payload_size);
    }

    std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
        C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
    if (!hdr10PlusInfo) {
      ALOGE("Hdr10PlusInfo allocation failed");
      mSignalledError = true;
      work->result = C2_NO_MEMORY;
      return;
    }
    memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());

    // config if hdr10Plus info has changed
    if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
      mHdr10PlusInfo = std::move(hdr10PlusInfo);
      work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
    }
  }
}

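// The bitstream color_config fields are ITU-T H.273 (CICP) code points; convert
// them to framework ColorAspects and then map those onto Codec2 color aspects.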
void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
  VuiColorAspects vuiColorAspects;
  vuiColorAspects.primaries = buffer->color_primary;
  vuiColorAspects.transfer = buffer->transfer_characteristics;
  vuiColorAspects.coeffs = buffer->matrix_coefficients;
  vuiColorAspects.fullRange = buffer->color_range;

  // convert vui aspects to C2 values if changed
  if (!(vuiColorAspects == mBitstreamColorAspects)) {
    mBitstreamColorAspects = vuiColorAspects;
    ColorAspects sfAspects;
    C2StreamColorAspectsInfo::input codedAspects = { 0u };
    ColorUtils::convertIsoColorAspectsToCodecAspects(
        vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
        vuiColorAspects.fullRange, sfAspects);
    if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
      codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
      codedAspects.range = C2Color::RANGE_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
      codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
      codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
    }
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
  }
}

void C2SoftGav1Dec::setError(const std::unique_ptr<C2Work> &work, c2_status_t error) {
  mSignalledError = true;
  work->result = error;
  work->workletsProcessed = 1u;
}

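// Lazily grows a uint16_t scratch buffer used as intermediate storage for the
// 10-bit format conversions below; the buffer is only ever enlarged, never shrunk.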
bool C2SoftGav1Dec::allocTmpFrameBuffer(size_t size) {
  if (size > mTmpFrameBufferSize) {
    mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
    if (mTmpFrameBuffer == nullptr) {
      mTmpFrameBufferSize = 0;
      return false;
    }
    mTmpFrameBufferSize = size;
  }
  return true;
}

bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                 const std::unique_ptr<C2Work> &work) {
  if (!(work && pool)) return false;

  const libgav1::DecoderBuffer *buffer;
  const Libgav1StatusCode status = mCodecCtx->DequeueFrame(&buffer);

  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("av1 decoder DequeueFrame failed. status: %d.", status);
    return false;
  }

  // |buffer| can be NULL if status was equal to kLibgav1StatusOk or
  // kLibgav1StatusNothingToDequeue. This is not an error. This could mean one
  // of two things:
  // - The EnqueueFrame() call was a flush (called with nullptr).
  // - The enqueued frame did not have any displayable frames.
  if (!buffer) {
    return false;
  }

  const int width = buffer->displayed_width[0];
  const int height = buffer->displayed_height[0];
  if (width != mWidth || height != mHeight) {
    mWidth = width;
    mHeight = height;

    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(size));
    } else {
      ALOGE("Config update size failed");
      mSignalledError = true;
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      return false;
    }
  }

  getVuiParams(buffer);
  getHDRStaticParams(buffer, work);
  getHDR10PlusInfoData(buffer, work);

#if LIBYUV_VERSION < 1779
  if (buffer->bitdepth == 10 &&
      !(buffer->image_format == libgav1::kImageFormatYuv420 ||
        buffer->image_format == libgav1::kImageFormatMonochrome400)) {
    ALOGE("image_format %d not supported for 10bit", buffer->image_format);
    mSignalledError = true;
    work->workletsProcessed = 1u;
    work->result = C2_CORRUPTED;
    return false;
  }
#endif

  const bool isMonochrome =
      buffer->image_format == libgav1::kImageFormatMonochrome400;

  std::shared_ptr<C2GraphicBlock> block;
  uint32_t format = HAL_PIXEL_FORMAT_YV12;
  std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
    IntfImpl::Lock lock = mIntf->lock();
    codedColorAspects = mIntf->getColorAspects_l();
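    // Allow RGBA_1010102 output only for HDR10 content, i.e. BT.2020
    // primaries/matrix with the SMPTE ST 2084 (PQ) transfer.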
    bool allowRGBA1010102 = false;
    if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
        codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
        codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
      allowRGBA1010102 = true;
    }
    format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
#if !HAVE_LIBYUV_I410_I210_TO_AB30
    if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
        (buffer->image_format != libgav1::kImageFormatYuv420)) {
      ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
      mSignalledError = true;
      work->result = C2_OMITTED;
      work->workletsProcessed = 1u;
      return false;
    }
#endif
  }

  if (mHalPixelFormat != format) {
    C2StreamPixelFormatInfo::output pixelFormat(0u, format);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(pixelFormat));
    } else {
      ALOGE("Config update pixelFormat failed");
      mSignalledError = true;
      work->workletsProcessed = 1u;
      work->result = C2_CORRUPTED;
      return false;
    }
    mHalPixelFormat = format;
  }

  C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

  // We always create a graphic block that is width aligned to 16 and height
  // aligned to 2. We set the correct "crop" value of the image in the call to
  // createGraphicBuffer() by setting the correct image dimensions.
  c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16),
                                            align(mHeight, 2), format, usage,
                                            &block);

  if (err != C2_OK) {
    ALOGE("fetchGraphicBlock for Output failed with status %d", err);
    work->result = err;
    return false;
  }

  C2GraphicView wView = block->map().get();

  if (wView.error()) {
    ALOGE("graphic view map failed %d", wView.error());
    work->result = C2_CORRUPTED;
    return false;
  }

  ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
        block->height(), mWidth, mHeight, (int)buffer->user_private_data);

  uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
  uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
  uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);

  C2PlanarLayout layout = wView.layout();
  size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
  size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
  size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;

  if (buffer->bitdepth == 10) {
    const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
    const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
    const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0] / 2;
    size_t srcUStride = buffer->stride[1] / 2;
    size_t srcVStride = buffer->stride[2] / 2;

    if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
      bool processed = false;
#if HAVE_LIBYUV_I410_I210_TO_AB30
      if (buffer->image_format == libgav1::kImageFormatYuv444) {
        libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                 dstY, dstYStride, &libyuv::kYuvV2020Constants,
                                 mWidth, mHeight);
        processed = true;
      } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
        libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                 dstY, dstYStride, &libyuv::kYuvV2020Constants,
                                 mWidth, mHeight);
        processed = true;
      }
#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
      if (!processed) {
        if (isMonochrome) {
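          // Monochrome input: point both chroma planes at a zero-stride scratch
          // row filled with 512, the mid-range (neutral) chroma value for 10-bit
          // samples.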
          const size_t tmpSize = mWidth;
          const bool needFill = tmpSize > mTmpFrameBufferSize;
          if (!allocTmpFrameBuffer(tmpSize)) {
            ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
            setError(work, C2_NO_MEMORY);
            return false;
          }
          srcU = srcV = mTmpFrameBuffer.get();
          srcUStride = srcVStride = 0;
          if (needFill) {
            std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
          }
        }
        convertYUV420Planar16ToY410OrRGBA1010102(
            (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
            srcUStride, srcVStride,
            dstYStride / sizeof(uint32_t), mWidth, mHeight,
            std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
      }
    } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
      dstYStride /= 2;
      dstUStride /= 2;
      dstVStride /= 2;
#if LIBYUV_VERSION >= 1779
      if (buffer->image_format == libgav1::kImageFormatYuv444 ||
          buffer->image_format == libgav1::kImageFormatYuv422) {
        // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010 and
        // libyuv::I210ToP010 when they are available.
        // Note it may be safe to alias dstY in I010ToP010, but the libyuv API doesn't make any
        // guarantees.
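        // Scratch layout (uint16_t samples): a full Y plane of dstYStride * mHeight,
        // followed by U and V planes of dstUStride * align(mHeight, 2) / 2 each,
        // holding the intermediate I010 image.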
        const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
        if (!allocTmpFrameBuffer(tmpSize)) {
          ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
          setError(work, C2_NO_MEMORY);
          return false;
        }
        uint16_t *const tmpY = mTmpFrameBuffer.get();
        uint16_t *const tmpU = tmpY + dstYStride * mHeight;
        uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
        if (buffer->image_format == libgav1::kImageFormatYuv444) {
          libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                             mWidth, mHeight);
        } else {
          libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                             mWidth, mHeight);
        }
        libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                           (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride,
                           mWidth, mHeight);
      } else {
        convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                    srcYStride, srcUStride, srcVStride, dstYStride,
                                    dstUStride, mWidth, mHeight, isMonochrome);
      }
#else  // LIBYUV_VERSION < 1779
      convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                  srcYStride, srcUStride, srcVStride, dstYStride,
                                  dstUStride, mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    } else {
#if LIBYUV_VERSION >= 1779
      if (buffer->image_format == libgav1::kImageFormatYuv444) {
        // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420 when
        // it's available.
        const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
        if (!allocTmpFrameBuffer(tmpSize)) {
          ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
          setError(work, C2_NO_MEMORY);
          return false;
        }
        uint16_t *const tmpY = mTmpFrameBuffer.get();
        uint16_t *const tmpU = tmpY + dstYStride * mHeight;
        uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
        libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                           mWidth, mHeight);
        libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
      } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
        libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
      } else {
        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                    srcUStride, srcVStride, dstYStride, dstUStride,
                                    mWidth, mHeight, isMonochrome);
      }
#else  // LIBYUV_VERSION < 1779
      convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                  srcUStride, srcVStride, dstYStride, dstUStride,
                                  mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    }
  } else {
    const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
    const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
    const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0];
    size_t srcUStride = buffer->stride[1];
    size_t srcVStride = buffer->stride[2];

    if (buffer->image_format == libgav1::kImageFormatYuv444) {
      libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
      libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else {
      convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
                                 srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
                                 isMonochrome);
    }
  }
  finishWork(buffer->user_private_data, work, std::move(block));
  block = nullptr;
  return true;
}

c2_status_t C2SoftGav1Dec::drainInternal(
    uint32_t drainMode, const std::shared_ptr<C2BlockPool> &pool,
    const std::unique_ptr<C2Work> &work) {
  if (drainMode == NO_DRAIN) {
    ALOGW("drain with NO_DRAIN: no-op");
    return C2_OK;
  }
  if (drainMode == DRAIN_CHAIN) {
    ALOGW("DRAIN_CHAIN not supported");
    return C2_OMITTED;
  }

  const Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  while (outputBuffer(pool, work)) {
  }

  if (drainMode == DRAIN_COMPONENT_WITH_EOS && work &&
      work->workletsProcessed == 0u) {
    fillEmptyWork(work);
  }

  return C2_OK;
}

c2_status_t C2SoftGav1Dec::drain(uint32_t drainMode,
                                 const std::shared_ptr<C2BlockPool> &pool) {
  return drainInternal(drainMode, pool, nullptr);
}

class C2SoftGav1Factory : public C2ComponentFactory {
 public:
  C2SoftGav1Factory()
      : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
            GetCodec2PlatformComponentStore()->getParamReflector())) {}

  virtual c2_status_t createComponent(
      c2_node_id_t id, std::shared_ptr<C2Component> *const component,
      std::function<void(C2Component *)> deleter) override {
    *component = std::shared_ptr<C2Component>(
        new C2SoftGav1Dec(COMPONENT_NAME, id,
                          std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual c2_status_t createInterface(
      c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *const interface,
      std::function<void(C2ComponentInterface *)> deleter) override {
    *interface = std::shared_ptr<C2ComponentInterface>(
        new SimpleInterface<C2SoftGav1Dec::IntfImpl>(
            COMPONENT_NAME, id,
            std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual ~C2SoftGav1Factory() override = default;

 private:
  std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table))
extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
  ALOGV("in %s", __func__);
  return new ::android::C2SoftGav1Factory();
}

__attribute__((cfi_canonical_jump_table))
extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
  ALOGV("in %s", __func__);
  delete factory;
}