/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftGav1Dec"
#include "C2SoftGav1Dec.h"

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <libyuv.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>

namespace android {

// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;

constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;

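// Parameter (interface) definitions for the software AV1 decoder component.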
class C2SoftGav1Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
  explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
      : SimpleInterface<void>::BaseParams(
            helper, COMPONENT_NAME, C2Component::KIND_DECODER,
            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
    noPrivateBuffers();  // TODO: account for our buffers here.
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();

    addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                     .withConstValue(new C2ComponentAttributesSetting(
                         C2Component::ATTRIB_IS_TEMPORAL))
                     .build());

    addParameter(
        DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 4096),
                C2F(mSize, height).inRange(2, 4096),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                     .withDefault(new C2StreamProfileLevelInfo::input(
                         0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
                     .withFields({C2F(mProfileLevel, profile)
                                      .oneOf({C2Config::PROFILE_AV1_0,
                                              C2Config::PROFILE_AV1_1}),
                                  C2F(mProfileLevel, level)
                                      .oneOf({
                                          C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
                                          C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
                                          C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
                                          C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
                                          C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
                                          C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
                                          C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
                                          C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
                                      })})
                     .withSetter(ProfileLevelSetter, mSize)
                     .build());

    mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoInput)
            .withFields({
                C2F(mHdr10PlusInfoInput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoInputSetter)
            .build());

    mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoOutput)
            .withFields({
                C2F(mHdr10PlusInfoOutput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoOutputSetter)
            .build());

    // default static info
    C2HdrStaticMetadataStruct defaultStaticInfo{};
    helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
    addParameter(
        DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
            .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
            .withFields({
                C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
                C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
                C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
                C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)
            })
            .withSetter(HdrStaticInfoSetter)
            .build());

    addParameter(
        DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
            .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, 2),
                C2F(mSize, height).inRange(2, 2048, 2),
            })
            .withSetter(MaxPictureSizeSetter, mSize)
            .build());

    addParameter(DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                     .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
                     .withFields({
                         C2F(mMaxInputSize, value).any(),
                     })
                     .calculatedAs(MaxInputSizeSetter, mMaxSize)
                     .build());

    C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
    std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
        C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
                                               C2Color::YUV_420);
    memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));

    defaultColorInfo = C2StreamColorInfo::output::AllocShared(
        {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
        C2Color::YUV_420);
    helper->addStructDescriptors<C2ChromaOffsetStruct>();

    addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
                     .withConstValue(defaultColorInfo)
                     .build());

    addParameter(
        DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsTuning::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields(
                {C2F(mDefaultColorAspects, range)
                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                 C2F(mDefaultColorAspects, primaries)
                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                              C2Color::PRIMARIES_OTHER),
                 C2F(mDefaultColorAspects, transfer)
                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
                              C2Color::TRANSFER_OTHER),
                 C2F(mDefaultColorAspects, matrix)
                     .inRange(C2Color::MATRIX_UNSPECIFIED,
                              C2Color::MATRIX_OTHER)})
            .withSetter(DefaultColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::input(
                0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mCodedColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mCodedColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mCodedColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mCodedColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(CodedColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
            .build());

    std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
    if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
      pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
    }
    // If the surface color format (IMPLEMENTATION_DEFINED) isn't listed as supported, there is
    // no way to know when the color format has been configured to surface. That knowledge is
    // needed to choose a 10-bit format while decoding 10-bit clips in surface mode.
    pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

    // TODO: support more formats?
    addParameter(
        DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
            .withDefault(new C2StreamPixelFormatInfo::output(
                0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
            .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
            .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
            .build());
  }

  static C2R SizeSetter(bool mayBlock,
                        const C2P<C2StreamPictureSizeInfo::output> &oldMe,
                        C2P<C2StreamPictureSizeInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
      me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
      me.set().height = oldMe.v.height;
    }
    return res;
  }

  static C2R MaxPictureSizeSetter(
      bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    // TODO: get max width/height from the size's field helpers vs.
    // hardcoding
    me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
    me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
    return C2R::Ok();
  }

  static C2R MaxInputSizeSetter(
      bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
      const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
    (void)mayBlock;
    // assume compression ratio of 2, but enforce a floor
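    // (3072 bytes per 64x64 superblock: 64 * 64 * 3 / 2 bytes of 8-bit 4:2:0 samples at a
    //  2:1 ratio)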
    me.set().value = c2_max((((maxSize.v.width + 63) / 64)
        * ((maxSize.v.height + 63) / 64) * 3072), kMinInputBufferSize);
    return C2R::Ok();
  }

  static C2R DefaultColorAspectsSetter(
      bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
                                const C2P<C2StreamColorAspectsTuning::output> &def,
                                const C2P<C2StreamColorAspectsInfo::input> &coded) {
    (void)mayBlock;
    // take default values for all unspecified fields, and coded values for specified ones
    me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
    me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
        ? def.v.primaries : coded.v.primaries;
    me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
        ? def.v.transfer : coded.v.transfer;
    me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
    return C2R::Ok();
  }

  static C2R ProfileLevelSetter(
      bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    (void)size;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  std::shared_ptr<C2StreamColorAspectsTuning::output>
  getDefaultColorAspects_l() {
    return mDefaultColorAspects;
  }

  std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
    return mColorAspects;
  }

  static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
                                      C2P<C2StreamHdr10PlusInfo::input> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  static C2R Hdr10PlusInfoOutputSetter(bool mayBlock,
                                       C2P<C2StreamHdr10PlusInfo::output> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  // unsafe getters
  std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }

  static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output> &me) {
    (void)mayBlock;
    if (me.v.mastering.red.x > 1) {
      me.set().mastering.red.x = 1;
    }
    if (me.v.mastering.red.y > 1) {
      me.set().mastering.red.y = 1;
    }
    if (me.v.mastering.green.x > 1) {
      me.set().mastering.green.x = 1;
    }
    if (me.v.mastering.green.y > 1) {
      me.set().mastering.green.y = 1;
    }
    if (me.v.mastering.blue.x > 1) {
      me.set().mastering.blue.x = 1;
    }
    if (me.v.mastering.blue.y > 1) {
      me.set().mastering.blue.y = 1;
    }
    if (me.v.mastering.white.x > 1) {
      me.set().mastering.white.x = 1;
    }
    if (me.v.mastering.white.y > 1) {
      me.set().mastering.white.y = 1;
    }
    if (me.v.mastering.maxLuminance > 65535.0) {
      me.set().mastering.maxLuminance = 65535.0;
    }
    if (me.v.mastering.minLuminance > 6.5535) {
      me.set().mastering.minLuminance = 6.5535;
    }
    if (me.v.maxCll > 65535.0) {
      me.set().maxCll = 65535.0;
    }
    if (me.v.maxFall > 65535.0) {
      me.set().maxFall = 65535.0;
    }
    return C2R::Ok();
  }

 private:
  std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
  std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
  std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
  std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
  std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
  std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
  std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
  std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
  std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
  std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
};

C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
                             const std::shared_ptr<IntfImpl> &intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecCtx(nullptr) {
  mTimeStart = mTimeEnd = systemTime();
}

C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }

c2_status_t C2SoftGav1Dec::onInit() {
  return initDecoder() ? C2_OK : C2_CORRUPTED;
}

c2_status_t C2SoftGav1Dec::onStop() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  return C2_OK;
}

void C2SoftGav1Dec::onReset() {
  (void)onStop();
  c2_status_t err = onFlush_sm();
  if (err != C2_OK) {
    ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
    destroyDecoder();
    if (!initDecoder()) {
      ALOGE("Hard reset failed.");
    }
  }
}

void C2SoftGav1Dec::onRelease() { destroyDecoder(); }

c2_status_t C2SoftGav1Dec::onFlush_sm() {
  Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  // Dequeue frame (if any) that was enqueued previously.
  const libgav1::DecoderBuffer *buffer;
  status = mCodecCtx->DequeueFrame(&buffer);
  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("Failed to dequeue frame after flushing the av1 decoder. status: %d",
          status);
    return C2_CORRUPTED;
  }

  mSignalledError = false;
  mSignalledOutputEos = false;

  return C2_OK;
}

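// Returns the number of online CPU cores; used to size the libgav1 thread pool.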
static int GetCPUCoreCount() {
  int cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
  cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
  // _SC_NPROC_ONLN must be defined...
  cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
  CHECK(cpuCoreCount >= 1);
  ALOGV("Number of CPU cores: %d", cpuCoreCount);
  return cpuCoreCount;
}

bool C2SoftGav1Dec::initDecoder() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
  {
    IntfImpl::Lock lock = mIntf->lock();
    mPixelFormatInfo = mIntf->getPixelFormat_l();
  }
  mCodecCtx.reset(new libgav1::Decoder());

  if (mCodecCtx == nullptr) {
    ALOGE("mCodecCtx is null");
    return false;
  }

  libgav1::DecoderSettings settings = {};
  settings.threads = GetCPUCoreCount();

  ALOGV("Using libgav1 AV1 software decoder.");
  Libgav1StatusCode status = mCodecCtx->Init(&settings);
  if (status != kLibgav1StatusOk) {
    ALOGE("av1 decoder failed to initialize. status: %d.", status);
    return false;
  }

  return true;
}

void C2SoftGav1Dec::destroyDecoder() { mCodecCtx = nullptr; }

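// Completes |work| with no output buffer, propagating the EOS flag if it was set on the input.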
void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
  uint32_t flags = 0;
  if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
    flags |= C2FrameData::FLAG_END_OF_STREAM;
    ALOGV("signalling eos");
  }
  work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
  work->worklets.front()->output.buffers.clear();
  work->worklets.front()->output.ordinal = work->input.ordinal;
  work->workletsProcessed = 1u;
}

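// Wraps the decoded graphic block in a C2Buffer, attaches the current color aspects, and
// completes either |work| (when it matches |index|) or the pending work item registered
// under |index|.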
void C2SoftGav1Dec::finishWork(uint64_t index,
                               const std::unique_ptr<C2Work> &work,
                               const std::shared_ptr<C2GraphicBlock> &block) {
  std::shared_ptr<C2Buffer> buffer =
      createGraphicBuffer(block, C2Rect(mWidth, mHeight));
  {
    IntfImpl::Lock lock = mIntf->lock();
    buffer->setInfo(mIntf->getColorAspects_l());
  }
  auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
        (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
      flags |= C2FrameData::FLAG_END_OF_STREAM;
      ALOGV("signalling eos");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.buffers.push_back(buffer);
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
  };
  if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
    fillWork(work);
  } else {
    finish(index, fillWork);
  }
}

void C2SoftGav1Dec::process(const std::unique_ptr<C2Work> &work,
                            const std::shared_ptr<C2BlockPool> &pool) {
  work->result = C2_OK;
  work->workletsProcessed = 0u;
  work->worklets.front()->output.configUpdate.clear();
  work->worklets.front()->output.flags = work->input.flags;
  if (mSignalledError || mSignalledOutputEos) {
    work->result = C2_BAD_VALUE;
    return;
  }

  size_t inOffset = 0u;
  size_t inSize = 0u;
  C2ReadView rView = mDummyReadView;
  if (!work->input.buffers.empty()) {
    rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
    inSize = rView.capacity();
    if (inSize && rView.error()) {
      ALOGE("read view map failed %d", rView.error());
      work->result = C2_CORRUPTED;
      return;
    }
  }

  bool codecConfig =
      ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
  bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

  ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", inSize,
        (int)work->input.ordinal.timestamp.peeku(),
        (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);

  if (codecConfig) {
    fillEmptyWork(work);
    return;
  }

  int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
  if (inSize) {
    uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);

    mTimeStart = systemTime();
    nsecs_t delay = mTimeStart - mTimeEnd;

    const Libgav1StatusCode status =
        mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
                                /*buffer_private_data=*/nullptr);

    mTimeEnd = systemTime();
    nsecs_t decodeTime = mTimeEnd - mTimeStart;
    ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

    if (status != kLibgav1StatusOk) {
      ALOGE("av1 decoder failed to decode frame. status: %d.", status);
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      mSignalledError = true;
      return;
    }
  }

  (void)outputBuffer(pool, work);

  if (eos) {
    drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
    mSignalledOutputEos = true;
  } else if (!inSize) {
    fillEmptyWork(work);
  }
}

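// Translates the mastering display color volume and content light level metadata reported by
// libgav1 into C2StreamHdrStaticMetadataInfo and publishes it on |work| when it changes.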
void C2SoftGav1Dec::getHDRStaticParams(const libgav1::DecoderBuffer *buffer,
                                       const std::unique_ptr<C2Work> &work) {
  C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
  bool infoPresent = false;
  if (buffer->has_hdr_mdcv) {
    // hdr_mdcv.primary_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.red.x = buffer->hdr_mdcv.primary_chromaticity_x[0] / 65536.0;
    hdrStaticMetadataInfo.mastering.red.y = buffer->hdr_mdcv.primary_chromaticity_y[0] / 65536.0;

    hdrStaticMetadataInfo.mastering.green.x = buffer->hdr_mdcv.primary_chromaticity_x[1] / 65536.0;
    hdrStaticMetadataInfo.mastering.green.y = buffer->hdr_mdcv.primary_chromaticity_y[1] / 65536.0;

    hdrStaticMetadataInfo.mastering.blue.x = buffer->hdr_mdcv.primary_chromaticity_x[2] / 65536.0;
    hdrStaticMetadataInfo.mastering.blue.y = buffer->hdr_mdcv.primary_chromaticity_y[2] / 65536.0;

    // hdr_mdcv.white_point_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.white.x = buffer->hdr_mdcv.white_point_chromaticity_x / 65536.0;
    hdrStaticMetadataInfo.mastering.white.y = buffer->hdr_mdcv.white_point_chromaticity_y / 65536.0;

    // hdr_mdcv.luminance_max is in 24.8 fixed-point format.
    hdrStaticMetadataInfo.mastering.maxLuminance = buffer->hdr_mdcv.luminance_max / 256.0;
    // hdr_mdcv.luminance_min is in 18.14 format.
    hdrStaticMetadataInfo.mastering.minLuminance = buffer->hdr_mdcv.luminance_min / 16384.0;
    infoPresent = true;
  }

  if (buffer->has_hdr_cll) {
    hdrStaticMetadataInfo.maxCll = buffer->hdr_cll.max_cll;
    hdrStaticMetadataInfo.maxFall = buffer->hdr_cll.max_fall;
    infoPresent = true;
  }
  // config if static info has changed
  if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
    mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
    work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(mHdrStaticMetadataInfo));
  }
}

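// Repackages any ITU-T T.35 metadata attached to the frame (HDR10+ dynamic metadata) into a
// C2StreamHdr10PlusInfo config update when it differs from the last one sent.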
void C2SoftGav1Dec::getHDR10PlusInfoData(const libgav1::DecoderBuffer *buffer,
                                         const std::unique_ptr<C2Work> &work) {
  if (buffer->has_itut_t35) {
    std::vector<uint8_t> payload;
    size_t payloadSize = buffer->itut_t35.payload_size;
    if (payloadSize > 0) {
      payload.push_back(buffer->itut_t35.country_code);
      if (buffer->itut_t35.country_code == 0xFF) {
        payload.push_back(buffer->itut_t35.country_code_extension_byte);
      }
      payload.insert(payload.end(), buffer->itut_t35.payload_bytes,
                     buffer->itut_t35.payload_bytes + buffer->itut_t35.payload_size);
    }

    std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
        C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
    if (!hdr10PlusInfo) {
      ALOGE("Hdr10PlusInfo allocation failed");
      mSignalledError = true;
      work->result = C2_NO_MEMORY;
      return;
    }
    memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());

    // config if hdr10Plus info has changed
    if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
      mHdr10PlusInfo = std::move(hdr10PlusInfo);
      work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
    }
  }
}

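// Maps the bitstream color description (primaries, transfer, matrix, range) to C2Color values
// and pushes them to the interface as coded color aspects whenever they change.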
void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
  VuiColorAspects vuiColorAspects;
  vuiColorAspects.primaries = buffer->color_primary;
  vuiColorAspects.transfer = buffer->transfer_characteristics;
  vuiColorAspects.coeffs = buffer->matrix_coefficients;
  vuiColorAspects.fullRange = buffer->color_range;

  // convert vui aspects to C2 values if changed
  if (!(vuiColorAspects == mBitstreamColorAspects)) {
    mBitstreamColorAspects = vuiColorAspects;
    ColorAspects sfAspects;
    C2StreamColorAspectsInfo::input codedAspects = { 0u };
    ColorUtils::convertIsoColorAspectsToCodecAspects(
        vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
        vuiColorAspects.fullRange, sfAspects);
    if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
      codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
      codedAspects.range = C2Color::RANGE_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
      codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
      codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
    }
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
  }
}

void C2SoftGav1Dec::setError(const std::unique_ptr<C2Work> &work, c2_status_t error) {
  mSignalledError = true;
  work->result = error;
  work->workletsProcessed = 1u;
}

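// Grows the scratch buffer used for 10-bit 4:4:4/4:2:2 conversions; keeps the existing
// allocation when it is already large enough. |size| is in uint16_t units.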
bool C2SoftGav1Dec::allocTmpFrameBuffer(size_t size) {
  if (size > mTmpFrameBufferSize) {
    mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
    if (mTmpFrameBuffer == nullptr) {
      mTmpFrameBufferSize = 0;
      return false;
    }
    mTmpFrameBufferSize = size;
  }
  return true;
}

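// Dequeues one decoded frame from libgav1, copies/converts it into a graphic block in the
// negotiated pixel format, and finishes the corresponding work item. Returns false when there
// is nothing to output or an error occurred.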
bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                 const std::unique_ptr<C2Work> &work) {
  if (!(work && pool)) return false;

  const libgav1::DecoderBuffer *buffer;
  const Libgav1StatusCode status = mCodecCtx->DequeueFrame(&buffer);

  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("av1 decoder DequeueFrame failed. status: %d.", status);
    return false;
  }

  // |buffer| can be NULL if status was equal to kLibgav1StatusOk or
  // kLibgav1StatusNothingToDequeue. This is not an error. It means one of two
  // things:
  //  - The EnqueueFrame() call was a flush (called with nullptr).
  //  - The enqueued frame did not have any displayable frames.
  if (!buffer) {
    return false;
  }

  const int width = buffer->displayed_width[0];
  const int height = buffer->displayed_height[0];
  if (width != mWidth || height != mHeight) {
    mWidth = width;
    mHeight = height;

    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(size));
    } else {
      ALOGE("Config update size failed");
      mSignalledError = true;
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      return false;
    }
  }

  getVuiParams(buffer);
  getHDRStaticParams(buffer, work);
  getHDR10PlusInfoData(buffer, work);

#if LIBYUV_VERSION < 1779
  if (buffer->bitdepth == 10 &&
      !(buffer->image_format == libgav1::kImageFormatYuv420 ||
        buffer->image_format == libgav1::kImageFormatMonochrome400)) {
    ALOGE("image_format %d not supported for 10bit", buffer->image_format);
    mSignalledError = true;
    work->workletsProcessed = 1u;
    work->result = C2_CORRUPTED;
    return false;
  }
#endif

  const bool isMonochrome =
      buffer->image_format == libgav1::kImageFormatMonochrome400;

  std::shared_ptr<C2GraphicBlock> block;
  uint32_t format = HAL_PIXEL_FORMAT_YV12;
  std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
    IntfImpl::Lock lock = mIntf->lock();
    codedColorAspects = mIntf->getColorAspects_l();
    bool allowRGBA1010102 = false;
    if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
        codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
        codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
      allowRGBA1010102 = true;
    }
    format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
    if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
        (buffer->image_format != libgav1::kImageFormatYuv420)) {
      ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
      mSignalledError = true;
      work->result = C2_OMITTED;
      work->workletsProcessed = 1u;
      return false;
    }
  }

  if (mHalPixelFormat != format) {
    C2StreamPixelFormatInfo::output pixelFormat(0u, format);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(pixelFormat));
    } else {
      ALOGE("Config update pixelFormat failed");
      mSignalledError = true;
      work->workletsProcessed = 1u;
      work->result = C2_CORRUPTED;
      return false;
    }
    mHalPixelFormat = format;
  }

  C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

  // We always create a graphic block that is width aligned to 16 and height
  // aligned to 2. We set the correct "crop" value of the image in the call to
  // createGraphicBuffer() by setting the correct image dimensions.
  c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16),
                                            align(mHeight, 2), format, usage,
                                            &block);

  if (err != C2_OK) {
    ALOGE("fetchGraphicBlock for Output failed with status %d", err);
    work->result = err;
    return false;
  }

  C2GraphicView wView = block->map().get();

  if (wView.error()) {
    ALOGE("graphic view map failed %d", wView.error());
    work->result = C2_CORRUPTED;
    return false;
  }

  ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
        block->height(), mWidth, mHeight, (int)buffer->user_private_data);

  uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
  uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
  uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);

  C2PlanarLayout layout = wView.layout();
  size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
  size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
  size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;

  if (buffer->bitdepth == 10) {
    const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
    const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
    const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0] / 2;
    size_t srcUStride = buffer->stride[1] / 2;
    size_t srcVStride = buffer->stride[2] / 2;

    if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
      convertYUV420Planar16ToY410OrRGBA1010102(
          (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
          srcUStride, srcVStride,
          dstYStride / sizeof(uint32_t), mWidth, mHeight,
          std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
    } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
      dstYStride /= 2;
      dstUStride /= 2;
      dstVStride /= 2;
#if LIBYUV_VERSION >= 1779
      if (buffer->image_format == libgav1::kImageFormatYuv444 ||
          buffer->image_format == libgav1::kImageFormatYuv422) {
        // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010 and
        // libyuv::I210ToP010 when they are available.
        // Note it may be safe to alias dstY in I010ToP010, but the libyuv API doesn't make any
        // guarantees.
        const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
        if (!allocTmpFrameBuffer(tmpSize)) {
          ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
          setError(work, C2_NO_MEMORY);
          return false;
        }
        uint16_t *const tmpY = mTmpFrameBuffer.get();
        uint16_t *const tmpU = tmpY + dstYStride * mHeight;
        uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
        if (buffer->image_format == libgav1::kImageFormatYuv444) {
          libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                             mWidth, mHeight);
        } else {
          libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                             mWidth, mHeight);
        }
        libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                           (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride,
                           mWidth, mHeight);
      } else {
        convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                    srcYStride, srcUStride, srcVStride, dstYStride,
                                    dstUStride, mWidth, mHeight, isMonochrome);
      }
#else  // LIBYUV_VERSION < 1779
      convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                  srcYStride, srcUStride, srcVStride, dstYStride,
                                  dstUStride, mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    } else {
#if LIBYUV_VERSION >= 1779
      if (buffer->image_format == libgav1::kImageFormatYuv444) {
        // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420 when
        // it's available.
        const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
        if (!allocTmpFrameBuffer(tmpSize)) {
          ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
          setError(work, C2_NO_MEMORY);
          return false;
        }
        uint16_t *const tmpY = mTmpFrameBuffer.get();
        uint16_t *const tmpU = tmpY + dstYStride * mHeight;
        uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
        libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                           mWidth, mHeight);
        libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
      } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
        libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
      } else {
        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                    srcUStride, srcVStride, dstYStride, dstUStride,
                                    mWidth, mHeight, isMonochrome);
      }
#else  // LIBYUV_VERSION < 1779
      convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                  srcUStride, srcVStride, dstYStride, dstUStride,
                                  mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    }
  } else {
    const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
    const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
    const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0];
    size_t srcUStride = buffer->stride[1];
    size_t srcVStride = buffer->stride[2];

    if (buffer->image_format == libgav1::kImageFormatYuv444) {
      libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
      libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else {
      convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
                                 srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
                                 isMonochrome);
    }
  }
  finishWork(buffer->user_private_data, work, std::move(block));
  block = nullptr;
  return true;
}

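// Signals EOS to libgav1 and outputs every remaining displayable frame. With
// DRAIN_COMPONENT_WITH_EOS, |work| is completed empty if no output frame filled it.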
c2_status_t C2SoftGav1Dec::drainInternal(
    uint32_t drainMode, const std::shared_ptr<C2BlockPool> &pool,
    const std::unique_ptr<C2Work> &work) {
  if (drainMode == NO_DRAIN) {
    ALOGW("drain with NO_DRAIN: no-op");
    return C2_OK;
  }
  if (drainMode == DRAIN_CHAIN) {
    ALOGW("DRAIN_CHAIN not supported");
    return C2_OMITTED;
  }

  const Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  while (outputBuffer(pool, work)) {
  }

  if (drainMode == DRAIN_COMPONENT_WITH_EOS && work &&
      work->workletsProcessed == 0u) {
    fillEmptyWork(work);
  }

  return C2_OK;
}

c2_status_t C2SoftGav1Dec::drain(uint32_t drainMode,
                                 const std::shared_ptr<C2BlockPool> &pool) {
  return drainInternal(drainMode, pool, nullptr);
}

class C2SoftGav1Factory : public C2ComponentFactory {
 public:
  C2SoftGav1Factory()
      : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
            GetCodec2PlatformComponentStore()->getParamReflector())) {}

  virtual c2_status_t createComponent(
      c2_node_id_t id, std::shared_ptr<C2Component> *const component,
      std::function<void(C2Component *)> deleter) override {
    *component = std::shared_ptr<C2Component>(
        new C2SoftGav1Dec(COMPONENT_NAME, id,
                          std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual c2_status_t createInterface(
      c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *const interface,
      std::function<void(C2ComponentInterface *)> deleter) override {
    *interface = std::shared_ptr<C2ComponentInterface>(
        new SimpleInterface<C2SoftGav1Dec::IntfImpl>(
            COMPONENT_NAME, id,
            std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual ~C2SoftGav1Factory() override = default;

 private:
  std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table))
extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
  ALOGV("in %s", __func__);
  return new ::android::C2SoftGav1Factory();
}

__attribute__((cfi_canonical_jump_table))
extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
  ALOGV("in %s", __func__);
  delete factory;
}