/*
 * Copyright (C) 2024 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftApvEnc"
#include <log/log.h>

#include <android_media_swcodec_flags.h>

#include <media/hardware/VideoAPI.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/foundation/AUtils.h>

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <media/stagefright/foundation/ABitReader.h>
#include <util/C2InterfaceHelper.h>
#include <cmath>
#include "C2SoftApvEnc.h"

namespace android {

namespace {

constexpr char COMPONENT_NAME[] = "c2.android.apv.encoder";
constexpr uint32_t kMinOutBufferSize = 524288;
constexpr uint32_t kMaxBitstreamBufSize = 16 * 1024 * 1024;
constexpr int32_t kApvQpMin = 0;
constexpr int32_t kApvQpMax = 51;
constexpr int32_t kApvDefaultQP = 32;

#define PROFILE_APV_DEFAULT 0
#define LEVEL_APV_DEFAULT 0
#define MAX_NUM_FRMS (1)  // supports only 1-frame input

}  // namespace

class C2SoftApvEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
  public:
    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_ENCODER,
                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_APV) {
        noPrivateBuffers();
        noInputReferences();
        noOutputReferences();
        noTimeStretch();
        setDerivedInstance(this);

        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                             .withConstValue(new C2ComponentAttributesSetting(
                                     C2Component::ATTRIB_IS_TEMPORAL))
                             .build());

        addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
                             .withConstValue(new C2StreamUsageTuning::input(
                                     0u, (uint64_t)C2MemoryUsage::CPU_READ))
                             .build());

        // matches size limits in codec library
        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                             .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
                             .withFields({
                                     C2F(mSize, width).inRange(2, 4096, 2),
                                     C2F(mSize, height).inRange(2, 4096, 2),
                             })
                             .withSetter(SizeSetter)
                             .build());

        // matches limits in codec library
        addParameter(DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
                             .withDefault(new C2StreamBitrateModeTuning::output(
                                     0u, C2Config::BITRATE_VARIABLE))
                             .withFields({C2F(mBitrateMode, value)
                                                  .oneOf({C2Config::BITRATE_CONST,
                                                          C2Config::BITRATE_VARIABLE,
                                                          C2Config::BITRATE_IGNORE})})
                             .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
                             .build());

        addParameter(DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
                             .withDefault(new C2StreamBitrateInfo::output(0u, 512000))
                             .withFields({C2F(mBitrate, value).inRange(512000, 240000000)})
                             .withSetter(BitrateSetter)
                             .build());

        addParameter(DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
                             .withDefault(new C2StreamFrameRateInfo::output(0u, 15.))
                             .withFields({C2F(mFrameRate, value).greaterThan(0.)})
                             .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
                             .build());

        addParameter(DefineParam(mQuality, C2_PARAMKEY_QUALITY)
                             .withDefault(new C2StreamQualityTuning::output(0u, 40))
                             .withFields({C2F(mQuality, value).inRange(0, 100)})
                             .withSetter(Setter<decltype(*mQuality)>::NonStrictValueWithNoDeps)
                             .build());

        addParameter(
                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                        .withDefault(new C2StreamProfileLevelInfo::output(
                                0u, C2Config::PROFILE_APV_422_10, LEVEL_APV_1_BAND_0))
                        .withFields({
                                C2F(mProfileLevel, profile).oneOf({C2Config::PROFILE_APV_422_10}),
                                C2F(mProfileLevel, level)
                                        .oneOf({
                                                C2Config::LEVEL_APV_1_BAND_0,
                                                C2Config::LEVEL_APV_1_1_BAND_0,
                                                C2Config::LEVEL_APV_2_BAND_0,
                                                C2Config::LEVEL_APV_2_1_BAND_0,
                                                C2Config::LEVEL_APV_3_BAND_0,
                                                C2Config::LEVEL_APV_3_1_BAND_0,
                                                C2Config::LEVEL_APV_4_BAND_0,
                                                C2Config::LEVEL_APV_4_1_BAND_0,
                                                C2Config::LEVEL_APV_5_BAND_0,
                                                C2Config::LEVEL_APV_5_1_BAND_0,
                                                C2Config::LEVEL_APV_6_BAND_0,
                                                C2Config::LEVEL_APV_6_1_BAND_0,
                                                C2Config::LEVEL_APV_7_BAND_0,
                                                C2Config::LEVEL_APV_7_1_BAND_0,
                                                C2Config::LEVEL_APV_1_BAND_1,
                                                C2Config::LEVEL_APV_1_1_BAND_1,
                                                C2Config::LEVEL_APV_2_BAND_1,
                                                C2Config::LEVEL_APV_2_1_BAND_1,
                                                C2Config::LEVEL_APV_3_BAND_1,
                                                C2Config::LEVEL_APV_3_1_BAND_1,
                                                C2Config::LEVEL_APV_4_BAND_1,
                                                C2Config::LEVEL_APV_4_1_BAND_1,
                                                C2Config::LEVEL_APV_5_BAND_1,
                                                C2Config::LEVEL_APV_5_1_BAND_1,
                                                C2Config::LEVEL_APV_6_BAND_1,
                                                C2Config::LEVEL_APV_6_1_BAND_1,
                                                C2Config::LEVEL_APV_7_BAND_1,
                                                C2Config::LEVEL_APV_7_1_BAND_1,
                                                C2Config::LEVEL_APV_1_BAND_2,
                                                C2Config::LEVEL_APV_1_1_BAND_2,
                                                C2Config::LEVEL_APV_2_BAND_2,
                                                C2Config::LEVEL_APV_2_1_BAND_2,
                                                C2Config::LEVEL_APV_3_BAND_2,
                                                C2Config::LEVEL_APV_3_1_BAND_2,
                                                C2Config::LEVEL_APV_4_BAND_2,
                                                C2Config::LEVEL_APV_4_1_BAND_2,
                                                C2Config::LEVEL_APV_5_BAND_2,
                                                C2Config::LEVEL_APV_5_1_BAND_2,
                                                C2Config::LEVEL_APV_6_BAND_2,
                                                C2Config::LEVEL_APV_6_1_BAND_2,
                                                C2Config::LEVEL_APV_7_BAND_2,
                                                C2Config::LEVEL_APV_7_1_BAND_2,
                                                C2Config::LEVEL_APV_1_BAND_3,
                                                C2Config::LEVEL_APV_1_1_BAND_3,
                                                C2Config::LEVEL_APV_2_BAND_3,
                                                C2Config::LEVEL_APV_2_1_BAND_3,
                                                C2Config::LEVEL_APV_3_BAND_3,
                                                C2Config::LEVEL_APV_3_1_BAND_3,
                                                C2Config::LEVEL_APV_4_BAND_3,
                                                C2Config::LEVEL_APV_4_1_BAND_3,
                                                C2Config::LEVEL_APV_5_BAND_3,
                                                C2Config::LEVEL_APV_5_1_BAND_3,
                                                C2Config::LEVEL_APV_6_BAND_3,
                                                C2Config::LEVEL_APV_6_1_BAND_3,
                                                C2Config::LEVEL_APV_7_BAND_3,
                                                C2Config::LEVEL_APV_7_1_BAND_3,
                                        }),
                        })
                        .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                        .build());

        addParameter(DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
                             .withDefault(new C2StreamColorAspectsInfo::input(
                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                             .withFields({C2F(mColorAspects, range)
                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
                                                           C2Color::RANGE_OTHER),
                                          C2F(mColorAspects, primaries)
                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                                           C2Color::PRIMARIES_OTHER),
                                          C2F(mColorAspects, transfer)
                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                                           C2Color::TRANSFER_OTHER),
                                          C2F(mColorAspects, matrix)
                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
                                                           C2Color::MATRIX_OTHER)})
                             .withSetter(ColorAspectsSetter)
                             .build());

        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
                             .withDefault(new C2StreamColorAspectsInfo::output(
                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                             .withFields({C2F(mCodedColorAspects, range)
                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
                                                           C2Color::RANGE_OTHER),
                                          C2F(mCodedColorAspects, primaries)
                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                                           C2Color::PRIMARIES_OTHER),
                                          C2F(mCodedColorAspects, transfer)
                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                                           C2Color::TRANSFER_OTHER),
                                          C2F(mCodedColorAspects, matrix)
                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
                                                           C2Color::MATRIX_OTHER)})
                             .withSetter(CodedColorAspectsSetter, mColorAspects)
                             .build());

        std::vector<uint32_t> pixelFormats = {
                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
        };
        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
        }
        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
                             .withDefault(new C2StreamPixelFormatInfo::input(
                                     0u, HAL_PIXEL_FORMAT_YCBCR_P010))
                             .withFields({C2F(mPixelFormat, value).oneOf({pixelFormats})})
                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
                             .build());
    }

    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
        (void)mayBlock;
        C2R res = C2R::Ok();
        if (me.v.value < 1000000) {
            me.set().value = 1000000;
        }
        return res;
    }

    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input>& oldMe,
                          C2P<C2StreamPictureSizeInfo::input>& me) {
        (void)mayBlock;
        C2R res = C2R::Ok();
        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
            me.set().width = oldMe.v.width;
        }
        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
            me.set().height = oldMe.v.height;
        }
        return res;
    }

    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me,
                                  const C2P<C2StreamPictureSizeInfo::input>& size,
                                  const C2P<C2StreamFrameRateInfo::output>& frameRate,
                                  const C2P<C2StreamBitrateInfo::output>& bitrate) {
        (void)mayBlock;
        if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
            me.set().profile = C2Config::PROFILE_APV_422_10;
        }
        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
            me.set().level = LEVEL_APV_1_BAND_0;
        }
        return C2R::Ok();
    }

    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
        (void)mayBlock;
        if (me.v.range > C2Color::RANGE_OTHER) {
            me.set().range = C2Color::RANGE_OTHER;
        }
        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
            me.set().primaries = C2Color::PRIMARIES_OTHER;
        }
        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
            me.set().transfer = C2Color::TRANSFER_OTHER;
        }
        if (me.v.matrix > C2Color::MATRIX_OTHER) {
            me.set().matrix = C2Color::MATRIX_OTHER;
        }
        return C2R::Ok();
    }

    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
                                       const C2P<C2StreamColorAspectsInfo::input>& coded) {
        (void)mayBlock;
        me.set().range = coded.v.range;
        me.set().primaries = coded.v.primaries;
        me.set().transfer = coded.v.transfer;
        me.set().matrix = coded.v.matrix;
        return C2R::Ok();
    }

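    // Maps the configured C2Config APV profile to the numeric profile_idc value handed to the
    // OpenAPV library (e.g. 33 for the 4:2:2 10-bit profile).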
    uint32_t getProfile_l() const {
        int32_t profile = PROFILE_UNUSED;

        switch (mProfileLevel->profile) {
            case C2Config::PROFILE_APV_422_10:
                profile = 33;
                break;
            case C2Config::PROFILE_APV_422_12:
                profile = 44;
                break;
            case C2Config::PROFILE_APV_444_10:
                profile = 55;
                break;
            case C2Config::PROFILE_APV_444_12:
                profile = 66;
                break;
            case C2Config::PROFILE_APV_4444_10:
                profile = 77;
                break;
            case C2Config::PROFILE_APV_4444_12:
                profile = 88;
                break;
            case C2Config::PROFILE_APV_400_10:
                profile = 99;
                break;
            default:
                ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
        }
        return profile;
    }

    uint32_t getLevel_l() const {
        int32_t level = LEVEL_UNUSED;

        // TODO: Add Band settings
        switch (mProfileLevel->level) {
            case C2Config::LEVEL_APV_1_BAND_0:
                level = 10;
                break;
            case C2Config::LEVEL_APV_1_1_BAND_0:
                level = 11;
                break;
            case C2Config::LEVEL_APV_2_BAND_0:
                level = 20;
                break;
            case C2Config::LEVEL_APV_2_1_BAND_0:
                level = 21;
                break;
            case C2Config::LEVEL_APV_3_BAND_0:
                level = 30;
                break;
            case C2Config::LEVEL_APV_3_1_BAND_0:
                level = 31;
                break;
            case C2Config::LEVEL_APV_4_BAND_0:
                level = 40;
                break;
            case C2Config::LEVEL_APV_4_1_BAND_0:
                level = 41;
                break;
            case C2Config::LEVEL_APV_5_BAND_0:
                level = 50;
                break;
            case C2Config::LEVEL_APV_5_1_BAND_0:
                level = 51;
                break;
            case C2Config::LEVEL_APV_6_BAND_0:
                level = 60;
                break;
            case C2Config::LEVEL_APV_6_1_BAND_0:
                level = 61;
                break;
            case C2Config::LEVEL_APV_7_BAND_0:
                level = 70;
                break;
            case C2Config::LEVEL_APV_7_1_BAND_0:
                level = 71;
                break;
            default:
                ALOGD("Unrecognized level: %x", mProfileLevel->level);
        }
        // Convert to APV level_idc according to APV spec
        return level * 3;
    }

    int32_t getBitrateMode_l() const {
        int32_t bitrateMode = C2Config::BITRATE_CONST;

        switch (mBitrateMode->value) {
            case C2Config::BITRATE_CONST:
                bitrateMode = OAPV_RC_CQP;
                break;
            case C2Config::BITRATE_VARIABLE:
                bitrateMode = OAPV_RC_ABR;
                break;
            case C2Config::BITRATE_IGNORE:
                bitrateMode = 0;
                break;
            default:
                ALOGE("Unrecognized bitrate mode: %x", mBitrateMode->value);
        }
        return bitrateMode;
    }

    std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
    std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
    std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
    std::shared_ptr<C2StreamQualityTuning::output> getQuality_l() const { return mQuality; }
    std::shared_ptr<C2StreamColorAspectsInfo::input> getColorAspects_l() const {
        return mColorAspects;
    }
    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
        return mCodedColorAspects;
    }
    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
        return mPictureQuantization;
    }
    std::shared_ptr<C2StreamProfileLevelInfo::output> getProfileLevel_l() const {
        return mProfileLevel;
    }
    std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
        return mPixelFormat;
    }

  private:
    std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
    std::shared_ptr<C2StreamUsageTuning::input> mUsage;
    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
    std::shared_ptr<C2StreamQualityTuning::output> mQuality;
    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
    std::shared_ptr<C2StreamColorInfo::input> mColorFormat;
    std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
};

C2SoftApvEnc::C2SoftApvEnc(const char* name, c2_node_id_t id,
                           const std::shared_ptr<IntfImpl>& intfImpl)
    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mColorFormat(OAPV_CF_PLANAR2),
      mStarted(false),
      mSignalledEos(false),
      mSignalledError(false),
      mOutBlock(nullptr) {
    reset();
}

C2SoftApvEnc::~C2SoftApvEnc() {
    onRelease();
}

c2_status_t C2SoftApvEnc::onInit() {
    return C2_OK;
}

c2_status_t C2SoftApvEnc::onStop() {
    return C2_OK;
}

void C2SoftApvEnc::onReset() {
    releaseEncoder();
    reset();
}

void C2SoftApvEnc::onRelease() {
    releaseEncoder();
}

c2_status_t C2SoftApvEnc::onFlush_sm() {
    return C2_OK;
}

static void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
    uint32_t flags = 0;
    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
        flags |= C2FrameData::FLAG_END_OF_STREAM;
        ALOGV("Signalling EOS");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
}

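// Linearly maps a Codec2 quality value in [0, 100] to an APV QP in [kApvQpMin, kApvQpMax]:
// quality 0 yields kApvQpMax (51) and quality 100 yields kApvQpMin (0).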
int32_t C2SoftApvEnc::getQpFromQuality(int32_t quality) {
    int32_t qp = ((kApvQpMin - kApvQpMax) * quality / 100) + kApvQpMax;
    qp = std::min(qp, (int)kApvQpMax);
    qp = std::max(qp, (int)kApvQpMin);
    return qp;
}

c2_status_t C2SoftApvEnc::reset() {
    ALOGV("reset");
    mInitEncoder = false;
    mStarted = false;
    mSignalledEos = false;
    mSignalledError = false;
    mBitDepth = 10;
    mMaxFrames = MAX_NUM_FRMS;
    mReceivedFrames = 0;
    mReceivedFirstFrame = false;
    mColorFormat = OAPV_CF_PLANAR2;
    memset(&mInputFrames, 0, sizeof(mInputFrames));
    memset(&mReconFrames, 0, sizeof(mReconFrames));
    return C2_OK;
}

c2_status_t C2SoftApvEnc::releaseEncoder() {
    for (int32_t i = 0; i < MAX_NUM_FRMS; i++) {
        if (mInputFrames.frm[i].imgb != nullptr) {
            imgb_release(mInputFrames.frm[i].imgb);
            mInputFrames.frm[i].imgb = nullptr;
        }
    }

    if (mBitstreamBuf) {
        // The buffer is allocated with new[] in initEncoder(), so release it with delete[].
        delete[] mBitstreamBuf;
        mBitstreamBuf = nullptr;
    }
    return C2_OK;
}

c2_status_t C2SoftApvEnc::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
    return drainInternal(drainMode, pool, nullptr);
}

void C2SoftApvEnc::showEncoderParams(oapve_cdesc_t* cdsc) {
    std::string title = "APV encoder params:";
    ALOGD("%s width = %d, height = %d", title.c_str(), cdsc->param[0].w, cdsc->param[0].h);
    ALOGD("%s FrameRate = %f", title.c_str(),
          (double)cdsc->param[0].fps_num / cdsc->param[0].fps_den);
    ALOGD("%s BitRate = %d Kbps", title.c_str(), cdsc->param[0].bitrate);
    ALOGD("%s QP = %d", title.c_str(), cdsc->param[0].qp);
    ALOGD("%s profile_idc = %d, level_idc = %d, band_idc = %d", title.c_str(),
          cdsc->param[0].profile_idc, cdsc->param[0].level_idc / 3, cdsc->param[0].band_idc);
    ALOGD("%s Bitrate Mode: %d", title.c_str(), cdsc->param[0].rc_type);
    ALOGD("%s mColorAspects primaries: %d, transfer: %d, matrix: %d, range: %d", title.c_str(),
          mColorAspects->primaries, mColorAspects->transfer, mColorAspects->matrix,
          mColorAspects->range);
    ALOGD("%s mCodedColorAspects primaries: %d, transfer: %d, matrix: %d, range: %d", title.c_str(),
          mCodedColorAspects->primaries, mCodedColorAspects->transfer, mCodedColorAspects->matrix,
          mCodedColorAspects->range);
    ALOGD("%s Input color format: %s", title.c_str(),
          mColorFormat == OAPV_CF_YCBCR422 ? "YUV422P10LE" : "P210");
    ALOGD("%s max_num_frms: %d", title.c_str(), cdsc->max_num_frms);
}

c2_status_t C2SoftApvEnc::initEncoder() {
    if (mInitEncoder) {
        return C2_OK;
    }
    ALOGV("initEncoder");

    mSize = mIntf->getSize_l();
    mFrameRate = mIntf->getFrameRate_l();
    mBitrate = mIntf->getBitrate_l();
    mQuality = mIntf->getQuality_l();
    mColorAspects = mIntf->getColorAspects_l();
    mCodedColorAspects = mIntf->getCodedColorAspects_l();
    mProfileLevel = mIntf->getProfileLevel_l();
    mPixelFormat = mIntf->getPixelFormat_l();

    mCodecDesc = std::make_unique<oapve_cdesc_t>();
    if (mCodecDesc == nullptr) {
        ALOGE("Allocate ctx failed");
        return C2_NO_INIT;
    }
    mCodecDesc->max_bs_buf_size = kMaxBitstreamBufSize;
    mCodecDesc->max_num_frms = MAX_NUM_FRMS;
    // TODO: Bound parameters to CPU count
    mCodecDesc->threads = 4;

    int32_t ret = C2_OK;
    /* set params */
    for (int32_t i = 0; i < mMaxFrames; i++) {
        oapve_param_t* param = &mCodecDesc->param[i];
        ret = oapve_param_default(param);
        if (OAPV_FAILED(ret)) {
            ALOGE("cannot set default parameter");
            return C2_NO_INIT;
        }
        setParams(*param);
    }

    showEncoderParams(mCodecDesc.get());

    /* create encoder */
    mEncoderId = oapve_create(mCodecDesc.get(), NULL);
    if (mEncoderId == NULL) {
        ALOGE("cannot create APV encoder");
        return C2_CORRUPTED;
    }

    /* create metadata */
    mMetaId = oapvm_create(&ret);
    if (mMetaId == NULL) {
        ALOGE("cannot create APV metadata handle");
        return C2_NO_MEMORY;
    }

    /* create image buffers */
    for (int32_t i = 0; i < mMaxFrames; i++) {
        if (mBitDepth == 10) {
            mInputFrames.frm[i].imgb = imgb_create(mCodecDesc->param[0].w, mCodecDesc->param[0].h,
                                                   OAPV_CS_SET(mColorFormat, mBitDepth, 0));
            mReconFrames.frm[i].imgb = nullptr;
        } else {
            mInputFrames.frm[i].imgb = imgb_create(mCodecDesc->param[0].w, mCodecDesc->param[0].h,
                                                   OAPV_CS_SET(mColorFormat, 10, 0));
            mReconFrames.frm[i].imgb = nullptr;
        }
    }

    /* allocate bitstream buffer */
    mBitstreamBuf = new unsigned char[kMaxBitstreamBufSize];
    if (mBitstreamBuf == nullptr) {
        ALOGE("cannot allocate bitstream buffer, size = %u", kMaxBitstreamBufSize);
        return C2_NO_MEMORY;
    }

    /* Calculate SDR to HDR mapping values */
    mSdrToHdrMapping.clear();
    for (int32_t i = 0; i < 256; i++) {
        mSdrToHdrMapping.push_back((uint16_t)(i * 1023 / 255 + 0.5));
    }

    mStarted = true;
    mInitEncoder = true;
    return C2_OK;
}

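// Copies the configured Codec2 parameters into the oapve_param_t used by the library. The frame
// rate is stored as a fixed-point fraction (fps * 100 over 100), the QP is derived from the
// quality setting only in constant-QP mode, and the level is recomputed from the resolution,
// frame rate and bitrate.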
void C2SoftApvEnc::setParams(oapve_param_t& param) {
    param.w = mSize->width;
    param.h = mSize->height;
    param.fps_num = (int)(mFrameRate->value * 100);
    param.fps_den = 100;
    param.bitrate = mBitrate->value / 1000;
    param.rc_type = mIntf->getBitrateMode_l();

    int ApvQP = kApvDefaultQP;
    if (param.rc_type == OAPV_RC_CQP) {
        ApvQP = getQpFromQuality(mQuality->value);
        ALOGI("Bitrate mode is CQP, so the QP value is derived from quality. Quality is %d, QP is %d",
              mQuality->value, ApvQP);
    }
    param.qp = ApvQP;
    param.band_idc = 0;  // TODO: Get from the Level setting
    param.profile_idc = mIntf->getProfile_l();
    C2Config::level_t level = decisionApvLevel(
            param.w, param.h, (int)(param.fps_num / param.fps_den), param.bitrate, param.band_idc);
    if (mProfileLevel->level != level) {
        mProfileLevel->level = level;
        ALOGI("Need to update level to %d", mIntf->getLevel_l());
    }
    param.level_idc = mIntf->getLevel_l();
}

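// Validates the input picture against the configured size and converts it into the oapv input
// image buffer, selecting a color conversion based on the input pixel format (P010, NV12/NV21)
// and the encoder's working color format.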
c2_status_t C2SoftApvEnc::setEncodeArgs(oapv_frms_t* inputFrames, const C2GraphicView* const input,
                                        uint64_t workIndex) {
    if (input->width() < mSize->width || input->height() < mSize->height) {
        /* Expect width height to be configured */
        ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", input->width(), mSize->width,
              input->height(), mSize->height);
        return C2_BAD_VALUE;
    }
    const C2PlanarLayout& layout = input->layout();
    uint8_t* yPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_Y]);
    uint8_t* uPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_U]);
    uint8_t* vPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_V]);
    int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
    int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
    int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;

    uint32_t width = mSize->width;
    uint32_t height = mSize->height;

    /* width and height must be even */
    if (width & 1u || height & 1u) {
        ALOGW("height(%u) and width(%u) must both be even", height, width);
        return C2_BAD_VALUE;
    }

    /* Set num frames */
    inputFrames->num_frms = MAX_NUM_FRMS;
    inputFrames->frm[mReceivedFrames].group_id = 1;
    inputFrames->frm[mReceivedFrames].pbu_type = OAPV_PBU_TYPE_PRIMARY_FRAME;

    switch (layout.type) {
        case C2PlanarLayout::TYPE_RGB:
            [[fallthrough]];
        case C2PlanarLayout::TYPE_RGBA: {
            // TODO: Add RGBA1010102 support
            ALOGE("Not supported RGB color format");
            return C2_BAD_VALUE;
        }
        case C2PlanarLayout::TYPE_YUV: {
            if (IsP010(*input)) {
                if (mColorFormat == OAPV_CF_YCBCR422) {
                    ColorConvertP010ToYUV422P10le(input, inputFrames->frm[0].imgb);
                } else if (mColorFormat == OAPV_CF_PLANAR2) {
                    ColorConvertP010ToP210(input, inputFrames->frm[0].imgb);
                } else {
                    ALOGE("Not supported color format. %d", mColorFormat);
                    return C2_BAD_VALUE;
                }
            } else if (IsNV12(*input)) {
                ColorConvertNv12ToP210(input, inputFrames->frm[0].imgb);
            } else if (IsNV21(*input)) {
                ColorConvertNv12ToP210(input, inputFrames->frm[0].imgb);
            } else if (IsYUV420(*input)) {
                return C2_BAD_VALUE;
            } else if (IsI420(*input)) {
                return C2_BAD_VALUE;
            } else {
                ALOGE("Not supported color format. %d", mColorFormat);
                return C2_BAD_VALUE;
            }
            break;
        }

        default:
            ALOGE("Unrecognized plane type: %d", layout.type);
            return C2_BAD_VALUE;
    }

    return C2_OK;
}

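// Converts an 8-bit NV12 (4:2:0, semi-planar) input into the 10-bit P210 (4:2:2, semi-planar)
// layout used by the encoder: each sample is scaled to 10 bits through mSdrToHdrMapping and
// stored MSB-aligned in a 16-bit word, and every chroma row is written twice to fill 4:2:2.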
void C2SoftApvEnc::ColorConvertNv12ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
    auto width = input->width();
    auto height = input->height();

    auto* yPlane = (uint8_t*)input->data()[0];
    auto* uvPlane = (uint8_t*)input->data()[1];

    auto* dst = (uint16_t*)imgb->a[0];
    int32_t lumaOffset = 0;
    for (int32_t y = 0; y < height; ++y) {
        for (int32_t x = 0; x < width; ++x) {
            lumaOffset = y * width + x;
            dst[lumaOffset] = (mSdrToHdrMapping[yPlane[lumaOffset]] << 6) |
                              ((mSdrToHdrMapping[yPlane[lumaOffset]] & 0x300) >> 3);
        }
    }

    auto* dst_uv = (uint16_t*)imgb->a[1];
    uint32_t uvDstStride = width;
    int32_t srcOffset = 0;
    int32_t dstOffset1 = 0, dstOffset2 = 0;
    int32_t tmp1 = 0, tmp2 = 0;
    for (int32_t y = 0; y < height / 2; ++y) {
        for (int32_t x = 0; x < width; x += 2) {
            srcOffset = y * width + x;
            dstOffset1 = (y * 2) * width + x;
            dstOffset2 = ((y * 2) + 1) * width + x;

            tmp1 = (mSdrToHdrMapping[uvPlane[srcOffset]] << 6) |
                   ((mSdrToHdrMapping[uvPlane[srcOffset]] & 0x300) >> 3);
            tmp2 = (mSdrToHdrMapping[uvPlane[srcOffset + 1]] << 6) |
                   ((mSdrToHdrMapping[uvPlane[srcOffset + 1]] & 0x300) >> 3);
            dst_uv[dstOffset1] = (uint16_t)tmp1;
            dst_uv[dstOffset1 + 1] = (uint16_t)tmp2;
            dst_uv[dstOffset2] = (uint16_t)tmp1;
            dst_uv[dstOffset2 + 1] = (uint16_t)tmp2;
        }
    }
}

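// Selects the lowest APV level in the requested band whose samples-per-second and bitrate limits
// cover the configured stream. Falls back to LEVEL_APV_1_BAND_0 when no table row matches or the
// band index is invalid.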
C2Config::level_t C2SoftApvEnc::decisionApvLevel(int32_t width, int32_t height, int32_t fps,
                                                 int32_t bitrate, int32_t band) {
    C2Config::level_t level = C2Config::LEVEL_APV_1_BAND_0;

    struct LevelLimits {
        C2Config::level_t level;
        uint64_t samplesPerSec;
        uint32_t bitratesOfBand;
    };

    constexpr LevelLimits kLimitsBand0[] = {
            {LEVEL_APV_1_BAND_0, 3'041'280, 7'000},
            {LEVEL_APV_1_1_BAND_0, 6'082'560, 14'000},
            {LEVEL_APV_2_BAND_0, 15'667'200, 36'000},
            {LEVEL_APV_2_1_BAND_0, 31'334'400, 71'000},
            {LEVEL_APV_3_BAND_0, 66'846'720, 101'000},
            {LEVEL_APV_3_1_BAND_0, 133'693'440, 201'000},
            {LEVEL_APV_4_BAND_0, 265'420'800, 401'000},
            {LEVEL_APV_4_1_BAND_0, 530'841'600, 780'000},
            {LEVEL_APV_5_BAND_0, 1'061'683'200, 1'560'000},
            {LEVEL_APV_5_1_BAND_0, 2'123'366'400, 3'324'000},
            {LEVEL_APV_6_BAND_0, 4'777'574'400, 6'648'000},
            {LEVEL_APV_6_1_BAND_0, 8'493'465'600, 13'296'000},
            {LEVEL_APV_7_BAND_0, 16'986'931'200, 26'592'000},
            {LEVEL_APV_7_1_BAND_0, 33'973'862'400, 53'184'000},
    };

    constexpr LevelLimits kLimitsBand1[] = {
            {LEVEL_APV_1_BAND_1, 3'041'280, 11'000},
            {LEVEL_APV_1_1_BAND_1, 6'082'560, 21'000},
            {LEVEL_APV_2_BAND_1, 15'667'200, 53'000},
            {LEVEL_APV_2_1_BAND_1, 31'334'400, 106'000},
            {LEVEL_APV_3_BAND_1, 66'846'720, 151'000},
            {LEVEL_APV_3_1_BAND_1, 133'693'440, 301'000},
            {LEVEL_APV_4_BAND_1, 265'420'800, 602'000},
            {LEVEL_APV_4_1_BAND_1, 530'841'600, 1'170'000},
            {LEVEL_APV_5_BAND_1, 1'061'683'200, 2'340'000},
            {LEVEL_APV_5_1_BAND_1, 2'123'366'400, 4'986'000},
            {LEVEL_APV_6_BAND_1, 4'777'574'400, 9'972'000},
            {LEVEL_APV_6_1_BAND_1, 8'493'465'600, 19'944'000},
            {LEVEL_APV_7_BAND_1, 16'986'931'200, 39'888'000},
            {LEVEL_APV_7_1_BAND_1, 33'973'862'400, 79'776'000},
    };

    constexpr LevelLimits kLimitsBand2[] = {
            {LEVEL_APV_1_BAND_2, 3'041'280, 14'000},
            {LEVEL_APV_1_1_BAND_2, 6'082'560, 28'000},
            {LEVEL_APV_2_BAND_2, 15'667'200, 71'000},
            {LEVEL_APV_2_1_BAND_2, 31'334'400, 141'000},
            {LEVEL_APV_3_BAND_2, 66'846'720, 201'000},
            {LEVEL_APV_3_1_BAND_2, 133'693'440, 401'000},
            {LEVEL_APV_4_BAND_2, 265'420'800, 780'000},
            {LEVEL_APV_4_1_BAND_2, 530'841'600, 1'560'000},
            {LEVEL_APV_5_BAND_2, 1'061'683'200, 3'324'000},
            {LEVEL_APV_5_1_BAND_2, 2'123'366'400, 6'648'000},
            {LEVEL_APV_6_BAND_2, 4'777'574'400, 13'296'000},
            {LEVEL_APV_6_1_BAND_2, 8'493'465'600, 26'592'000},
            {LEVEL_APV_7_BAND_2, 16'986'931'200, 53'184'000},
            {LEVEL_APV_7_1_BAND_2, 33'973'862'400, 106'368'000},
    };

    constexpr LevelLimits kLimitsBand3[] = {
            {LEVEL_APV_1_BAND_3, 3'041'280, 21'000},
            {LEVEL_APV_1_1_BAND_3, 6'082'560, 42'000},
            {LEVEL_APV_2_BAND_3, 15'667'200, 106'000},
            {LEVEL_APV_2_1_BAND_3, 31'334'400, 212'000},
            {LEVEL_APV_3_BAND_3, 66'846'720, 301'000},
            {LEVEL_APV_3_1_BAND_3, 133'693'440, 602'000},
            {LEVEL_APV_4_BAND_3, 265'420'800, 1'170'000},
            {LEVEL_APV_4_1_BAND_3, 530'841'600, 2'340'000},
            {LEVEL_APV_5_BAND_3, 1'061'683'200, 4'986'000},
            {LEVEL_APV_5_1_BAND_3, 2'123'366'400, 9'972'000},
            {LEVEL_APV_6_BAND_3, 4'777'574'400, 19'944'000},
            {LEVEL_APV_6_1_BAND_3, 8'493'465'600, 39'888'000},
            {LEVEL_APV_7_BAND_3, 16'986'931'200, 79'776'000},
            {LEVEL_APV_7_1_BAND_3, 33'973'862'400, 159'552'000},
    };

    uint64_t samplesPerSec = width * height * fps;
    if (band == 0) {
        for (const LevelLimits& limit : kLimitsBand0) {
            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
                level = limit.level;
                break;
            }
        }
    } else if (band == 1) {
        for (const LevelLimits& limit : kLimitsBand1) {
            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
                level = limit.level;
                break;
            }
        }
    } else if (band == 2) {
        for (const LevelLimits& limit : kLimitsBand2) {
            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
                level = limit.level;
                break;
            }
        }
    } else if (band == 3) {
        for (const LevelLimits& limit : kLimitsBand3) {
            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
                level = limit.level;
                break;
            }
        }
    } else {
        ALOGE("Invalid band_idc while calculating level");
    }

    return level;
}

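// Converts P010 (10-bit 4:2:0, semi-planar) to P210 (10-bit 4:2:2, semi-planar): the luma plane
// is copied unchanged and each interleaved chroma row is duplicated to double the vertical
// chroma resolution.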
void C2SoftApvEnc::ColorConvertP010ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
    auto width = input->width();
    auto height = input->height();

    auto* yPlane = (uint8_t*)input->data()[0];
    auto* uvPlane = (uint8_t*)input->data()[1];
    uint32_t uvStride = width * 2;

    auto* src = yPlane;
    auto* dst = (uint8_t*)imgb->a[0];
    std::memcpy(dst, src, width * height * 2);

    auto* dst_uv = (uint8_t*)imgb->a[1];
    int32_t offset1 = 0, offset2 = 0;
    for (int32_t y = 0; y < height / 2; ++y) {
        offset1 = (y * 2) * uvStride;
        offset2 = (y * 2 + 1) * uvStride;
        src = uvPlane + (y * uvStride);

        std::memcpy(dst_uv + offset1, src, uvStride);
        std::memcpy(dst_uv + offset2, src, uvStride);
    }
}

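// Converts P010 into planar YUV422P10LE: the MSB-aligned 10-bit samples are shifted down to
// little-endian 10-bit values, the interleaved chroma is split into separate Cb and Cr planes,
// and each chroma row is duplicated to produce 4:2:2 sampling.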
void C2SoftApvEnc::ColorConvertP010ToYUV422P10le(const C2GraphicView* const input,
                                                 oapv_imgb_t* imgb) {
    uint32_t width = input->width();
    uint32_t height = input->height();

    uint8_t* yPlane = (uint8_t*)input->data()[0];
    auto* uvPlane = (uint8_t*)input->data()[1];
    uint32_t stride[3];
    stride[0] = width * 2;
    stride[1] = stride[2] = width;

    uint8_t *dst, *src;
    uint16_t tmp;
    for (int32_t y = 0; y < height; ++y) {
        src = yPlane + y * stride[0];
        dst = (uint8_t*)imgb->a[0] + y * stride[0];
        for (int32_t x = 0; x < stride[0]; x += 2) {
            tmp = (src[x + 1] << 2) | (src[x] >> 6);
            dst[x] = tmp & 0xFF;
            dst[x + 1] = tmp >> 8;
        }
    }

    uint8_t *dst_u, *dst_v;
    for (int32_t y = 0; y < height / 2; ++y) {
        src = uvPlane + y * stride[1] * 2;
        dst_u = (uint8_t*)imgb->a[1] + (y * 2) * stride[1];
        dst_v = (uint8_t*)imgb->a[2] + (y * 2) * stride[2];
        for (int32_t x = 0; x < stride[1] * 2; x += 4) {
            tmp = (src[x + 1] << 2) | (src[x] >> 6);  // cb
            dst_u[x / 2] = tmp & 0xFF;
            dst_u[x / 2 + 1] = tmp >> 8;
            dst_u[x / 2 + stride[1]] = dst_u[x / 2];
            dst_u[x / 2 + stride[1] + 1] = dst_u[x / 2 + 1];

            tmp = (src[x + 3] << 2) | (src[x + 2] >> 6);  // cr
            dst_v[x / 2] = tmp & 0xFF;
            dst_v[x / 2 + 1] = tmp >> 8;
            dst_v[x / 2 + stride[2]] = dst_v[x / 2];
            dst_v[x / 2 + stride[2] + 1] = dst_v[x / 2 + 1];
        }
    }
}

void C2SoftApvEnc::finishWork(uint64_t workIndex, const std::unique_ptr<C2Work>& work,
                              const std::shared_ptr<C2BlockPool>& pool, oapv_bitb_t* bitb,
                              oapve_stat_t* stat) {
    std::shared_ptr<C2LinearBlock> block;
    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
    c2_status_t status = pool->fetchLinearBlock(stat->write, usage, &block);
    if (C2_OK != status) {
        ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
        mSignalledError = true;
        work->result = status;
        work->workletsProcessed = 1u;
        return;
    }

    C2WriteView wView = block->map().get();
    if (C2_OK != wView.error()) {
        ALOGE("write view map failed with status 0x%x", wView.error());
        mSignalledError = true;
        work->result = wView.error();
        work->workletsProcessed = 1u;
        return;
    }
    if (!mReceivedFirstFrame) {
        createCsdData(work, bitb, stat->write);
        mReceivedFirstFrame = true;
    }

    memcpy(wView.data(), bitb->addr, stat->write);
    std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, stat->write);

    /* All frames are SYNC FRAME */
    buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(0u /* stream id */,
                                                                          C2Config::SYNC_FRAME));

    auto fillWork = [buffer](const std::unique_ptr<C2Work>& work) {
        work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.buffers.push_back(buffer);
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
    };
    if (work && c2_cntr64_t(workIndex) == work->input.ordinal.frameIndex) {
        fillWork(work);
        if (mSignalledEos) {
            work->worklets.front()->output.flags = C2FrameData::FLAG_END_OF_STREAM;
        }
    } else {
        finish(workIndex, fillWork);
    }
}

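// Parses the first encoded access unit (pbu_header, frame_info and frame header fields) to find
// the extent of the codec-specific data and attaches it to the work item as a
// C2StreamInitDataInfo config update.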
void C2SoftApvEnc::createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
                                 uint32_t encodedSize) {
    uint32_t csdStart = 0, csdEnd = 0;
    uint32_t bitOffset = 0;
    uint8_t* buf = (uint8_t*)bitb->addr + csdStart;

    if (encodedSize == 0) {
        ALOGE("the first frame size is zero, so no csd data will be created.");
        return;
    }
    ABitReader reader(buf, encodedSize);

    /* pbu_header() */
    reader.skipBits(32);
    bitOffset += 32;  // pbu_size
    reader.skipBits(32);
    bitOffset += 32;  // currReadSize
    csdStart = bitOffset / 8;

    int32_t pbu_type = reader.getBits(8);
    bitOffset += 8;  // pbu_type
    reader.skipBits(16);
    bitOffset += 16;  // group_id
    reader.skipBits(8);
    bitOffset += 8;  // reserved_zero_8bits

    /* frame_info() */
    int32_t profile_idc = reader.getBits(8);
    bitOffset += 8;  // profile_idc
    int32_t level_idc = reader.getBits(8);
    bitOffset += 8;  // level_idc
    int32_t band_idc = reader.getBits(3);
    bitOffset += 3;  // band_idc
    reader.skipBits(5);
    bitOffset += 5;  // reserved_zero_5bits
    int32_t width = reader.getBits(32);
    bitOffset += 32;  // width
    int32_t height = reader.getBits(32);
    bitOffset += 32;  // height
    int32_t chroma_idc = reader.getBits(4);
    bitOffset += 4;  // chroma_format_idc
    reader.skipBits(4);
    bitOffset += 4;  // bit_depth
    reader.skipBits(8);
    bitOffset += 8;  // capture_time_distance
    reader.skipBits(8);
    bitOffset += 8;  // reserved_zero_8bits

    /* frame_header() */
    reader.skipBits(8);
    bitOffset += 8;  // reserved_zero_8bit
    bool color_description_present_flag = reader.getBits(1);
    bitOffset += 1;  // color_description_present_flag
    if (color_description_present_flag) {
        reader.skipBits(8);
        bitOffset += 8;  // color_primaries
        reader.skipBits(8);
        bitOffset += 8;  // transfer_characteristics
        reader.skipBits(8);
        bitOffset += 8;  // matrix_coefficients
    }
    bool use_q_matrix = reader.getBits(1);
    bitOffset += 1;  // use_q_matrix
    if (use_q_matrix) {
        /* quantization_matrix() */
        int32_t numComp = chroma_idc == 0   ? 1
                          : chroma_idc == 2 ? 3
                          : chroma_idc == 3 ? 3
                          : chroma_idc == 4 ? 4
                                            : -1;
        int32_t needBitsForQ = 64 * 8 * numComp;
        reader.skipBits(needBitsForQ);
        bitOffset += needBitsForQ;
    }

    /* tile_info() */
    int32_t tile_width_in_mbs_minus1 = reader.getBits(28);
    bitOffset += 28;
    int32_t tile_height_in_mbs_minus1 = reader.getBits(28);
    bitOffset += 28;
    bool tile_size_present_in_fh_flag = reader.getBits(1);
    bitOffset += 1;
    if (tile_size_present_in_fh_flag) {
        int32_t numTiles = ceil((double)width / (double)tile_width_in_mbs_minus1) *
                           ceil((double)height / (double)tile_height_in_mbs_minus1);
        reader.skipBits(32 * numTiles);
        bitOffset += (32 * numTiles);
    }

    reader.skipBits(8);
    bitOffset += 8;  // reserved_zero_8bits

    /* byte_alignment() */
    while (bitOffset % 8) {
        reader.skipBits(1);
        bitOffset += 1;
    }
    csdEnd = bitOffset / 8;
    int32_t csdSize = csdEnd - csdStart + 1;

    std::unique_ptr<C2StreamInitDataInfo::output> csd =
            C2StreamInitDataInfo::output::AllocUnique(csdSize, 0u);
    if (!csd) {
        ALOGE("CSD allocation failed");
        mSignalledError = true;
        work->result = C2_NO_MEMORY;
        work->workletsProcessed = 1u;
        return;
    }

    buf = buf + csdStart;
    memcpy(csd->m.value, buf, csdSize);
    work->worklets.front()->output.configUpdate.push_back(std::move(csd));
}

c2_status_t C2SoftApvEnc::drainInternal(uint32_t drainMode,
                                        const std::shared_ptr<C2BlockPool>& pool,
                                        const std::unique_ptr<C2Work>& work) {
    // drain() passes a null work item, so only fill the work when one is provided.
    if (work) {
        fillEmptyWork(work);
    }
    return C2_OK;
}

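// Encodes one input work item: initializes the encoder on first use, converts the input frame
// into the oapv image buffer, runs oapve_encode() once enough frames (always one here) have been
// collected, and finishes the work with the produced bitstream.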
void C2SoftApvEnc::process(const std::unique_ptr<C2Work>& work,
                           const std::shared_ptr<C2BlockPool>& pool) {
    c2_status_t error;
    work->result = C2_OK;
    work->workletsProcessed = 0u;
    work->worklets.front()->output.flags = work->input.flags;

    nsecs_t timeDelay = 0;
    uint64_t workIndex = work->input.ordinal.frameIndex.peekull();

    mSignalledEos = false;
    mOutBlock = nullptr;

    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
        ALOGV("Got FLAG_END_OF_STREAM");
        mSignalledEos = true;
    }

    /* Initialize encoder if not already initialized */
    if (initEncoder() != C2_OK) {
        ALOGE("Failed to initialize encoder");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        work->workletsProcessed = 1u;
        ALOGE("[%s] Failed to make Codec context", __func__);
        return;
    }
    if (mSignalledError) {
        ALOGE("[%s] Received signalled error", __func__);
        return;
    }

    if (mSignalledEos) {
        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
        return;
    }

    if (work->input.buffers.empty()) {
        return;
    }

    std::shared_ptr<C2GraphicView> view;
    std::shared_ptr<C2Buffer> inputBuffer = nullptr;
    if (!work->input.buffers.empty()) {
        inputBuffer = work->input.buffers[0];
        view = std::make_shared<C2GraphicView>(
                inputBuffer->data().graphicBlocks().front().map().get());
        if (view->error() != C2_OK) {
            ALOGE("graphic view map err = %d", view->error());
            work->workletsProcessed = 1u;
            return;
        }
    }
    if (!inputBuffer) {
        fillEmptyWork(work);
        return;
    }

    oapve_stat_t stat;
    auto outBufferSize =
            mCodecDesc->param[mReceivedFrames].w * mCodecDesc->param[mReceivedFrames].h * 4;
    if (!mOutBlock) {
        C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
        c2_status_t err = pool->fetchLinearBlock(outBufferSize, usage, &mOutBlock);
        if (err != C2_OK) {
            work->result = err;
            work->workletsProcessed = 1u;
            ALOGE("fetchLinearBlock has failed. err = %d", err);
            return;
        }
    }

    C2WriteView wView = mOutBlock->map().get();
    if (wView.error() != C2_OK) {
        work->result = wView.error();
        work->workletsProcessed = 1u;
        return;
    }

    error = setEncodeArgs(&mInputFrames, view.get(), workIndex);
    if (error != C2_OK) {
        mSignalledError = true;
        work->result = error;
        work->workletsProcessed = 1u;
        return;
    }

    if (++mReceivedFrames < mMaxFrames) {
        return;
    }
    mReceivedFrames = 0;

    std::shared_ptr<oapv_bitb_t> bits = std::make_shared<oapv_bitb_t>();
    std::memset(mBitstreamBuf, 0, kMaxBitstreamBufSize);
    bits->addr = mBitstreamBuf;
    bits->bsize = kMaxBitstreamBufSize;
    bits->err = C2_OK;

    if (mInputFrames.frm[0].imgb) {
        int32_t status =
                oapve_encode(mEncoderId, &mInputFrames, mMetaId, bits.get(), &stat, &mReconFrames);
        if (status != C2_OK) {
            mSignalledError = true;
            work->result = C2_CORRUPTED;
            work->workletsProcessed = 1u;
            return;
        }
    } else if (!mSignalledEos) {
        fillEmptyWork(work);
    }
    finishWork(workIndex, work, pool, bits.get(), &stat);
}

class C2SoftApvEncFactory : public C2ComponentFactory {
  public:
    C2SoftApvEncFactory()
        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
                  GetCodec2PlatformComponentStore()->getParamReflector())) {}

    virtual c2_status_t createComponent(c2_node_id_t id,
                                        std::shared_ptr<C2Component>* const component,
                                        std::function<void(C2Component*)> deleter) override {
        *component = std::shared_ptr<C2Component>(
                new C2SoftApvEnc(COMPONENT_NAME, id,
                                 std::make_shared<C2SoftApvEnc::IntfImpl>(mHelper)),
                deleter);
        return C2_OK;
    }

    c2_status_t createInterface(c2_node_id_t id,
                                std::shared_ptr<C2ComponentInterface>* const interface,
                                std::function<void(C2ComponentInterface*)> deleter) override {
        *interface = std::shared_ptr<C2ComponentInterface>(
                new SimpleInterface<C2SoftApvEnc::IntfImpl>(
                        COMPONENT_NAME, id, std::make_shared<C2SoftApvEnc::IntfImpl>(mHelper)),
                deleter);
        return C2_OK;
    }

    ~C2SoftApvEncFactory() override = default;

  private:
    std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
    if (!android::media::swcodec::flags::apv_software_codec()) {
        ALOGV("APV SW Codec is not enabled");
        return nullptr;
    }
    return new ::android::C2SoftApvEncFactory();
}

__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
        ::C2ComponentFactory* factory) {
    delete factory;
}