/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftVpxEnc"
#include <log/log.h>
#include <utils/misc.h>

#include <media/hardware/VideoAPI.h>

#include <Codec2BufferUtils.h>
#include <C2Debug.h>
#include "C2SoftVpxEnc.h"

#ifndef INT32_MAX
#define INT32_MAX 2147483647
#endif

namespace android {

C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
    : SimpleInterface<void>::BaseParams(
            helper,
            COMPONENT_NAME,
            C2Component::KIND_ENCODER,
            C2Component::DOMAIN_VIDEO,
            MEDIA_MIMETYPE_VIDEO) {
    noPrivateBuffers(); // TODO: account for our buffers here
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();
    setDerivedInstance(this);

    addParameter(
            DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
            .withConstValue(new C2ComponentAttributesSetting(
                    C2Component::ATTRIB_IS_TEMPORAL))
            .build());

    addParameter(
            DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
            .withConstValue(new C2StreamUsageTuning::input(
                    0u, (uint64_t)C2MemoryUsage::CPU_READ))
            .build());

    addParameter(
            DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, 2),
                C2F(mSize, height).inRange(2, 2048, 2),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(
            DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
            .withDefault(new C2StreamBitrateModeTuning::output(
                    0u, C2Config::BITRATE_VARIABLE))
            .withFields({
                C2F(mBitrateMode, value).oneOf({
                    C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
            })
            .withSetter(
                Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
            .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
            // TODO: More restriction?
            .withFields({C2F(mFrameRate, value).greaterThan(0.)})
            .withSetter(
                Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
            .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
            .withFields({
                C2F(mLayering, m.layerCount).inRange(0, 4),
                C2F(mLayering, m.bLayerCount).inRange(0, 0),
                C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
            })
            .withSetter(LayeringSetter)
            .build());

    addParameter(
            DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
            .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
            .withFields({C2F(mSyncFramePeriod, value).any()})
            .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
            .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
            .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
            .withSetter(BitrateSetter)
            .build());

    addParameter(
            DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
            .withConstValue(new C2StreamIntraRefreshTuning::output(
                    0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
            .build());
#ifdef VP9
    addParameter(
            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
            .withDefault(new C2StreamProfileLevelInfo::output(
                    0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
            .withFields({
                C2F(mProfileLevel, profile).equalTo(
                    PROFILE_VP9_0
                ),
                C2F(mProfileLevel, level).equalTo(
                    LEVEL_VP9_4_1),
            })
            .withSetter(ProfileLevelSetter)
            .build());
#else
    addParameter(
            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
            .withDefault(new C2StreamProfileLevelInfo::output(
                    0u, PROFILE_VP8_0, LEVEL_UNUSED))
            .withFields({
                C2F(mProfileLevel, profile).equalTo(
                    PROFILE_VP8_0
                ),
                C2F(mProfileLevel, level).equalTo(
                    LEVEL_UNUSED),
            })
            .withSetter(ProfileLevelSetter)
            .build());
#endif
    addParameter(
            DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
            .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
            .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
            .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::input(
                    0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(ColorAspectsSetter)
            .build());

    addParameter(
            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::output(
                    0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mCodedColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mCodedColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mCodedColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mCodedColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(CodedColorAspectsSetter, mColorAspects)
            .build());
}

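// Bitrate setter: requests below the supported floor of 4096 bps are clamped up to 4096.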
C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (me.v.value < 4096) {
        me.set().value = 4096;
    }
    return res;
}

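// Picture size setter: dimensions the declared fields cannot support are reported as
// BadValue and reverted to the previous width/height.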
C2R C2SoftVpxEnc::IntfImpl::SizeSetter(bool mayBlock,
                                       const C2P<C2StreamPictureSizeInfo::input>& oldMe,
                                       C2P<C2StreamPictureSizeInfo::input>& me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
        me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
        me.set().height = oldMe.v.height;
    }
    return res;
}

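// Profile/level setter: values the field does not support are replaced with
// PROFILE_VP9_0 / LEVEL_VP9_4_1.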
C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
                                               C2P<C2StreamProfileLevelInfo::output>& me) {
    (void)mayBlock;
    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
        me.set().profile = PROFILE_VP9_0;
    }
    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
        me.set().level = LEVEL_VP9_4_1;
    }
    return C2R::Ok();
}

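// Layering setter: caps the layer count at 4, disallows B-layers, and normalizes the
// bitrate ratios into a monotonically non-decreasing sequence in [0, 1].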
C2R C2SoftVpxEnc::IntfImpl::LayeringSetter(bool mayBlock,
                                           C2P<C2StreamTemporalLayeringTuning::output>& me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (me.v.m.layerCount > 4) {
        me.set().m.layerCount = 4;
    }
    me.set().m.bLayerCount = 0;
    // ensure ratios are monotonic and clamped between 0 and 1
    for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
        me.set().m.bitrateRatios[ix] = c2_clamp(
            ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
    }
    ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
    return res;
}

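// Converts the sync-frame interval (microseconds) into a keyframe distance in frames.
// E.g. a 1-second interval (1000000 us) at 30 fps yields a period of 30 frames.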
uint32_t C2SoftVpxEnc::IntfImpl::getSyncFramePeriod() const {
    if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
        return 0;
    }
    double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
    return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}

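// Color aspects setter: clamps each enum to its *_OTHER upper bound so out-of-range
// values never propagate further.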
C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
                                               C2P<C2StreamColorAspectsInfo::input>& me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
        me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
        me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
        me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
        me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
}

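// The coded (VUI) color aspects simply mirror whatever was configured on the input stream.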
C2R C2SoftVpxEnc::IntfImpl::CodedColorAspectsSetter(
        bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
        const C2P<C2StreamColorAspectsInfo::input>& coded) {
    (void)mayBlock;
    me.set().range = coded.v.range;
    me.set().primaries = coded.v.primaries;
    me.set().transfer = coded.v.transfer;
    me.set().matrix = coded.v.matrix;
    return C2R::Ok();
}

#if 0
static size_t getCpuCoreCount() {
    long cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
    // _SC_NPROC_ONLN must be defined...
    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
    CHECK(cpuCoreCount >= 1);
    ALOGV("Number of CPU cores: %ld", cpuCoreCount);
    return (size_t)cpuCoreCount;
}
#endif

C2SoftVpxEnc::C2SoftVpxEnc(const char* name, c2_node_id_t id,
                           const std::shared_ptr<IntfImpl>& intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecContext(nullptr),
      mCodecConfiguration(nullptr),
      mCodecInterface(nullptr),
      mStrideAlign(2),
      mColorFormat(VPX_IMG_FMT_I420),
      mBitrateControlMode(VPX_VBR),
      mErrorResilience(false),
      mMinQuantizer(0),
      mMaxQuantizer(0),
      mTemporalLayers(0),
      mTemporalPatternType(VPXTemporalLayerPatternNone),
      mTemporalPatternLength(0),
      mTemporalPatternIdx(0),
      mLastTimestamp(0x7FFFFFFFFFFFFFFFull),
      mSignalledOutputEos(false),
      mSignalledError(false) {
    for (int i = 0; i < MAXTEMPORALLAYERS; i++) {
        mTemporalLayerBitrateRatio[i] = 1.0f;
    }
}

C2SoftVpxEnc::~C2SoftVpxEnc() {
    onRelease();
}

c2_status_t C2SoftVpxEnc::onInit() {
    status_t err = initEncoder();
    return err == OK ? C2_OK : C2_CORRUPTED;
}

void C2SoftVpxEnc::onRelease() {
    if (mCodecContext) {
        vpx_codec_destroy(mCodecContext);
        delete mCodecContext;
        mCodecContext = nullptr;
    }

    if (mCodecConfiguration) {
        delete mCodecConfiguration;
        mCodecConfiguration = nullptr;
    }

    // this one is not allocated by us
    mCodecInterface = nullptr;
}

c2_status_t C2SoftVpxEnc::onStop() {
    onRelease();
    mLastTimestamp = 0x7FFFFFFFFFFFFFFFLL;
    mSignalledOutputEos = false;
    mSignalledError = false;
    return C2_OK;
}

void C2SoftVpxEnc::onReset() {
    (void)onStop();
}

c2_status_t C2SoftVpxEnc::onFlush_sm() {
    return onStop();
}

status_t C2SoftVpxEnc::initEncoder() {
    vpx_codec_err_t codec_return;
    status_t result = UNKNOWN_ERROR;
    {
        IntfImpl::Lock lock = mIntf->lock();
        mSize = mIntf->getSize_l();
        mBitrate = mIntf->getBitrate_l();
        mBitrateMode = mIntf->getBitrateMode_l();
        mFrameRate = mIntf->getFrameRate_l();
        mIntraRefresh = mIntf->getIntraRefresh_l();
        mRequestSync = mIntf->getRequestSync_l();
        mLayering = mIntf->getTemporalLayers_l();
        mTemporalLayers = mLayering->m.layerCount;
    }

    switch (mBitrateMode->value) {
        case C2Config::BITRATE_CONST:
            mBitrateControlMode = VPX_CBR;
            break;
        case C2Config::BITRATE_VARIABLE:
            [[fallthrough]];
        default:
            mBitrateControlMode = VPX_VBR;
            break;
    }

    setCodecSpecificInterface();
    if (!mCodecInterface) goto CleanUp;

    ALOGD("VPx: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
          (uint32_t)mBitrateControlMode, mTemporalLayers, mIntf->getSyncFramePeriod(),
          mMinQuantizer, mMaxQuantizer);

    mCodecConfiguration = new vpx_codec_enc_cfg_t;
    if (!mCodecConfiguration) goto CleanUp;
    codec_return = vpx_codec_enc_config_default(mCodecInterface,
                                                mCodecConfiguration,
                                                0);
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("Error populating default configuration for vpx encoder.");
        goto CleanUp;
    }

    mCodecConfiguration->g_w = mSize->width;
    mCodecConfiguration->g_h = mSize->height;
    //mCodecConfiguration->g_threads = getCpuCoreCount();
    mCodecConfiguration->g_threads = 0;
    mCodecConfiguration->g_error_resilient = mErrorResilience;

    // The timebase unit is microseconds: g_timebase is 1/1000000 of a second.
    mCodecConfiguration->g_timebase.num = 1;
    mCodecConfiguration->g_timebase.den = 1000000;
    // rc_target_bitrate is in kbps, mBitrate in bps
    mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
    mCodecConfiguration->rc_end_usage = mBitrateControlMode;
    // Disable frame drop - not allowed in MediaCodec now.
    mCodecConfiguration->rc_dropframe_thresh = 0;
    // Disable lagged encoding.
    mCodecConfiguration->g_lag_in_frames = 0;
    if (mBitrateControlMode == VPX_CBR) {
        // Disable spatial resizing.
        mCodecConfiguration->rc_resize_allowed = 0;
        // Single-pass mode.
        mCodecConfiguration->g_pass = VPX_RC_ONE_PASS;
        // Maximum amount of bits that can be subtracted from the target
        // bitrate - expressed as percentage of the target bitrate.
        mCodecConfiguration->rc_undershoot_pct = 100;
        // Maximum amount of bits that can be added to the target
        // bitrate - expressed as percentage of the target bitrate.
        mCodecConfiguration->rc_overshoot_pct = 15;
        // Initial value of the buffer level in ms.
        mCodecConfiguration->rc_buf_initial_sz = 500;
        // Amount of data that the encoder should try to maintain in ms.
        mCodecConfiguration->rc_buf_optimal_sz = 600;
        // The amount of data that may be buffered by the decoding
        // application in ms.
        mCodecConfiguration->rc_buf_sz = 1000;
        // Enable error resilience - needed for packet loss.
        mCodecConfiguration->g_error_resilient = 1;
        // Maximum key frame interval - boost to 3000 for CBR.
        mCodecConfiguration->kf_max_dist = 3000;
        // Encoder determines optimal key frame placement automatically.
        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
    }

    // Frame temporal pattern - for now only a WebRTC-like pattern is supported.
    switch (mTemporalLayers) {
        case 0:
            mTemporalPatternLength = 0;
            break;
        case 1:
            mCodecConfiguration->ts_number_layers = 1;
            mCodecConfiguration->ts_rate_decimator[0] = 1;
            mCodecConfiguration->ts_periodicity = 1;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mTemporalPattern[0] = kTemporalUpdateLastRefAll;
            mTemporalPatternLength = 1;
            break;
        case 2:
            mCodecConfiguration->ts_number_layers = 2;
            mCodecConfiguration->ts_rate_decimator[0] = 2;
            mCodecConfiguration->ts_rate_decimator[1] = 1;
            mCodecConfiguration->ts_periodicity = 2;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mCodecConfiguration->ts_layer_id[1] = 1;
            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
            mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
            mTemporalPattern[2] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[6] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[7] = kTemporalUpdateNone;
            mTemporalLayerBitrateRatio[0] = mLayering->m.bitrateRatios[0];
            mTemporalPatternLength = 8;
            break;
        case 3:
            mCodecConfiguration->ts_number_layers = 3;
            mCodecConfiguration->ts_rate_decimator[0] = 4;
            mCodecConfiguration->ts_rate_decimator[1] = 2;
            mCodecConfiguration->ts_rate_decimator[2] = 1;
            mCodecConfiguration->ts_periodicity = 4;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mCodecConfiguration->ts_layer_id[1] = 2;
            mCodecConfiguration->ts_layer_id[2] = 1;
            mCodecConfiguration->ts_layer_id[3] = 2;
            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
            mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
            mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
            mTemporalPattern[3] = kTemporalUpdateNone;
            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[5] = kTemporalUpdateNone;
            mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[7] = kTemporalUpdateNone;
            mTemporalLayerBitrateRatio[0] = mLayering->m.bitrateRatios[0];
            mTemporalLayerBitrateRatio[1] = mLayering->m.bitrateRatios[1];
            mTemporalPatternLength = 8;
            break;
        default:
            ALOGE("Wrong number of temporal layers %zu", mTemporalLayers);
            goto CleanUp;
    }
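    // Note: libvpx interprets ts_target_bitrate[] entries as cumulative targets (layer i
    // includes all lower layers), so the topmost layer keeps its default ratio of 1.0 and
    // carries the full rc_target_bitrate.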
    // Set bitrate values for each layer
    for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
        mCodecConfiguration->ts_target_bitrate[i] =
            mCodecConfiguration->rc_target_bitrate *
            mTemporalLayerBitrateRatio[i];
    }
    if (mIntf->getSyncFramePeriod() >= 0) {
        mCodecConfiguration->kf_max_dist = mIntf->getSyncFramePeriod();
        mCodecConfiguration->kf_min_dist = mIntf->getSyncFramePeriod();
        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
    }
    if (mMinQuantizer > 0) {
        mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
    }
    if (mMaxQuantizer > 0) {
        mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
    }
    setCodecSpecificConfiguration();
    mCodecContext = new vpx_codec_ctx_t;
    if (!mCodecContext) goto CleanUp;
    codec_return = vpx_codec_enc_init(mCodecContext,
                                      mCodecInterface,
                                      mCodecConfiguration,
                                      0); // flags
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("Error initializing vpx encoder");
        goto CleanUp;
    }

    // Extra CBR settings
    if (mBitrateControlMode == VPX_CBR) {
        codec_return = vpx_codec_control(mCodecContext,
                                         VP8E_SET_STATIC_THRESHOLD,
                                         1);
        if (codec_return == VPX_CODEC_OK) {
            uint32_t rc_max_intra_target =
                (uint32_t)(mCodecConfiguration->rc_buf_optimal_sz * mFrameRate->value / 20 + 0.5);
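            // E.g. with rc_buf_optimal_sz = 600 ms at 30 fps this is 600 * 30 / 20 = 900,
            // i.e. a key frame may spend up to 9x the average per-frame bit budget
            // (VP8E_SET_MAX_INTRA_BITRATE_PCT is a percentage of the per-frame average).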
            // Don't go below 3 times per frame bandwidth.
            if (rc_max_intra_target < 300) {
                rc_max_intra_target = 300;
            }
            codec_return = vpx_codec_control(mCodecContext,
                                             VP8E_SET_MAX_INTRA_BITRATE_PCT,
                                             rc_max_intra_target);
        }
        if (codec_return == VPX_CODEC_OK) {
            codec_return = vpx_codec_control(mCodecContext,
                                             VP8E_SET_CPUUSED,
                                             -8);
        }
        if (codec_return != VPX_CODEC_OK) {
            ALOGE("Error setting cbr parameters for vpx encoder.");
            goto CleanUp;
        }
    }

    codec_return = setCodecSpecificControls();
    if (codec_return != VPX_CODEC_OK) goto CleanUp;

    {
        uint32_t width = mSize->width;
        uint32_t height = mSize->height;
        if (((uint64_t)width * height) >
                ((uint64_t)INT32_MAX / 3)) {
            ALOGE("b/25812794, Buffer size is too big, width=%u, height=%u.", width, height);
        } else {
            uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
            uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
            mConversionBuffer = MemoryBlock::Allocate(stride * vstride * 3 / 2);
            if (!mConversionBuffer.size()) {
                ALOGE("Allocating conversion buffer failed.");
            } else {
                mNumInputFrames = -1;
                return OK;
            }
        }
    }

CleanUp:
    onRelease();
    return result;
}

vpx_enc_frame_flags_t C2SoftVpxEnc::getEncodeFlags() {
    vpx_enc_frame_flags_t flags = 0;
    if (mTemporalPatternLength > 0) {
        int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
        mTemporalPatternIdx++;
        switch (mTemporalPattern[patternIdx]) {
            case kTemporalUpdateLast:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_REF_ARF;
                break;
            case kTemporalUpdateGoldenWithoutDependency:
                flags |= VP8_EFLAG_NO_REF_GF;
                [[fallthrough]];
            case kTemporalUpdateGolden:
                flags |= VP8_EFLAG_NO_REF_ARF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateAltrefWithoutDependency:
                flags |= VP8_EFLAG_NO_REF_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                [[fallthrough]];
            case kTemporalUpdateAltref:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateNoneNoRefAltref:
                flags |= VP8_EFLAG_NO_REF_ARF;
                [[fallthrough]];
            case kTemporalUpdateNone:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                flags |= VP8_EFLAG_NO_UPD_ENTROPY;
                break;
            case kTemporalUpdateNoneNoRefGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                flags |= VP8_EFLAG_NO_UPD_ENTROPY;
                break;
            case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateLastRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                break;
            case kTemporalUpdateGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateLastAndGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                break;
            case kTemporalUpdateLastRefAll:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_GF;
                break;
        }
    }
    return flags;
}

// TODO: add support for YUV input color formats
// TODO: add support for SVC, ARF. SVC and ARF return multiple frames
// (hierarchical / noshow) in one call. These frames should be combined into
// a single buffer and sent back to the client
void C2SoftVpxEnc::process(
        const std::unique_ptr<C2Work> &work,
        const std::shared_ptr<C2BlockPool> &pool) {
    // Initialize output work
    work->result = C2_OK;
    work->workletsProcessed = 1u;
    work->worklets.front()->output.flags = work->input.flags;

    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }
    // Initialize encoder if not already
    if (!mCodecContext && OK != initEncoder()) {
        ALOGE("Failed to initialize encoder");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

    std::shared_ptr<const C2GraphicView> rView;
    std::shared_ptr<C2Buffer> inputBuffer;
    if (!work->input.buffers.empty()) {
        inputBuffer = work->input.buffers[0];
        rView = std::make_shared<const C2GraphicView>(
                inputBuffer->data().graphicBlocks().front().map().get());
        if (rView->error() != C2_OK) {
            ALOGE("graphic view map err = %d", rView->error());
            work->result = C2_CORRUPTED;
            return;
        }
    } else {
        ALOGV("Empty input Buffer");
        uint32_t flags = 0;
        if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
            flags |= C2FrameData::FLAG_END_OF_STREAM;
        }
        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
        return;
    }

    const C2ConstGraphicBlock inBuffer =
        inputBuffer->data().graphicBlocks().front();
    if (inBuffer.width() < mSize->width ||
        inBuffer.height() < mSize->height) {
        ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)",
              inBuffer.width(), mSize->width, inBuffer.height(),
              mSize->height);
        mSignalledError = true;
        work->result = C2_BAD_VALUE;
        return;
    }
    bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
    vpx_image_t raw_frame;
    const C2PlanarLayout &layout = rView->layout();
    uint32_t width = mSize->width;
    uint32_t height = mSize->height;
    if (width > 0x8000 || height > 0x8000) {
        ALOGE("Image too big: %u x %u", width, height);
        work->result = C2_BAD_VALUE;
        return;
    }
    uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
    uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
    switch (layout.type) {
        case C2PlanarLayout::TYPE_RGB:
        case C2PlanarLayout::TYPE_RGBA: {
            std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
            {
                IntfImpl::Lock lock = mIntf->lock();
                colorAspects = mIntf->getCodedColorAspects_l();
            }
            ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
                                  mConversionBuffer.size(), *rView.get(),
                                  colorAspects->matrix, colorAspects->range);
            vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                         mStrideAlign, mConversionBuffer.data());
            break;
        }
        case C2PlanarLayout::TYPE_YUV: {
            if (!IsYUV420(*rView)) {
                ALOGE("input is not YUV420");
                work->result = C2_BAD_VALUE;
                return;
            }

            if (layout.planes[layout.PLANE_Y].colInc == 1
                    && layout.planes[layout.PLANE_U].colInc == 1
                    && layout.planes[layout.PLANE_V].colInc == 1) {
                // I420 compatible - though with custom offset and stride
                vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                             mStrideAlign, (uint8_t*)rView->data()[0]);
                raw_frame.planes[1] = (uint8_t*)rView->data()[1];
                raw_frame.planes[2] = (uint8_t*)rView->data()[2];
                raw_frame.stride[0] = layout.planes[layout.PLANE_Y].rowInc;
                raw_frame.stride[1] = layout.planes[layout.PLANE_U].rowInc;
                raw_frame.stride[2] = layout.planes[layout.PLANE_V].rowInc;
            } else {
                // copy to I420
                MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, stride, vstride);
                if (mConversionBuffer.size() >= stride * vstride * 3 / 2) {
                    status_t err = ImageCopy(mConversionBuffer.data(), &img, *rView);
                    if (err != OK) {
                        ALOGE("Buffer conversion failed: %d", err);
                        work->result = C2_BAD_VALUE;
                        return;
                    }
                    vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, stride, vstride,
                                 mStrideAlign, mConversionBuffer.data());
                    vpx_img_set_rect(&raw_frame, 0, 0, width, height);
                } else {
                    ALOGE("Conversion buffer is too small: %u x %u for %zu",
                          stride, vstride, mConversionBuffer.size());
                    work->result = C2_BAD_VALUE;
                    return;
                }
            }
            break;
        }
        default:
            ALOGE("Unrecognized plane type: %d", layout.type);
            work->result = C2_BAD_VALUE;
            return;
    }

    vpx_enc_frame_flags_t flags = getEncodeFlags();
    // handle dynamic config parameters
    {
        IntfImpl::Lock lock = mIntf->lock();
        std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh = mIntf->getIntraRefresh_l();
        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
        std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync = mIntf->getRequestSync_l();
        lock.unlock();

        if (intraRefresh != mIntraRefresh) {
            mIntraRefresh = intraRefresh;
            ALOGV("Got mIntraRefresh request");
        }

        if (requestSync != mRequestSync) {
            // we can handle IDR immediately
            if (requestSync->value) {
                // unset request
                C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
                std::vector<std::unique_ptr<C2SettingResult>> failures;
                mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures);
                ALOGV("Got sync request");
                flags |= VPX_EFLAG_FORCE_KF;
            }
            mRequestSync = requestSync;
        }

        if (bitrate != mBitrate) {
            mBitrate = bitrate;
            mCodecConfiguration->rc_target_bitrate =
                (mBitrate->value + 500) / 1000;
            vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext,
                                                           mCodecConfiguration);
            if (res != VPX_CODEC_OK) {
                ALOGE("vpx encoder failed to update bitrate: %s",
                      vpx_codec_err_to_string(res));
                mSignalledError = true;
                work->result = C2_CORRUPTED;
                return;
            }
        }
    }

    uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull();
    uint32_t frameDuration;
    if (inputTimeStamp > mLastTimestamp) {
        frameDuration = (uint32_t)(inputTimeStamp - mLastTimestamp);
    } else {
        // Use default of 30 fps in case of 0 frame rate.
        float frameRate = mFrameRate->value;
        if (frameRate < 0.001) {
            frameRate = 30;
        }
        frameDuration = (uint32_t)(1000000 / frameRate + 0.5);
    }
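    // With the 1 MHz timebase configured in initEncoder, frameDuration is simply the gap
    // between consecutive input timestamps in microseconds (e.g. ~33333 for 30 fps input).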
    mLastTimestamp = inputTimeStamp;

    vpx_codec_err_t codec_return = vpx_codec_encode(mCodecContext, &raw_frame,
                                                    inputTimeStamp,
                                                    frameDuration, flags,
                                                    VPX_DL_REALTIME);
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("vpx encoder failed to encode frame");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

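    // Drain every compressed packet produced for this input. With g_lag_in_frames = 0 and
    // no spatial layers this is normally at most one VPX_CODEC_CX_FRAME_PKT per encode call.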
    bool populated = false;
    vpx_codec_iter_t encoded_packet_iterator = nullptr;
    const vpx_codec_cx_pkt_t* encoded_packet;
    while ((encoded_packet = vpx_codec_get_cx_data(
                    mCodecContext, &encoded_packet_iterator))) {
        if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) {
            std::shared_ptr<C2LinearBlock> block;
            C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
            c2_status_t err = pool->fetchLinearBlock(encoded_packet->data.frame.sz, usage, &block);
            if (err != C2_OK) {
                ALOGE("fetchLinearBlock for Output failed with status %d", err);
                work->result = C2_NO_MEMORY;
                return;
            }
            C2WriteView wView = block->map().get();
            if (wView.error()) {
                ALOGE("write view map failed %d", wView.error());
                work->result = C2_CORRUPTED;
                return;
            }

            memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
            ++mNumInputFrames;

            ALOGD("bytes generated %zu", encoded_packet->data.frame.sz);
            uint32_t flags = 0;
            if (eos) {
                flags |= C2FrameData::FLAG_END_OF_STREAM;
            }
            work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
            work->worklets.front()->output.buffers.clear();
            std::shared_ptr<C2Buffer> buffer =
                createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
            if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
                buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
                        0u /* stream id */, C2Config::SYNC_FRAME));
            }
            work->worklets.front()->output.buffers.push_back(buffer);
            work->worklets.front()->output.ordinal = work->input.ordinal;
            work->worklets.front()->output.ordinal.timestamp = encoded_packet->data.frame.pts;
            work->workletsProcessed = 1u;
            populated = true;
            if (eos) {
                mSignalledOutputEos = true;
                ALOGV("signalled EOS");
            }
        }
    }
    if (!populated) {
        work->workletsProcessed = 0u;
    }
}

c2_status_t C2SoftVpxEnc::drain(
        uint32_t drainMode,
        const std::shared_ptr<C2BlockPool> &pool) {
    (void)pool;
    if (drainMode == NO_DRAIN) {
        ALOGW("drain with NO_DRAIN: no-op");
        return C2_OK;
    }
    if (drainMode == DRAIN_CHAIN) {
        ALOGW("DRAIN_CHAIN not supported");
        return C2_OMITTED;
    }

    return C2_OK;
}

}  // namespace android