Merge "Camera: Support physical camera metadata in capture result"
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index f9e4639..46bd8f0 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -73,6 +73,7 @@
static enum {
FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4; // data format for output
+static AString gCodecName = ""; // codec name override
static bool gSizeSpecified = false; // was size explicitly requested?
static bool gWantInfoScreen = false; // do we want initial info screen?
static bool gWantFrameTime = false; // do we want times on each frame?
@@ -154,6 +155,7 @@
if (gVerbose) {
printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
+ fflush(stdout);
}
sp<AMessage> format = new AMessage;
@@ -169,11 +171,21 @@
looper->setName("screenrecord_looper");
looper->start();
ALOGV("Creating codec");
- sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
- if (codec == NULL) {
- fprintf(stderr, "ERROR: unable to create %s codec instance\n",
- kMimeTypeAvc);
- return UNKNOWN_ERROR;
+ sp<MediaCodec> codec;
+ if (gCodecName.empty()) {
+ codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
+ if (codec == NULL) {
+ fprintf(stderr, "ERROR: unable to create %s codec instance\n",
+ kMimeTypeAvc);
+ return UNKNOWN_ERROR;
+ }
+ } else {
+ codec = MediaCodec::CreateByComponentName(looper, gCodecName);
+ if (codec == NULL) {
+ fprintf(stderr, "ERROR: unable to create %s codec instance\n",
+ gCodecName.c_str());
+ return UNKNOWN_ERROR;
+ }
}
err = codec->configure(format, NULL, NULL,
@@ -275,9 +287,11 @@
if (gRotate) {
printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
outHeight, outWidth, offY, offX);
+ fflush(stdout);
} else {
printf("Content area is %ux%u at offset x=%d y=%d\n",
outWidth, outHeight, offX, offY);
+ fflush(stdout);
}
}
@@ -346,6 +360,7 @@
if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
if (gVerbose) {
printf("Time limit reached\n");
+ fflush(stdout);
}
break;
}
@@ -483,6 +498,7 @@
printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
debugNumFrames, nanoseconds_to_seconds(
systemTime(CLOCK_MONOTONIC) - startWhenNsec));
+ fflush(stdout);
}
return NO_ERROR;
}
@@ -556,6 +572,7 @@
printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
mainDpyInfo.orientation);
+ fflush(stdout);
}
bool rotated = isDeviceRotated(mainDpyInfo.orientation);
@@ -623,6 +640,7 @@
}
if (gVerbose) {
printf("Bugreport overlay created\n");
+ fflush(stdout);
}
} else {
// Use the encoder's input surface as the virtual display surface.
@@ -715,6 +733,7 @@
if (gVerbose) {
printf("Stopping encoder and muxer\n");
+ fflush(stdout);
}
}
@@ -761,6 +780,7 @@
printf(" %s", argv[i]);
}
putchar('\n');
+ fflush(stdout);
}
pid_t pid = fork();
@@ -898,6 +918,7 @@
{ "show-frame-time", no_argument, NULL, 'f' },
{ "rotate", no_argument, NULL, 'r' },
{ "output-format", required_argument, NULL, 'o' },
+ { "codec-name", required_argument, NULL, 'N' },
{ "monotonic-time", no_argument, NULL, 'm' },
{ NULL, 0, NULL, 0 }
};
@@ -978,6 +999,9 @@
return 2;
}
break;
+ case 'N':
+ gCodecName = optarg;
+ break;
case 'm':
gMonotonicTime = true;
break;
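With the new --codec-name option, screenrecord can be pointed at a specific encoder component instead of the default AVC encoder selected by MIME type, e.g. an invocation along the lines of screenrecord --codec-name OMX.google.h264.encoder /data/local/tmp/out.mp4 (the component name and output path here are illustrative, not part of this change).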
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 2e8da4b..ea239c5 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -18,6 +18,7 @@
"PluginMetricsReporting.cpp",
"SharedLibrary.cpp",
"DrmHal.cpp",
+ "DrmMetrics.cpp",
"CryptoHal.cpp",
"protos/plugin_metrics.proto",
],
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index b9f3abb..7ab0bcd 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -30,6 +30,7 @@
#include <media/DrmHal.h>
#include <media/DrmSessionClientInterface.h>
#include <media/DrmSessionManager.h>
+#include <media/EventMetric.h>
#include <media/PluginMetricsReporting.h>
#include <media/drm/DrmAPI.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -53,6 +54,14 @@
using ::android::hidl::manager::V1_0::IServiceManager;
using ::android::sp;
+namespace {
+
+// This constant corresponds to the PROPERTY_DEVICE_UNIQUE_ID constant
+// in the MediaDrm API.
+constexpr char kPropertyDeviceUniqueId[] = "deviceUniqueId";
+
+}  // namespace
+
namespace android {
#define INIT_CHECK() {if (mInitCheck != OK) return mInitCheck;}
@@ -227,7 +236,6 @@
DrmHal::DrmHal()
: mDrmSessionClient(new DrmSessionClient(this)),
mFactories(makeDrmFactories()),
- mOpenSessionCounter("/drm/mediadrm/open_session", "status"),
mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT) {
}
@@ -320,6 +328,7 @@
Return<void> DrmHal::sendEvent(EventType hEventType,
const hidl_vec<uint8_t>& sessionId, const hidl_vec<uint8_t>& data) {
+ mMetrics.mEventCounter.Increment(hEventType);
mEventLock.lock();
sp<IDrmClient> listener = mListener;
@@ -410,12 +419,21 @@
break;
}
obj.writeInt32(type);
+ mMetrics.mKeyStatusChangeCounter.Increment(keyStatus.type);
}
obj.writeInt32(hasNewUsableKey);
Mutex::Autolock lock(mNotifyLock);
listener->notify(DrmPlugin::kDrmPluginEventKeysChange, 0, &obj);
+ } else {
+ // There's no listener. But we still want to count the key change
+ // events.
+ size_t nKeys = keyStatusList.size();
+ for (size_t i = 0; i < nKeys; i++) {
+ mMetrics.mKeyStatusChangeCounter.Increment(keyStatusList[i].type);
+ }
}
+
return Void();
}
@@ -519,7 +537,7 @@
mOpenSessions.push(sessionId);
}
- mOpenSessionCounter.Increment(err);
+ mMetrics.mOpenSessionCounter.Increment(err);
return err;
}
@@ -539,8 +557,11 @@
}
}
reportMetrics();
- return toStatusT(status);
+ status_t response = toStatusT(status);
+ mMetrics.mCloseSessionCounter.Increment(response);
+ return response;
}
+ mMetrics.mCloseSessionCounter.Increment(DEAD_OBJECT);
return DEAD_OBJECT;
}
@@ -551,6 +572,7 @@
String8 &defaultUrl, DrmPlugin::KeyRequestType *keyRequestType) {
Mutex::Autolock autoLock(mLock);
INIT_CHECK();
+ EventTimer<status_t> keyRequestTimer(&mMetrics.mGetKeyRequestTiming);
DrmSessionManager::Instance()->useSession(sessionId);
@@ -562,6 +584,7 @@
} else if (keyType == DrmPlugin::kKeyType_Release) {
hKeyType = KeyType::RELEASE;
} else {
+ keyRequestTimer.SetAttribute(BAD_VALUE);
return BAD_VALUE;
}
@@ -636,12 +659,16 @@
}
});
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ keyRequestTimer.SetAttribute(err);
+ return err;
}
status_t DrmHal::provideKeyResponse(Vector<uint8_t> const &sessionId,
Vector<uint8_t> const &response, Vector<uint8_t> &keySetId) {
Mutex::Autolock autoLock(mLock);
+ EventTimer<status_t> keyResponseTimer(&mMetrics.mProvideKeyResponseTiming);
+
INIT_CHECK();
DrmSessionManager::Instance()->useSession(sessionId);
@@ -657,8 +684,9 @@
err = toStatusT(status);
}
);
-
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ keyResponseTimer.SetAttribute(err);
+ return err;
}
status_t DrmHal::removeKeys(Vector<uint8_t> const &keySetId) {
@@ -722,7 +750,9 @@
}
);
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ mMetrics.mGetProvisionRequestCounter.Increment(err);
+ return err;
}
status_t DrmHal::provideProvisionResponse(Vector<uint8_t> const &response,
@@ -743,7 +773,9 @@
}
);
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ mMetrics.mProvideProvisionResponseCounter.Increment(err);
+ return err;
}
status_t DrmHal::getSecureStops(List<Vector<uint8_t>> &secureStops) {
@@ -965,7 +997,11 @@
}
);
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ if (name == kPropertyDeviceUniqueId) {
+ mMetrics.mGetDeviceUniqueIdCounter.Increment(err);
+ }
+ return err;
}
status_t DrmHal::setPropertyString(String8 const &name, String8 const &value ) const {
@@ -987,26 +1023,12 @@
return toStatusT(status);
}
-status_t DrmHal::getMetrics(MediaAnalyticsItem* metrics) {
- // TODO: Move mOpenSessionCounter and suffixes to a separate class
- // that manages the collection of metrics and exporting them.
- std::string success_count_name =
- mOpenSessionCounter.metric_name() + "/ok/count";
- std::string error_count_name =
- mOpenSessionCounter.metric_name() + "/error/count";
- mOpenSessionCounter.ExportValues(
- [&] (status_t status, int64_t value) {
- if (status == OK) {
- metrics->setInt64(success_count_name.c_str(), value);
- } else {
- int64_t total_errors(0);
- metrics->getInt64(error_count_name.c_str(), &total_errors);
- metrics->setInt64(error_count_name.c_str(),
- total_errors + value);
- // TODO: Add support for exporting the list of error values.
- // This probably needs to be added to MediaAnalyticsItem.
- }
- });
+status_t DrmHal::getMetrics(MediaAnalyticsItem* item) {
+ if (item == nullptr) {
+ return UNEXPECTED_NULL;
+ }
+
+ mMetrics.Export(item);
return OK;
}
diff --git a/drm/libmediadrm/DrmMetrics.cpp b/drm/libmediadrm/DrmMetrics.cpp
new file mode 100644
index 0000000..68730b7
--- /dev/null
+++ b/drm/libmediadrm/DrmMetrics.cpp
@@ -0,0 +1,152 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/macros.h>
+#include <media/DrmMetrics.h>
+
+using ::android::hardware::drm::V1_0::EventType;
+using ::android::hardware::drm::V1_0::KeyStatusType;
+
+namespace {
+
+template<typename T>
+std::string GetAttributeName(T type);
+
+template<>
+std::string GetAttributeName<KeyStatusType>(KeyStatusType type) {
+ static const char* type_names[] = {
+ "USABLE", "EXPIRED", "OUTPUT_NOT_ALLOWED",
+ "STATUS_PENDING", "INTERNAL_ERROR" };
+ if (((size_t) type) >= arraysize(type_names)) {
+ return "UNKNOWN_TYPE";
+ }
+ return type_names[(size_t) type];
+}
+
+template<>
+std::string GetAttributeName<EventType>(EventType type) {
+ static const char* type_names[] = {
+ "PROVISION_REQUIRED", "KEY_NEEDED", "KEY_EXPIRED",
+ "VENDOR_DEFINED", "SESSION_RECLAIMED" };
+ if (((size_t) type) >= arraysize(type_names)) {
+ return "UNKNOWN_TYPE";
+ }
+ return type_names[(size_t) type];
+}
+
+template<typename T>
+void ExportCounterMetric(const android::CounterMetric<T>& counter,
+ android::MediaAnalyticsItem* item) {
+ if (!item) {
+ ALOGE("item was unexpectedly null.");
+ return;
+ }
+ std::string success_count_name = counter.metric_name() + "/ok/count";
+ std::string error_count_name = counter.metric_name() + "/error/count";
+ counter.ExportValues(
+ [&] (const android::status_t status, const int64_t value) {
+ if (status == android::OK) {
+ item->setInt64(success_count_name.c_str(), value);
+ } else {
+ int64_t total_errors(0);
+ item->getInt64(error_count_name.c_str(), &total_errors);
+ item->setInt64(error_count_name.c_str(), total_errors + value);
+ // TODO: Add support for exporting the list of error values.
+ // This probably needs to be added to MediaAnalyticsItem.
+ }
+ });
+}
+
+template<typename T>
+void ExportCounterMetricWithAttributeNames(
+ const android::CounterMetric<T>& counter,
+ android::MediaAnalyticsItem* item) {
+ if (!item) {
+ ALOGE("item was unexpectedly null.");
+ return;
+ }
+ counter.ExportValues(
+ [&] (const T& attribute, const int64_t value) {
+ std::string name = counter.metric_name()
+ + "/" + GetAttributeName(attribute) + "/count";
+ item->setInt64(name.c_str(), value);
+ });
+}
+
+template<typename T>
+void ExportEventMetric(const android::EventMetric<T>& event,
+ android::MediaAnalyticsItem* item) {
+ if (!item) {
+ ALOGE("item was unexpectedly null.");
+ return;
+ }
+ std::string success_count_name = event.metric_name() + "/ok/count";
+ std::string error_count_name = event.metric_name() + "/error/count";
+ std::string timing_name = event.metric_name() + "/ok/average_time_micros";
+ event.ExportValues(
+ [&] (const android::status_t& status,
+ const android::EventStatistics& value) {
+ if (status == android::OK) {
+ item->setInt64(success_count_name.c_str(), value.count);
+ item->setInt64(timing_name.c_str(), value.mean);
+ } else {
+ int64_t total_errors(0);
+ item->getInt64(error_count_name.c_str(), &total_errors);
+ item->setInt64(error_count_name.c_str(),
+ total_errors + value.count);
+ // TODO: Add support for exporting the list of error values.
+ // This probably needs to be added to MediaAnalyticsItem.
+ }
+ });
+}
+
+}  // anonymous namespace
+
+namespace android {
+
+MediaDrmMetrics::MediaDrmMetrics()
+ : mOpenSessionCounter("/drm/mediadrm/open_session", "status"),
+ mCloseSessionCounter("/drm/mediadrm/close_session", "status"),
+ mGetKeyRequestTiming("/drm/mediadrm/get_key_request", "status"),
+ mProvideKeyResponseTiming("/drm/mediadrm/provide_key_response", "status"),
+ mGetProvisionRequestCounter(
+ "/drm/mediadrm/get_provision_request", "status"),
+ mProvideProvisionResponseCounter(
+ "/drm/mediadrm/provide_provision_response", "status"),
+ mKeyStatusChangeCounter(
+ "/drm/mediadrm/key_status_change", "key_status_type"),
+ mEventCounter("/drm/mediadrm/event", "event_type"),
+ mGetDeviceUniqueIdCounter(
+ "/drm/mediadrm/get_device_unique_id", "status") {
+}
+
+void MediaDrmMetrics::Export(MediaAnalyticsItem* item) {
+ if (!item) {
+ ALOGE("item was unexpectedly null.");
+ return;
+ }
+ ExportCounterMetric(mOpenSessionCounter, item);
+ ExportCounterMetric(mCloseSessionCounter, item);
+ ExportEventMetric(mGetKeyRequestTiming, item);
+ ExportEventMetric(mProvideKeyResponseTiming, item);
+ ExportCounterMetric(mGetProvisionRequestCounter, item);
+ ExportCounterMetric(mProvideProvisionResponseCounter, item);
+ ExportCounterMetricWithAttributeNames(mKeyStatusChangeCounter, item);
+ ExportCounterMetricWithAttributeNames(mEventCounter, item);
+ ExportCounterMetric(mGetDeviceUniqueIdCounter, item);
+}
+
+} // namespace android
diff --git a/drm/libmediadrm/tests/Android.bp b/drm/libmediadrm/tests/Android.bp
index 674d3eb..fdc982d 100644
--- a/drm/libmediadrm/tests/Android.bp
+++ b/drm/libmediadrm/tests/Android.bp
@@ -10,3 +10,35 @@
"-Wall",
],
}
+
+cc_test {
+ name: "DrmMetrics_test",
+ srcs: ["DrmMetrics_test.cpp"],
+ shared_libs: [
+ "android.hardware.drm@1.0",
+ "liblog",
+ "libmediadrm",
+ "libmediametrics",
+ "libutils",
+ ],
+ include_dirs: ["frameworks/av/include/media"],
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+}
+
+cc_test {
+ name: "EventMetric_test",
+ srcs: ["EventMetric_test.cpp"],
+ shared_libs: [
+ "liblog",
+ "libmediadrm",
+ "libutils",
+ ],
+ include_dirs: ["frameworks/av/include/media"],
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+}
diff --git a/drm/libmediadrm/tests/DrmMetrics_test.cpp b/drm/libmediadrm/tests/DrmMetrics_test.cpp
new file mode 100644
index 0000000..87e9752
--- /dev/null
+++ b/drm/libmediadrm/tests/DrmMetrics_test.cpp
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "DrmMetrics.h"
+
+using ::android::hardware::drm::V1_0::EventType;
+using ::android::hardware::drm::V1_0::KeyStatusType;
+
+namespace android {
+
+/**
+ * Unit tests for the MediaDrmMetrics class.
+ */
+class MediaDrmMetricsTest : public ::testing::Test {
+};
+
+TEST_F(MediaDrmMetricsTest, EmptySuccess) {
+ MediaDrmMetrics metrics;
+ MediaAnalyticsItem item;
+
+ metrics.Export(&item);
+ EXPECT_EQ(0, item.count());
+}
+
+TEST_F(MediaDrmMetricsTest, AllValuesSuccessCounts) {
+ MediaDrmMetrics metrics;
+
+ metrics.mOpenSessionCounter.Increment(OK);
+ metrics.mCloseSessionCounter.Increment(OK);
+
+ {
+ EventTimer<status_t> get_key_request_timer(&metrics.mGetKeyRequestTiming);
+ EventTimer<status_t> provide_key_response_timer(
+ &metrics.mProvideKeyResponseTiming);
+ get_key_request_timer.SetAttribute(OK);
+ provide_key_response_timer.SetAttribute(OK);
+ }
+
+ metrics.mGetProvisionRequestCounter.Increment(OK);
+ metrics.mProvideProvisionResponseCounter.Increment(OK);
+ metrics.mGetDeviceUniqueIdCounter.Increment(OK);
+
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::USABLE);
+ metrics.mEventCounter.Increment(EventType::PROVISION_REQUIRED);
+
+ MediaAnalyticsItem item;
+
+ metrics.Export(&item);
+ EXPECT_EQ(11, item.count());
+
+ // Verify the list of pairs of int64 metrics.
+ std::vector<std::pair<std::string, int64_t>> expected_values = {
+ { "/drm/mediadrm/open_session/ok/count", 1 },
+ { "/drm/mediadrm/close_session/ok/count", 1 },
+ { "/drm/mediadrm/get_key_request/ok/count", 1 },
+ { "/drm/mediadrm/provide_key_response/ok/count", 1 },
+ { "/drm/mediadrm/get_provision_request/ok/count", 1 },
+ { "/drm/mediadrm/provide_provision_response/ok/count", 1 },
+ { "/drm/mediadrm/key_status_change/USABLE/count", 1 },
+ { "/drm/mediadrm/event/PROVISION_REQUIRED/count", 1 },
+ { "/drm/mediadrm/get_device_unique_id/ok/count", 1 }};
+ for (const auto& expected_pair : expected_values) {
+ int64_t value = -1;
+ EXPECT_TRUE(item.getInt64(expected_pair.first.c_str(), &value))
+ << "Failed to get " << expected_pair.first;
+ EXPECT_EQ(expected_pair.second, value)
+ << "Unexpected value for " << expected_pair.first;
+ }
+
+ // Validate timing values exist.
+ int64_t value = -1;
+ EXPECT_TRUE(
+ item.getInt64("/drm/mediadrm/get_key_request/ok/average_time_micros",
+ &value));
+ EXPECT_GE(value, 0);
+
+ value = -1;
+ EXPECT_TRUE(
+ item.getInt64("/drm/mediadrm/provide_key_response/ok/average_time_micros",
+ &value));
+ EXPECT_GE(value, 0);
+}
+
+TEST_F(MediaDrmMetricsTest, AllValuesFull) {
+ MediaDrmMetrics metrics;
+
+ metrics.mOpenSessionCounter.Increment(OK);
+ metrics.mOpenSessionCounter.Increment(UNEXPECTED_NULL);
+
+ metrics.mCloseSessionCounter.Increment(OK);
+ metrics.mCloseSessionCounter.Increment(UNEXPECTED_NULL);
+
+ for (status_t s : {OK, UNEXPECTED_NULL}) {
+ {
+ EventTimer<status_t> get_key_request_timer(&metrics.mGetKeyRequestTiming);
+ EventTimer<status_t> provide_key_response_timer(
+ &metrics.mProvideKeyResponseTiming);
+ get_key_request_timer.SetAttribute(s);
+ provide_key_response_timer.SetAttribute(s);
+ }
+ }
+
+ metrics.mGetProvisionRequestCounter.Increment(OK);
+ metrics.mGetProvisionRequestCounter.Increment(UNEXPECTED_NULL);
+ metrics.mProvideProvisionResponseCounter.Increment(OK);
+ metrics.mProvideProvisionResponseCounter.Increment(UNEXPECTED_NULL);
+ metrics.mGetDeviceUniqueIdCounter.Increment(OK);
+ metrics.mGetDeviceUniqueIdCounter.Increment(UNEXPECTED_NULL);
+
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::USABLE);
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::EXPIRED);
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::OUTPUTNOTALLOWED);
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::STATUSPENDING);
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::INTERNALERROR);
+ metrics.mEventCounter.Increment(EventType::PROVISION_REQUIRED);
+ metrics.mEventCounter.Increment(EventType::KEY_NEEDED);
+ metrics.mEventCounter.Increment(EventType::KEY_EXPIRED);
+ metrics.mEventCounter.Increment(EventType::VENDOR_DEFINED);
+ metrics.mEventCounter.Increment(EventType::SESSION_RECLAIMED);
+
+ MediaAnalyticsItem item;
+
+ metrics.Export(&item);
+ EXPECT_EQ(26, item.count());
+
+ // Verify the list of pairs of int64 metrics.
+ std::vector<std::pair<std::string, int64_t>> expected_values = {
+ { "/drm/mediadrm/open_session/ok/count", 1 },
+ { "/drm/mediadrm/close_session/ok/count", 1 },
+ { "/drm/mediadrm/get_key_request/ok/count", 1 },
+ { "/drm/mediadrm/provide_key_response/ok/count", 1 },
+ { "/drm/mediadrm/get_provision_request/ok/count", 1 },
+ { "/drm/mediadrm/provide_provision_response/ok/count", 1 },
+ { "/drm/mediadrm/get_device_unique_id/ok/count", 1 },
+ { "/drm/mediadrm/open_session/error/count", 1 },
+ { "/drm/mediadrm/close_session/error/count", 1 },
+ { "/drm/mediadrm/get_key_request/error/count", 1 },
+ { "/drm/mediadrm/provide_key_response/error/count", 1 },
+ { "/drm/mediadrm/get_provision_request/error/count", 1 },
+ { "/drm/mediadrm/provide_provision_response/error/count", 1 },
+ { "/drm/mediadrm/get_device_unique_id/error/count", 1 },
+ { "/drm/mediadrm/key_status_change/USABLE/count", 1 },
+ { "/drm/mediadrm/key_status_change/EXPIRED/count", 1 },
+ { "/drm/mediadrm/key_status_change/OUTPUT_NOT_ALLOWED/count", 1 },
+ { "/drm/mediadrm/key_status_change/STATUS_PENDING/count", 1 },
+ { "/drm/mediadrm/key_status_change/INTERNAL_ERROR/count", 1 },
+ { "/drm/mediadrm/event/PROVISION_REQUIRED/count", 1 },
+ { "/drm/mediadrm/event/KEY_NEEDED/count", 1 },
+ { "/drm/mediadrm/event/KEY_EXPIRED/count", 1 },
+ { "/drm/mediadrm/event/VENDOR_DEFINED/count", 1 },
+ { "/drm/mediadrm/event/SESSION_RECLAIMED/count", 1 }};
+ for (const auto& expected_pair : expected_values) {
+ int64_t value = -1;
+ EXPECT_TRUE(item.getInt64(expected_pair.first.c_str(), &value))
+ << "Failed to get " << expected_pair.first;
+ EXPECT_EQ(expected_pair.second, value)
+ << "Unexpected value for " << expected_pair.first;
+ }
+
+ // Validate timing values exist.
+ int64_t value = -1;
+ std::string name = metrics.mGetKeyRequestTiming.metric_name()
+ + "/ok/average_time_micros";
+ EXPECT_TRUE(item.getInt64(name.c_str(), &value));
+ EXPECT_GE(value, 0);
+
+ value = -1;
+ name = metrics.mProvideKeyResponseTiming.metric_name()
+ + "/ok/average_time_micros";
+ EXPECT_TRUE(item.getInt64(name.c_str(), &value));
+ EXPECT_GE(value, 0);
+}
+
+
+
+} // namespace android
diff --git a/drm/libmediadrm/tests/EventMetric_test.cpp b/drm/libmediadrm/tests/EventMetric_test.cpp
new file mode 100644
index 0000000..eb6c4f6
--- /dev/null
+++ b/drm/libmediadrm/tests/EventMetric_test.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "EventMetric.h"
+
+namespace android {
+
+/**
+ * Unit tests for the EventMetric class.
+ */
+
+TEST(EventMetricTest, IntDataTypeEmpty) {
+ EventMetric<int> metric("MyMetricName", "MetricAttributeName");
+
+ std::map<int, EventStatistics> values;
+
+ metric.ExportValues(
+ [&] (int attribute_value, const EventStatistics& value) {
+ values[attribute_value] = value;
+ });
+
+ EXPECT_TRUE(values.empty());
+}
+
+TEST(EventMetricTest, IntDataType) {
+ EventMetric<int> metric("MyMetricName", "MetricAttributeName");
+
+ std::map<int, EventStatistics> values;
+
+ metric.Record(4, 7);
+ metric.Record(5, 8);
+ metric.Record(5, 8);
+ metric.Record(5, 8);
+ metric.Record(6, 8);
+ metric.Record(6, 8);
+ metric.Record(6, 8);
+
+ metric.ExportValues(
+ [&] (int attribute_value, const EventStatistics& value) {
+ values[attribute_value] = value;
+ });
+
+ ASSERT_EQ(2u, values.size());
+ EXPECT_EQ(4, values[7].min);
+ EXPECT_EQ(4, values[7].max);
+ EXPECT_EQ(4, values[7].mean);
+ EXPECT_EQ(1, values[7].count);
+
+ EXPECT_EQ(5, values[8].min);
+ EXPECT_EQ(6, values[8].max);
+ // This is an approximate value because of the technique we're using.
+ EXPECT_NEAR(5.5, values[8].mean, 0.2);
+ EXPECT_EQ(6, values[8].count);
+}
+
+TEST(EventMetricTest, StringDataType) {
+ EventMetric<std::string> metric("MyMetricName", "MetricAttributeName");
+
+ std::map<std::string, EventStatistics> values;
+
+ metric.Record(1, "a");
+ metric.Record(2, "b");
+ metric.Record(2, "b");
+ metric.Record(3, "b");
+ metric.Record(3, "b");
+
+ metric.ExportValues(
+ [&] (std::string attribute_value, const EventStatistics& value) {
+ values[attribute_value] = value;
+ });
+
+ ASSERT_EQ(2u, values.size());
+ EXPECT_EQ(1, values["a"].min);
+ EXPECT_EQ(1, values["a"].max);
+ EXPECT_EQ(1, values["a"].mean);
+ EXPECT_EQ(1, values["a"].count);
+
+ EXPECT_EQ(2, values["b"].min);
+ EXPECT_EQ(3, values["b"].max);
+ EXPECT_NEAR(2.5, values["b"].mean, 0.2);
+ EXPECT_EQ(4, values["b"].count);
+}
+
+// Helper class that allows us to mock the clock.
+template<typename AttributeType>
+class MockEventTimer : public EventTimer<AttributeType> {
+ public:
+ explicit MockEventTimer(nsecs_t time_delta_ns,
+ EventMetric<AttributeType>* metric)
+ : EventTimer<AttributeType>(metric) {
+ // Pretend the event started earlier.
+ this->start_time_ = systemTime() - time_delta_ns;
+ }
+};
+
+TEST(EventTimerTest, IntDataType) {
+ EventMetric<int> metric("MyMetricName", "MetricAttributeName");
+
+ for (int i = 0; i < 5; i++) {
+ {
+ // Add a mock time delta.
+ MockEventTimer<int> metric_timer(i * 1000000, &metric);
+ metric_timer.SetAttribute(i % 2);
+ }
+ }
+
+ std::map<int, EventStatistics> values;
+ metric.ExportValues(
+ [&] (int attribute_value, const EventStatistics& value) {
+ values[attribute_value] = value;
+ });
+
+ ASSERT_EQ(2u, values.size());
+ EXPECT_LT(values[0].min, values[0].max);
+ EXPECT_GE(4000, values[0].max);
+ EXPECT_GT(values[0].mean, values[0].min);
+ EXPECT_LE(values[0].mean, values[0].max);
+ EXPECT_EQ(3, values[0].count);
+
+ EXPECT_LT(values[1].min, values[1].max);
+ EXPECT_GE(3000, values[1].max);
+ EXPECT_GT(values[1].mean, values[1].min);
+ EXPECT_LE(values[1].mean, values[1].max);
+ EXPECT_EQ(2, values[1].count);
+}
+
+} // namespace android
diff --git a/include/media/DrmMetrics.h b/include/media/DrmMetrics.h
new file mode 120000
index 0000000..abc966b
--- /dev/null
+++ b/include/media/DrmMetrics.h
@@ -0,0 +1 @@
+../../media/libmedia/include/media/DrmMetrics.h
\ No newline at end of file
diff --git a/include/media/EventMetric.h b/include/media/EventMetric.h
new file mode 120000
index 0000000..5707d9a
--- /dev/null
+++ b/include/media/EventMetric.h
@@ -0,0 +1 @@
+../../media/libmedia/include/media/EventMetric.h
\ No newline at end of file
diff --git a/media/libmedia/include/media/CounterMetric.h b/media/libmedia/include/media/CounterMetric.h
index f39ca7c..b53470d 100644
--- a/media/libmedia/include/media/CounterMetric.h
+++ b/media/libmedia/include/media/CounterMetric.h
@@ -16,6 +16,10 @@
#ifndef ANDROID_COUNTER_METRIC_H_
#define ANDROID_COUNTER_METRIC_H_
+#include <functional>
+#include <map>
+#include <string>
+
#include <media/MediaAnalyticsItem.h>
#include <utils/Log.h>
@@ -71,13 +75,13 @@
// of Attribute.
void ExportValues(
std::function<void (const AttributeType&,
- const int64_t count)> function) {
+ const int64_t count)> function) const {
for (auto it = values_.begin(); it != values_.end(); it++) {
function(it->first, it->second);
}
}
- const std::string& metric_name() { return metric_name_; };
+ const std::string& metric_name() const { return metric_name_; };
private:
const std::string metric_name_;
diff --git a/media/libmedia/include/media/DrmHal.h b/media/libmedia/include/media/DrmHal.h
index f2b25cd..c18d845 100644
--- a/media/libmedia/include/media/DrmHal.h
+++ b/media/libmedia/include/media/DrmHal.h
@@ -23,7 +23,7 @@
#include <android/hardware/drm/1.0/IDrmPluginListener.h>
#include <android/hardware/drm/1.0/IDrmFactory.h>
-#include <media/CounterMetric.h>
+#include <media/DrmMetrics.h>
#include <media/IDrm.h>
#include <media/IDrmClient.h>
#include <media/MediaAnalyticsItem.h>
@@ -181,7 +181,8 @@
sp<IDrmPlugin> mPlugin;
sp<drm::V1_1::IDrmPlugin> mPluginV1_1;
- CounterMetric<status_t> mOpenSessionCounter;
+ // Mutable to allow modification within the const getPropertyByteArray().
+ mutable MediaDrmMetrics mMetrics;
Vector<Vector<uint8_t>> mOpenSessions;
void closeOpenSessions();
diff --git a/media/libmedia/include/media/DrmMetrics.h b/media/libmedia/include/media/DrmMetrics.h
new file mode 100644
index 0000000..bb7509b
--- /dev/null
+++ b/media/libmedia/include/media/DrmMetrics.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DRM_METRICS_H_
+#define DRM_METRICS_H_
+
+#include <map>
+
+#include <android/hardware/drm/1.0/types.h>
+#include <media/CounterMetric.h>
+#include <media/EventMetric.h>
+
+namespace android {
+
+/**
+ * This class contains the definition of metrics captured within MediaDrm.
+ * It also contains a method for exporting all of the metrics to a
+ * MediaAnalyticsItem instance.
+ */
+class MediaDrmMetrics {
+ public:
+ explicit MediaDrmMetrics();
+ // Count of openSession calls.
+ CounterMetric<status_t> mOpenSessionCounter;
+ // Count of closeSession calls.
+ CounterMetric<status_t> mCloseSessionCounter;
+ // Count and timing of getKeyRequest calls.
+ EventMetric<status_t> mGetKeyRequestTiming;
+ // Count and timing of provideKeyResponse calls.
+ EventMetric<status_t> mProvideKeyResponseTiming;
+ // Count of getProvisionRequest calls.
+ CounterMetric<status_t> mGetProvisionRequestCounter;
+ // Count of provideProvisionResponse calls.
+ CounterMetric<status_t> mProvideProvisionResponseCounter;
+
+ // Count of key status events broken out by status type.
+ CounterMetric<::android::hardware::drm::V1_0::KeyStatusType>
+ mKeyStatusChangeCounter;
+ // Count of events broken out by event type
+ CounterMetric<::android::hardware::drm::V1_0::EventType> mEventCounter;
+
+ // Count getPropertyByteArray calls to retrieve the device unique id.
+ CounterMetric<status_t> mGetDeviceUniqueIdCounter;
+
+ // TODO: Add session start and end time support. These are a special case.
+
+ // Export the metrics to a MediaAnalyticsItem.
+ void Export(MediaAnalyticsItem* item);
+};
+
+} // namespace android
+
+#endif // DRM_METRICS_H_
diff --git a/media/libmedia/include/media/EventMetric.h b/media/libmedia/include/media/EventMetric.h
new file mode 100644
index 0000000..dbb736a
--- /dev/null
+++ b/media/libmedia/include/media/EventMetric.h
@@ -0,0 +1,176 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_EVENT_METRIC_H_
+#define ANDROID_EVENT_METRIC_H_
+
+#include <media/MediaAnalyticsItem.h>
+#include <utils/Timers.h>
+
+namespace android {
+
+// This is a simple holder for the statistics recorded in EventMetric.
+struct EventStatistics {
+ // The count of times the event occurred.
+ int64_t count;
+
+ // The minimum and maximum values recorded in the Record method.
+ double min;
+ double max;
+
+ // The average (mean) of all values recorded.
+ double mean;
+ // The sum of squared deviation. Variance can be calculated from
+ // this value.
+ // var = sum_squared_deviation / count;
+ double sum_squared_deviation;
+};
+
+// The EventMetric class is used to accumulate stats about an event over time.
+// A common use case is to track clock timings for a method call or operation.
+// An EventMetric can break down stats by a dimension specified by the
+// application. E.g. an application may want to track counts broken out by
+// error code or the size of some parameter.
+//
+// Example:
+//
+// struct C {
+// status_t DoWork() {
+// unsigned long start_time = now();
+// status_t result;
+//
+// // DO WORK and determine result;
+//
+// work_event_.Record(now() - start_time, result);
+//
+// return result;
+// }
+// EventMetric<status_t> work_event_;
+// };
+//
+// C c;
+// c.DoWork();
+//
+// std::map<status_t, EventStatistics> values;
+// c.work_event_.ExportValues(
+// [&] (const status_t& attribute_value, const EventStatistics& value) {
+// values[attribute_value] = value;
+// });
+// // Do something with the exported stat.
+//
+template<typename AttributeType>
+class EventMetric {
+ public:
+ // Instantiate the metric with the given metric name and
+ // attribute name.
+ EventMetric(
+ const std::string& metric_name,
+ const std::string& attribute_name)
+ : metric_name_(metric_name),
+ attribute_name_(attribute_name) {}
+
+ // Record one occurrence of the event: |value| is the measurement
+ // (e.g. a duration) and |attribute| selects the bucket it is recorded under.
+ void Record(double value, AttributeType attribute) {
+ if (values_.find(attribute) != values_.end()) {
+ EventStatistics* stats = values_[attribute].get();
+ // Welford's method of provisional means: update the count before the mean.
+ stats->count++;
+ double deviation = value - stats->mean;
+ stats->mean = stats->mean + (deviation / stats->count);
+ stats->sum_squared_deviation =
+ stats->sum_squared_deviation + (deviation * (value - stats->mean));
+
+ stats->min = stats->min < value ? stats->min : value;
+ stats->max = stats->max > value ? stats->max : value;
+ } else {
+ std::unique_ptr<EventStatistics> stats =
+ std::make_unique<EventStatistics>();
+ stats->count = 1;
+ stats->min = value;
+ stats->max = value;
+ stats->mean = value;
+ stats->sum_squared_deviation = 0;
+ values_[attribute] = std::move(stats);
+ }
+ };
+
+ // Export the metrics to the provided |function|. Each value for Attribute
+ // has a separate set of stats. As such, |function| will be called once per
+ // value of Attribute.
+ void ExportValues(
+ std::function<void (const AttributeType&,
+ const EventStatistics&)> function) const {
+ for (auto it = values_.begin(); it != values_.end(); it++) {
+ function(it->first, *(it->second));
+ }
+ }
+
+ const std::string& metric_name() const { return metric_name_; };
+
+ private:
+ const std::string metric_name_;
+ const std::string attribute_name_;
+ std::map<AttributeType, std::unique_ptr<struct EventStatistics>> values_;
+};
+
+// The EventTimer is a supporting class for EventMetric instances that are used
+// to time methods. The EventTimer starts a timer when first in scope, and
+// records the timing when exiting scope.
+//
+// Example:
+//
+// EventMetric<int> my_metric;
+//
+// {
+// EventTimer<int> my_timer(&my_metric);
+// // Set the attribute to associate with this timing.
+// my_timer.SetAttribute(42);
+//
+// // Do some work that you want to time.
+//
+// } // The EventTimer destructor will record the timing in my_metric.
+//
+template<typename AttributeType>
+class EventTimer {
+ public:
+ explicit EventTimer(EventMetric<AttributeType>* metric)
+ :start_time_(systemTime()), metric_(metric) {
+ }
+
+ virtual ~EventTimer() {
+ if (metric_) {
+ metric_->Record(ns2us(systemTime() - start_time_), attribute_);
+ }
+ }
+
+ // Set the attribute to associate with this timing. E.g. this can be used to
+ // record the return code from the work that was timed.
+ void SetAttribute(const AttributeType& attribute) {
+ attribute_ = attribute;
+ }
+
+ protected:
+ // Visible for testing only.
+ nsecs_t start_time_;
+
+ private:
+ EventMetric<AttributeType>* metric_;
+ AttributeType attribute_;
+};
+
+} // namespace android
+
+#endif // ANDROID_EVENT_METRIC_H_
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 0ded5be..541093a 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -126,6 +126,32 @@
}
}
+static OMX_VIDEO_CONTROLRATETYPE getVideoBitrateMode(const sp<AMessage> &msg) {
+ int32_t tmp;
+ if (msg->findInt32("bitrate-mode", &tmp)) {
+ // explicitly translate from MediaCodecInfo.EncoderCapabilities.
+ // BITRATE_MODE_* into OMX bitrate mode.
+ switch (tmp) {
+ //BITRATE_MODE_CQ
+ case 0: return OMX_Video_ControlRateConstantQuality;
+ //BITRATE_MODE_VBR
+ case 1: return OMX_Video_ControlRateVariable;
+ //BITRATE_MODE_CBR
+ case 2: return OMX_Video_ControlRateConstant;
+ default: break;
+ }
+ }
+ return OMX_Video_ControlRateVariable;
+}
+
+static bool findVideoBitrateControlInfo(const sp<AMessage> &msg,
+ OMX_VIDEO_CONTROLRATETYPE *mode, int32_t *bitrate, int32_t *quality) {
+ *mode = getVideoBitrateMode(msg);
+ bool isCQ = (*mode == OMX_Video_ControlRateConstantQuality);
+ return (!isCQ && msg->findInt32("bitrate", bitrate))
+ || (isCQ && msg->findInt32("quality", quality));
+}
+
struct MessageList : public RefBase {
MessageList() {
}
@@ -1686,6 +1712,7 @@
mConfigFormat = msg;
mIsEncoder = encoder;
+ mIsVideo = !strncasecmp(mime, "video/", 6);
mPortMode[kPortIndexInput] = IOMX::kPortModePresetByteBuffer;
mPortMode[kPortIndexOutput] = IOMX::kPortModePresetByteBuffer;
@@ -1696,19 +1723,26 @@
return err;
}
- int32_t bitRate = 0;
- // FLAC encoder doesn't need a bitrate, other encoders do
- if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
- && !msg->findInt32("bitrate", &bitRate)) {
- return INVALID_OPERATION;
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode;
+ int32_t bitrate = 0, quality;
+ // FLAC encoder or video encoder in constant quality mode doesn't need a
+ // bitrate, other encoders do.
+ if (encoder) {
+ if (mIsVideo && !findVideoBitrateControlInfo(
+ msg, &bitrateMode, &bitrate, &quality)) {
+ return INVALID_OPERATION;
+ } else if (!mIsVideo && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
+ && !msg->findInt32("bitrate", &bitrate)) {
+ return INVALID_OPERATION;
+ }
}
// propagate bitrate to the output so that the muxer has it
- if (encoder && msg->findInt32("bitrate", &bitRate)) {
+ if (encoder && msg->findInt32("bitrate", &bitrate)) {
// Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
// average bitrate. We've been setting both bitrate and max-bitrate to this same value.
- outputFormat->setInt32("bitrate", bitRate);
- outputFormat->setInt32("max-bitrate", bitRate);
+ outputFormat->setInt32("bitrate", bitrate);
+ outputFormat->setInt32("max-bitrate", bitrate);
}
int32_t storeMeta;
@@ -1765,9 +1799,7 @@
// Only enable metadata mode on encoder output if encoder can prepend
// sps/pps to idr frames, since in metadata mode the bitstream is in an
// opaque handle, to which we don't have access.
- int32_t video = !strncasecmp(mime, "video/", 6);
- mIsVideo = video;
- if (encoder && video) {
+ if (encoder && mIsVideo) {
OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
&& msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta)
&& storeMeta != 0);
@@ -1813,9 +1845,9 @@
// NOTE: we only use native window for video decoders
sp<RefBase> obj;
bool haveNativeWindow = msg->findObject("native-window", &obj)
- && obj != NULL && video && !encoder;
+ && obj != NULL && mIsVideo && !encoder;
mUsingNativeWindow = haveNativeWindow;
- if (video && !encoder) {
+ if (mIsVideo && !encoder) {
inputFormat->setInt32("adaptive-playback", false);
int32_t usageProtected;
@@ -1978,7 +2010,7 @@
(void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
// invalid encodings will default to PCM-16bit in setupRawAudioFormat.
- if (video) {
+ if (mIsVideo) {
// determine need for software renderer
bool usingSwRenderer = false;
if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) {
@@ -2112,14 +2144,14 @@
}
err = setupAACCodec(
- encoder, numChannels, sampleRate, bitRate, aacProfile,
+ encoder, numChannels, sampleRate, bitrate, aacProfile,
isADTS != 0, sbrMode, maxOutputChannelCount, drc,
pcmLimiterEnable);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
- err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
+ err = setupAMRCodec(encoder, false /* isWAMR */, bitrate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
- err = setupAMRCodec(encoder, true /* isWAMR */, bitRate);
+ err = setupAMRCodec(encoder, true /* isWAMR */, bitrate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
|| !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
// These are PCM-like formats with a fixed sample rate but
@@ -2233,7 +2265,7 @@
rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound.
}
if (rateFloat > 0) {
- err = setOperatingRate(rateFloat, video);
+ err = setOperatingRate(rateFloat, mIsVideo);
err = OK; // ignore errors
}
@@ -2258,7 +2290,7 @@
}
// create data converters if needed
- if (!video && err == OK) {
+ if (!mIsVideo && err == OK) {
AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
if (encoder) {
(void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
@@ -3709,10 +3741,12 @@
return err;
}
- int32_t width, height, bitrate;
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode;
+ int32_t width, height, bitrate = 0, quality;
if (!msg->findInt32("width", &width)
|| !msg->findInt32("height", &height)
- || !msg->findInt32("bitrate", &bitrate)) {
+ || !findVideoBitrateControlInfo(
+ msg, &bitrateMode, &bitrate, &quality)) {
return INVALID_OPERATION;
}
@@ -3965,15 +3999,6 @@
return ret > 0 ? ret - 1 : 0;
}
-static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
- int32_t tmp;
- if (!msg->findInt32("bitrate-mode", &tmp)) {
- return OMX_Video_ControlRateVariable;
- }
-
- return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
-}
-
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
int32_t bitrate;
float iFrameInterval;
@@ -3982,7 +4007,7 @@
return INVALID_OPERATION;
}
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getVideoBitrateMode(msg);
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4047,7 +4072,7 @@
return err;
}
- err = configureBitrate(bitrate, bitrateMode);
+ err = configureBitrate(bitrateMode, bitrate);
if (err != OK) {
return err;
@@ -4064,7 +4089,7 @@
return INVALID_OPERATION;
}
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getVideoBitrateMode(msg);
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4124,7 +4149,7 @@
return err;
}
- err = configureBitrate(bitrate, bitrateMode);
+ err = configureBitrate(bitrateMode, bitrate);
if (err != OK) {
return err;
@@ -4194,7 +4219,7 @@
return INVALID_OPERATION;
}
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getVideoBitrateMode(msg);
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4350,18 +4375,20 @@
}
}
- return configureBitrate(bitrate, bitrateMode);
+ return configureBitrate(bitrateMode, bitrate);
}
status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) {
- int32_t bitrate;
float iFrameInterval;
- if (!msg->findInt32("bitrate", &bitrate)
- || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
+ if (!msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
return INVALID_OPERATION;
}
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode;
+ int32_t bitrate = 0, quality = 0;
+ if (!findVideoBitrateControlInfo(msg, &bitrateMode, &bitrate, &quality)) {
+ return INVALID_OPERATION;
+ }
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4407,7 +4434,7 @@
return err;
}
- return configureBitrate(bitrate, bitrateMode);
+ return configureBitrate(bitrateMode, bitrate, quality);
}
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg, sp<AMessage> &outputFormat) {
@@ -4428,7 +4455,7 @@
}
msg->findAsFloat("i-frame-interval", &iFrameInterval);
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getVideoBitrateMode(msg);
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4505,7 +4532,7 @@
}
}
- return configureBitrate(bitrate, bitrateMode);
+ return configureBitrate(bitrateMode, bitrate);
}
status_t ACodec::verifySupportForProfileAndLevel(
@@ -4541,7 +4568,7 @@
}
status_t ACodec::configureBitrate(
- int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode, int32_t bitrate, int32_t quality) {
OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
InitOMXParams(&bitrateType);
bitrateType.nPortIndex = kPortIndexOutput;
@@ -4554,7 +4581,13 @@
}
bitrateType.eControlRate = bitrateMode;
- bitrateType.nTargetBitrate = bitrate;
+
+ // nTargetBitrate and nQualityFactor share a union; set only the relevant field
+ if (bitrateMode == OMX_Video_ControlRateConstantQuality) {
+ bitrateType.nQualityFactor = quality;
+ } else {
+ bitrateType.nTargetBitrate = bitrate;
+ }
return mOMXNode->setParameter(
OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType));
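Taken together, the ACodec changes let a video encoder format request constant-quality rate control by setting "bitrate-mode" to 0 (BITRATE_MODE_CQ) and supplying "quality" instead of "bitrate"; configureBitrate() then writes nQualityFactor rather than nTargetBitrate, since the two share a union in OMX_VIDEO_PARAM_BITRATETYPE. A minimal sketch of such a format, assuming an HEVC encoder that actually supports constant quality (the numeric values are illustrative and not taken from this change):

    // Sketch only. Keys match those read by configureCodec() and
    // setupHEVCEncoderParameters(); requires
    // <media/stagefright/foundation/AMessage.h> and <media/stagefright/MediaDefs.h>.
    sp<AMessage> format = new AMessage;
    format->setString("mime", MEDIA_MIMETYPE_VIDEO_HEVC);
    format->setInt32("width", 1280);
    format->setInt32("height", 720);
    format->setFloat("frame-rate", 30.0f);
    format->setFloat("i-frame-interval", 1.0f);
    format->setInt32("bitrate-mode", 0);  // BITRATE_MODE_CQ
    format->setInt32("quality", 80);      // used instead of "bitrate" in CQ mode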
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 01f73a1..90f6ed7 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -105,6 +105,7 @@
"libmedia_helper",
"libstagefright_codec2",
"libstagefright_foundation",
+ "libstagefright_gbs",
"libstagefright_omx",
"libstagefright_omx_utils",
"libstagefright_xmlparser",
diff --git a/media/libstagefright/CCodec.cpp b/media/libstagefright/CCodec.cpp
index 0103abd..b058f37 100644
--- a/media/libstagefright/CCodec.cpp
+++ b/media/libstagefright/CCodec.cpp
@@ -23,7 +23,9 @@
#include <C2PlatformSupport.h>
#include <gui/Surface.h>
+#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/CCodec.h>
+#include <media/stagefright/PersistentSurface.h>
#include "include/CCodecBufferChannel.h"
@@ -111,33 +113,35 @@
class CCodecListener : public C2Component::Listener {
public:
- CCodecListener(const std::shared_ptr<CCodecBufferChannel> &channel)
- : mChannel(channel) {
- }
+ explicit CCodecListener(const wp<CCodec> &codec) : mCodec(codec) {}
virtual void onWorkDone_nb(
std::weak_ptr<C2Component> component,
std::vector<std::unique_ptr<C2Work>> workItems) override {
- (void) component;
- mChannel->onWorkDone(std::move(workItems));
+ (void)component;
+ sp<CCodec> codec(mCodec.promote());
+ if (!codec) {
+ return;
+ }
+ codec->onWorkDone(workItems);
}
virtual void onTripped_nb(
std::weak_ptr<C2Component> component,
std::vector<std::shared_ptr<C2SettingResult>> settingResult) override {
// TODO
- (void) component;
- (void) settingResult;
+ (void)component;
+ (void)settingResult;
}
virtual void onError_nb(std::weak_ptr<C2Component> component, uint32_t errorCode) override {
// TODO
- (void) component;
- (void) errorCode;
+ (void)component;
+ (void)errorCode;
}
private:
- std::shared_ptr<CCodecBufferChannel> mChannel;
+ wp<CCodec> mCodec;
};
} // namespace
@@ -159,11 +163,11 @@
void CCodec::initiateAllocateComponent(const sp<AMessage> &msg) {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != RELEASED) {
+ if (state->get() != RELEASED) {
mCallback->onError(INVALID_OPERATION, ACTION_CODE_FATAL);
return;
}
- state->mState = ALLOCATING;
+ state->set(ALLOCATING);
}
AString componentName;
@@ -178,14 +182,14 @@
void CCodec::allocate(const AString &componentName) {
// TODO: use C2ComponentStore to create component
- mListener.reset(new CCodecListener(mChannel));
+ mListener.reset(new CCodecListener(this));
std::shared_ptr<C2Component> comp;
c2_status_t err = GetCodec2PlatformComponentStore()->createComponent(
componentName.c_str(), &comp);
if (err != C2_OK) {
Mutexed<State>::Locked state(mState);
- state->mState = RELEASED;
+ state->set(RELEASED);
state.unlock();
mCallback->onError(err, ACTION_CODE_FATAL);
state.lock();
@@ -194,15 +198,15 @@
comp->setListener_vb(mListener, C2_MAY_BLOCK);
{
Mutexed<State>::Locked state(mState);
- if (state->mState != ALLOCATING) {
- state->mState = RELEASED;
+ if (state->get() != ALLOCATING) {
+ state->set(RELEASED);
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = ALLOCATED;
- state->mComp = comp;
+ state->set(ALLOCATED);
+ state->comp = comp;
}
mChannel->setComponent(comp);
mCallback->onComponentAllocated(comp->intf()->getName().c_str());
@@ -211,7 +215,7 @@
void CCodec::initiateConfigureComponent(const sp<AMessage> &format) {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != ALLOCATED) {
+ if (state->get() != ALLOCATED) {
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
return;
}
@@ -252,6 +256,9 @@
inputFormat->setInt32("sample-rate", 44100);
outputFormat->setInt32("channel-count", 1);
outputFormat->setInt32("sample-rate", 44100);
+ } else {
+ outputFormat->setInt32("width", 1080);
+ outputFormat->setInt32("height", 1920);
}
} else {
inputFormat->setString("mime", mime);
@@ -272,32 +279,81 @@
{
Mutexed<Formats>::Locked formats(mFormats);
- formats->mInputFormat = inputFormat;
- formats->mOutputFormat = outputFormat;
+ formats->inputFormat = inputFormat;
+ formats->outputFormat = outputFormat;
}
mCallback->onComponentConfigured(inputFormat, outputFormat);
}
-
void CCodec::initiateCreateInputSurface() {
- // TODO
+ (new AMessage(kWhatCreateInputSurface, this))->post();
+}
+
+void CCodec::createInputSurface() {
+ sp<IGraphicBufferProducer> producer;
+ sp<GraphicBufferSource> source(new GraphicBufferSource);
+
+ status_t err = source->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to initialize graphic buffer source: %d", err);
+ mCallback->onInputSurfaceCreationFailed(err);
+ return;
+ }
+ producer = source->getIGraphicBufferProducer();
+
+ err = setupInputSurface(source);
+ if (err != OK) {
+ ALOGE("Failed to set up input surface: %d", err);
+ mCallback->onInputSurfaceCreationFailed(err);
+ return;
+ }
+
+ sp<AMessage> inputFormat;
+ sp<AMessage> outputFormat;
+ {
+ Mutexed<Formats>::Locked formats(mFormats);
+ inputFormat = formats->inputFormat;
+ outputFormat = formats->outputFormat;
+ }
+ mCallback->onInputSurfaceCreated(
+ inputFormat,
+ outputFormat,
+ new BufferProducerWrapper(producer));
+}
+
+status_t CCodec::setupInputSurface(const sp<GraphicBufferSource> &source) {
+ status_t err = mChannel->setGraphicBufferSource(source);
+ if (err != OK) {
+ return err;
+ }
+
+ // TODO: configure |source| with other settings.
+ return OK;
}
void CCodec::initiateSetInputSurface(const sp<PersistentSurface> &surface) {
+ sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
+ msg->setObject("surface", surface);
+ msg->post();
+}
+
+void CCodec::setInputSurface(const sp<PersistentSurface> &surface) {
// TODO
- (void) surface;
+ (void)surface;
+
+ mCallback->onInputSurfaceDeclined(ERROR_UNSUPPORTED);
}
void CCodec::initiateStart() {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != ALLOCATED) {
+ if (state->get() != ALLOCATED) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = STARTING;
+ state->set(STARTING);
}
(new AMessage(kWhatStart, this))->post();
@@ -307,13 +363,13 @@
std::shared_ptr<C2Component> comp;
{
Mutexed<State>::Locked state(mState);
- if (state->mState != STARTING) {
+ if (state->get() != STARTING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- comp = state->mComp;
+ comp = state->comp;
}
c2_status_t err = comp->start();
if (err != C2_OK) {
@@ -325,20 +381,20 @@
sp<AMessage> outputFormat;
{
Mutexed<Formats>::Locked formats(mFormats);
- inputFormat = formats->mInputFormat;
- outputFormat = formats->mOutputFormat;
+ inputFormat = formats->inputFormat;
+ outputFormat = formats->outputFormat;
}
mChannel->start(inputFormat, outputFormat);
{
Mutexed<State>::Locked state(mState);
- if (state->mState != STARTING) {
+ if (state->get() != STARTING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = RUNNING;
+ state->set(RUNNING);
}
mCallback->onStartCompleted();
}
@@ -354,17 +410,17 @@
void CCodec::initiateStop() {
{
Mutexed<State>::Locked state(mState);
- if (state->mState == ALLOCATED
- || state->mState == RELEASED
- || state->mState == STOPPING
- || state->mState == RELEASING) {
+ if (state->get() == ALLOCATED
+ || state->get() == RELEASED
+ || state->get() == STOPPING
+ || state->get() == RELEASING) {
// We're already stopped, released, or doing it right now.
state.unlock();
mCallback->onStopCompleted();
state.lock();
return;
}
- state->mState = STOPPING;
+ state->set(STOPPING);
}
(new AMessage(kWhatStop, this))->post();
@@ -374,19 +430,19 @@
std::shared_ptr<C2Component> comp;
{
Mutexed<State>::Locked state(mState);
- if (state->mState == RELEASING) {
+ if (state->get() == RELEASING) {
state.unlock();
// We're already stopped or release is in progress.
mCallback->onStopCompleted();
state.lock();
return;
- } else if (state->mState != STOPPING) {
+ } else if (state->get() != STOPPING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- comp = state->mComp;
+ comp = state->comp;
}
mChannel->stop();
status_t err = comp->stop();
@@ -397,8 +453,8 @@
{
Mutexed<State>::Locked state(mState);
- if (state->mState == STOPPING) {
- state->mState = ALLOCATED;
+ if (state->get() == STOPPING) {
+ state->set(ALLOCATED);
}
}
mCallback->onStopCompleted();
@@ -407,7 +463,7 @@
void CCodec::initiateRelease(bool sendCallback /* = true */) {
{
Mutexed<State>::Locked state(mState);
- if (state->mState == RELEASED || state->mState == RELEASING) {
+ if (state->get() == RELEASED || state->get() == RELEASING) {
// We're already released or doing it right now.
if (sendCallback) {
state.unlock();
@@ -416,8 +472,8 @@
}
return;
}
- if (state->mState == ALLOCATING) {
- state->mState = RELEASING;
+ if (state->get() == ALLOCATING) {
+ state->set(RELEASING);
// With the altered state allocate() would fail and clean up.
if (sendCallback) {
state.unlock();
@@ -426,7 +482,7 @@
}
return;
}
- state->mState = RELEASING;
+ state->set(RELEASING);
}
std::thread([this, sendCallback] { release(sendCallback); }).detach();
@@ -436,7 +492,7 @@
std::shared_ptr<C2Component> comp;
{
Mutexed<State>::Locked state(mState);
- if (state->mState == RELEASED) {
+ if (state->get() == RELEASED) {
if (sendCallback) {
state.unlock();
mCallback->onReleaseCompleted();
@@ -444,15 +500,15 @@
}
return;
}
- comp = state->mComp;
+ comp = state->comp;
}
mChannel->stop();
comp->release();
{
Mutexed<State>::Locked state(mState);
- state->mState = RELEASED;
- state->mComp.reset();
+ state->set(RELEASED);
+ state->comp.reset();
}
if (sendCallback) {
mCallback->onReleaseCompleted();
@@ -466,11 +522,11 @@
void CCodec::signalFlush() {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != RUNNING) {
+ if (state->get() != RUNNING) {
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
return;
}
- state->mState = FLUSHING;
+ state->set(FLUSHING);
}
(new AMessage(kWhatFlush, this))->post();
@@ -480,13 +536,13 @@
std::shared_ptr<C2Component> comp;
{
Mutexed<State>::Locked state(mState);
- if (state->mState != FLUSHING) {
+ if (state->get() != FLUSHING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- comp = state->mComp;
+ comp = state->comp;
}
mChannel->stop();
@@ -502,7 +558,7 @@
{
Mutexed<State>::Locked state(mState);
- state->mState = FLUSHED;
+ state->set(FLUSHED);
}
mCallback->onFlushCompleted();
}
@@ -510,26 +566,26 @@
void CCodec::signalResume() {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != FLUSHED) {
+ if (state->get() != FLUSHED) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = RESUMING;
+ state->set(RESUMING);
}
mChannel->start(nullptr, nullptr);
{
Mutexed<State>::Locked state(mState);
- if (state->mState != RESUMING) {
+ if (state->get() != RESUMING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = RUNNING;
+ state->set(RUNNING);
}
}
@@ -545,6 +601,14 @@
// TODO
}
+void CCodec::onWorkDone(std::vector<std::unique_ptr<C2Work>> &workItems) {
+ Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
+ for (std::unique_ptr<C2Work> &item : workItems) {
+ queue->push_back(std::move(item));
+ }
+ (new AMessage(kWhatWorkDone, this))->post();
+}
+
void CCodec::onMessageReceived(const sp<AMessage> &msg) {
TimePoint now = std::chrono::steady_clock::now();
switch (msg->what()) {
@@ -582,6 +646,37 @@
flush();
break;
}
+ case kWhatCreateInputSurface: {
+ // Surface operations may be briefly blocking.
+ setDeadline(now + 100ms);
+ createInputSurface();
+ break;
+ }
+ case kWhatSetInputSurface: {
+ // Surface operations may be briefly blocking.
+ setDeadline(now + 100ms);
+ sp<RefBase> obj;
+ CHECK(msg->findObject("surface", &obj));
+ sp<PersistentSurface> surface(static_cast<PersistentSurface *>(obj.get()));
+ setInputSurface(surface);
+ break;
+ }
+ case kWhatWorkDone: {
+ std::unique_ptr<C2Work> work;
+ {
+ Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
+ if (queue->empty()) {
+ break;
+ }
+ work.swap(queue->front());
+ queue->pop_front();
+ if (!queue->empty()) {
+ (new AMessage(kWhatWorkDone, this))->post();
+ }
+ }
+ mChannel->onWorkDone(work);
+ break;
+ }
default: {
ALOGE("unrecognized message");
break;
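
The kWhatWorkDone case above drains exactly one queued work item per message and re-posts itself while items remain, so a long queue never monopolizes the looper thread. A minimal standalone sketch of that drain-one-per-message pattern, using only the standard library (the WorkQueue/post names are illustrative, not the AOSP types):

    #include <deque>
    #include <functional>
    #include <mutex>

    struct WorkQueue {
        std::mutex lock;
        std::deque<int> items;              // stand-in for std::unique_ptr<C2Work>
        std::function<void()> post;         // stand-in for (new AMessage(...))->post()

        // Handler body: take exactly one item, re-post if more are pending.
        void processOne() {
            int work;
            {
                std::lock_guard<std::mutex> guard(lock);
                if (items.empty()) {
                    return;
                }
                work = items.front();
                items.pop_front();
                if (!items.empty()) {
                    post();                 // keep draining on later messages
                }
            }
            // handle `work` outside the lock, as the handler does with mChannel
            (void)work;
        }
    };
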
diff --git a/media/libstagefright/CCodecBufferChannel.cpp b/media/libstagefright/CCodecBufferChannel.cpp
index ebc9b0a..2f4a7ea 100644
--- a/media/libstagefright/CCodecBufferChannel.cpp
+++ b/media/libstagefright/CCodecBufferChannel.cpp
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#define LOG_TAG "CCodecBufferChannel"
#include <utils/Log.h>
@@ -48,10 +48,189 @@
using namespace hardware::cas::V1_0;
using namespace hardware::cas::native::V1_0;
+/**
+ * Base class for representation of buffers at one port.
+ */
+class CCodecBufferChannel::Buffers {
+public:
+ Buffers() = default;
+ virtual ~Buffers() = default;
+
+ /**
+ * Set format for MediaCodec-facing buffers.
+ */
+ void setFormat(const sp<AMessage> &format) { mFormat = format; }
+
+ /**
+ * Returns true if the buffers are operating under array mode.
+ */
+ virtual bool isArrayMode() const { return false; }
+
+ /**
+ * Fills the vector with MediaCodecBuffers if in array mode; otherwise,
+ * no-op.
+ */
+ virtual void getArray(Vector<sp<MediaCodecBuffer>> *) const {}
+
+protected:
+ // Format to be used for creating MediaCodec-facing buffers.
+ sp<AMessage> mFormat;
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(Buffers);
+};
+
+class CCodecBufferChannel::InputBuffers : public CCodecBufferChannel::Buffers {
+public:
+ InputBuffers() = default;
+ virtual ~InputBuffers() = default;
+
+ /**
+ * Set a block pool to obtain input memory blocks.
+ */
+ void setPool(const std::shared_ptr<C2BlockPool> &pool) { mPool = pool; }
+
+ /**
+ * Get a new MediaCodecBuffer for input and its corresponding index.
+ * Returns false if no new buffer can be obtained at the moment.
+ */
+ virtual bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) = 0;
+
+ /**
+ * Release the buffer obtained from requestNewBuffer() and get the
+ * associated C2Buffer object back. Returns empty shared_ptr if the
+ * buffer is not on file.
+ *
+ * XXX: this is a quick hack to be removed
+ */
+ virtual std::shared_ptr<C2Buffer> releaseBufferIndex(size_t /* index */) { return nullptr; }
+
+ /**
+ * Release the buffer obtained from requestNewBuffer() and get the
+ * associated C2Buffer object back. Returns empty shared_ptr if the
+ * buffer is not on file.
+ */
+ virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
+
+ /**
+ * Flush internal state. After this call, no index or buffer previously
+ * returned from requestNewBuffer() is valid.
+ */
+ virtual void flush() = 0;
+
+ /**
+ * Return array-backed version of input buffers. The returned object
+ * shall retain the internal state so that it will honor index and
+ * buffer from previous calls to requestNewBuffer().
+ */
+ virtual std::unique_ptr<InputBuffers> toArrayMode() = 0;
+
+protected:
+ // Pool to obtain blocks for input buffers.
+ std::shared_ptr<C2BlockPool> mPool;
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(InputBuffers);
+};
+
+class CCodecBufferChannel::InputBufferClient {
+public:
+ explicit InputBufferClient(
+ const std::shared_ptr<CCodecBufferChannel> &channel) : mChannel(channel) {}
+ virtual ~InputBufferClient() = default;
+
+ virtual void onInputBufferAdded(size_t index, const sp<MediaCodecBuffer> &buffer) {
+ std::shared_ptr<CCodecBufferChannel> channel = mChannel.lock();
+ if (!channel) {
+ return;
+ }
+ channel->mCallback->onInputBufferAvailable(index, buffer);
+ }
+
+ virtual void onStart() {
+ // no-op
+ }
+
+ virtual void onStop() {
+ // no-op
+ }
+
+ virtual void onRelease() {
+ // no-op
+ }
+
+ virtual void onInputBufferAvailable(size_t index, const sp<MediaCodecBuffer> &buffer) {
+ std::shared_ptr<CCodecBufferChannel> channel = mChannel.lock();
+ if (!channel) {
+ return;
+ }
+ channel->mCallback->onInputBufferAvailable(index, buffer);
+ }
+
+protected:
+ InputBufferClient() = default;
+ std::weak_ptr<CCodecBufferChannel> mChannel;
+
+ DISALLOW_EVIL_CONSTRUCTORS(InputBufferClient);
+};
+
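
InputBufferClient holds only a weak_ptr back to the channel, so a late callback cannot resurrect or dereference a channel that has already been released. A small standalone sketch of that lock-then-forward pattern (Channel and notify() are placeholders, not the real classes):

    #include <cstddef>
    #include <memory>

    struct Channel {
        void notify(size_t index) { (void)index; }
    };

    struct Client {
        std::weak_ptr<Channel> channel;

        void onEvent(size_t index) {
            // Promote the weak reference; bail out quietly if the owner is gone.
            std::shared_ptr<Channel> c = channel.lock();
            if (!c) {
                return;
            }
            c->notify(index);
        }
    };
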
+class CCodecBufferChannel::OutputBuffers : public CCodecBufferChannel::Buffers {
+public:
+ OutputBuffers() = default;
+ virtual ~OutputBuffers() = default;
+
+ /**
+ * Register output C2Buffer from the component and obtain corresponding
+ * index and MediaCodecBuffer object. Returns false if registration
+ * fails.
+ */
+ virtual bool registerBuffer(
+ const std::shared_ptr<C2Buffer> &buffer,
+ size_t *index,
+ sp<MediaCodecBuffer> *codecBuffer) = 0;
+
+ /**
+ * Register codec specific data as a buffer to be consistent with
+ * MediaCodec behavior.
+ */
+ virtual bool registerCsd(
+ const C2StreamCsdInfo::output * /* csd */,
+ size_t * /* index */,
+ sp<MediaCodecBuffer> * /* codecBuffer */) {
+ return false;
+ }
+
+ /**
+ * Release the buffer obtained from registerBuffer() and get the
+ * associated C2Buffer object back. Returns empty shared_ptr if the
+ * buffer is not on file.
+ */
+ virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
+
+ /**
+ * Flush internal state. After this call, no index or buffer previously
+ * returned from registerBuffer() is valid.
+ */
+ virtual void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) = 0;
+
+ /**
+ * Return array-backed version of output buffers. The returned object
+ * shall retain the internal state so that it will honor index and
+ * buffer from previous calls to registerBuffer().
+ */
+ virtual std::unique_ptr<OutputBuffers> toArrayMode() = 0;
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(OutputBuffers);
+};
+
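
Both toArrayMode() contracts require the returned object to keep honoring indices and buffers handed out before the switch, which is why the concrete implementations below copy their live entries into the array-backed object via add(). A toy illustration of that state-preserving conversion, with std::string standing in for the buffer type:

    #include <memory>
    #include <string>
    #include <vector>

    struct ArrayBuffers {
        std::vector<std::string> entries;   // index -> buffer, stays stable
    };

    struct DynamicBuffers {
        std::vector<std::string> entries;

        // Hand over every live entry so existing indices keep working.
        std::unique_ptr<ArrayBuffers> toArrayMode() {
            auto array = std::make_unique<ArrayBuffers>();
            array->entries = std::move(entries);
            return array;
        }
    };
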
namespace {
+constexpr int32_t kMaskI32 = ~0;
+
// TODO: get this info from component
const static size_t kMinBufferArraySize = 16;
+const static size_t kLinearBufferSize = 524288;
template <class T>
ssize_t findBufferSlot(
@@ -88,9 +267,14 @@
return Codec2Buffer::allocate(format, block);
}
-class LinearBuffer : public C2Buffer {
+class Buffer1D : public C2Buffer {
public:
- explicit LinearBuffer(C2ConstLinearBlock block) : C2Buffer({ block }) {}
+ explicit Buffer1D(C2ConstLinearBlock block) : C2Buffer({ block }) {}
+};
+
+class Buffer2D : public C2Buffer {
+public:
+ explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
};
class InputBuffersArray : public CCodecBufferChannel::InputBuffers {
@@ -99,36 +283,36 @@
void add(
size_t index,
- const sp<MediaCodecBuffer> &clientBuffer,
- const std::shared_ptr<C2Buffer> &compBuffer,
+ const sp<Codec2Buffer> &clientBuffer,
bool available) {
- if (mBufferArray.size() < index) {
+ if (mBufferArray.size() <= index) {
+ // TODO: make this more efficient
mBufferArray.resize(index + 1);
}
mBufferArray[index].clientBuffer = clientBuffer;
- mBufferArray[index].compBuffer = compBuffer;
mBufferArray[index].available = available;
}
- bool isArrayMode() final { return true; }
+ bool isArrayMode() const final { return true; }
std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
return nullptr;
}
- void getArray(Vector<sp<MediaCodecBuffer>> *array) final {
+ void getArray(Vector<sp<MediaCodecBuffer>> *array) const final {
array->clear();
- for (const auto &entry : mBufferArray) {
+ for (const Entry &entry : mBufferArray) {
array->push(entry.clientBuffer);
}
}
bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
for (size_t i = 0; i < mBufferArray.size(); ++i) {
- if (mBufferArray[i].available) {
+ if (mBufferArray[i].available && mBufferArray[i].compBuffer.expired()) {
mBufferArray[i].available = false;
*index = i;
*buffer = mBufferArray[i].clientBuffer;
+ (*buffer)->setRange(0, (*buffer)->capacity());
return true;
}
}
@@ -138,8 +322,10 @@
std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
for (size_t i = 0; i < mBufferArray.size(); ++i) {
if (!mBufferArray[i].available && mBufferArray[i].clientBuffer == buffer) {
+ std::shared_ptr<C2Buffer> buffer = std::make_shared<Buffer1D>(mBufferArray[i].clientBuffer->share());
mBufferArray[i].available = true;
- return std::move(mBufferArray[i].compBuffer);
+ mBufferArray[i].compBuffer = buffer;
+ return buffer;
}
}
return nullptr;
@@ -148,14 +334,13 @@
void flush() override {
for (size_t i = 0; i < mBufferArray.size(); ++i) {
mBufferArray[i].available = true;
- mBufferArray[i].compBuffer.reset();
}
}
private:
struct Entry {
- sp<MediaCodecBuffer> clientBuffer;
- std::shared_ptr<C2Buffer> compBuffer;
+ sp<Codec2Buffer> clientBuffer;
+ std::weak_ptr<C2Buffer> compBuffer;
bool available;
};
@@ -177,7 +362,7 @@
// TODO: proper max input size and usage
// TODO: read usage from intf
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
- sp<Codec2Buffer> newBuffer = allocateLinearBuffer(mPool, mFormat, 65536, usage);
+ sp<Codec2Buffer> newBuffer = allocateLinearBuffer(mPool, mFormat, kLinearBufferSize, usage);
if (newBuffer == nullptr) {
return false;
}
@@ -195,7 +380,7 @@
sp<Codec2Buffer> codecBuffer = it->promote();
// We got an sp<> reference from the caller, so this should never happen.
CHECK(codecBuffer != nullptr);
- return std::make_shared<LinearBuffer>(codecBuffer->share());
+ return std::make_shared<Buffer1D>(codecBuffer->share());
}
void flush() override {
@@ -203,8 +388,10 @@
std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
std::unique_ptr<InputBuffersArray> array(new InputBuffersArray);
+ array->setFormat(mFormat);
// TODO
const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+ mBuffers.resize(size);
for (size_t i = 0; i < size; ++i) {
sp<Codec2Buffer> clientBuffer = mBuffers[i].promote();
bool available = false;
@@ -212,13 +399,12 @@
// TODO: proper max input size
// TODO: read usage from intf
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
- clientBuffer = allocateLinearBuffer(mPool, mFormat, 65536, usage);
+ clientBuffer = allocateLinearBuffer(mPool, mFormat, kLinearBufferSize, usage);
available = true;
}
array->add(
i,
clientBuffer,
- std::make_shared<LinearBuffer>(clientBuffer->share()),
available);
}
return std::move(array);
@@ -230,19 +416,30 @@
std::vector<wp<Codec2Buffer>> mBuffers;
};
-// TODO: stub
class GraphicInputBuffers : public CCodecBufferChannel::InputBuffers {
public:
- using CCodecBufferChannel::InputBuffers::InputBuffers;
+ GraphicInputBuffers() = default;
bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
- (void)index;
- (void)buffer;
- return false;
+ *buffer = nullptr;
+ for (size_t i = 0; i < mAvailable.size(); ++i) {
+ if (mAvailable[i]) {
+ *index = i;
+ mAvailable[i] = false;
+ return true;
+ }
+ }
+ *index = mAvailable.size();
+ mAvailable.push_back(false);
+ return true;
}
- std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
- (void)buffer;
+ std::shared_ptr<C2Buffer> releaseBufferIndex(size_t index) override {
+ mAvailable[index] = true;
+ return nullptr;
+ }
+
+ std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &) override {
return nullptr;
}
@@ -252,6 +449,9 @@
std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
return nullptr;
}
+
+private:
+ std::vector<bool> mAvailable;
};
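
GraphicInputBuffers hands out indices from a grow-on-demand free list: reuse the first free slot if one exists, otherwise append a new one, and releaseBufferIndex() simply marks the slot free again. A compact standalone version of that slot allocator (class and method names are illustrative):

    #include <cstddef>
    #include <vector>

    class SlotAllocator {
    public:
        // Returns an index, reusing a free slot when possible.
        size_t acquire() {
            for (size_t i = 0; i < mInUse.size(); ++i) {
                if (!mInUse[i]) {
                    mInUse[i] = true;
                    return i;
                }
            }
            mInUse.push_back(true);
            return mInUse.size() - 1;
        }

        // Marks a previously acquired index as free.
        void release(size_t index) { mInUse[index] = false; }

    private:
        std::vector<bool> mInUse;   // true while the slot is handed out
    };
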
class OutputBuffersArray : public CCodecBufferChannel::OutputBuffers {
@@ -263,7 +463,8 @@
const sp<MediaCodecBuffer> &clientBuffer,
const std::shared_ptr<C2Buffer> &compBuffer,
bool available) {
- if (mBufferArray.size() < index) {
+ if (mBufferArray.size() <= index) {
+ // TODO: make this more efficient
mBufferArray.resize(index + 1);
}
mBufferArray[index].clientBuffer = clientBuffer;
@@ -271,7 +472,7 @@
mBufferArray[index].available = available;
}
- bool isArrayMode() final { return true; }
+ bool isArrayMode() const final { return true; }
std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() final {
return nullptr;
@@ -285,6 +486,7 @@
if (mBufferArray[i].available && copy(buffer, mBufferArray[i].clientBuffer)) {
*index = i;
*codecBuffer = mBufferArray[i].clientBuffer;
+ (*codecBuffer)->setFormat(mFormat);
mBufferArray[i].compBuffer = buffer;
mBufferArray[i].available = false;
return true;
@@ -299,10 +501,17 @@
sp<MediaCodecBuffer> *codecBuffer) final {
for (size_t i = 0; i < mBufferArray.size(); ++i) {
if (mBufferArray[i].available
- && mBufferArray[i].clientBuffer->capacity() <= csd->flexCount()) {
+ && mBufferArray[i].clientBuffer->capacity() >= csd->flexCount()) {
+ // TODO: proper format update
+ sp<ABuffer> csdBuffer = ABuffer::CreateAsCopy(csd->m.value, csd->flexCount());
+ mFormat = mFormat->dup();
+ mFormat->setBuffer("csd-0", csdBuffer);
+
memcpy(mBufferArray[i].clientBuffer->base(), csd->m.value, csd->flexCount());
+ mBufferArray[i].clientBuffer->setRange(0, csd->flexCount());
*index = i;
*codecBuffer = mBufferArray[i].clientBuffer;
+ (*codecBuffer)->setFormat(mFormat);
mBufferArray[i].available = false;
return true;
}
@@ -333,9 +542,9 @@
const std::shared_ptr<C2Buffer> &buffer,
const sp<MediaCodecBuffer> &clientBuffer) = 0;
- void getArray(Vector<sp<MediaCodecBuffer>> *array) final {
+ void getArray(Vector<sp<MediaCodecBuffer>> *array) const final {
array->clear();
- for (const auto &entry : mBufferArray) {
+ for (const Entry &entry : mBufferArray) {
array->push(entry.clientBuffer);
}
}
@@ -363,6 +572,7 @@
}
C2ReadView view = buffer->data().linearBlocks().front().map().get();
if (clientBuffer->capacity() < view.capacity()) {
+ ALOGV("view.capacity() = %u", view.capacity());
return false;
}
clientBuffer->setRange(0u, view.capacity());
@@ -425,9 +635,11 @@
if (ret < 0) {
return false;
}
- sp<MediaCodecBuffer> newBuffer = new MediaCodecBuffer(
- mFormat,
- ABuffer::CreateAsCopy(csd->m.value, csd->flexCount()));
+ // TODO: proper format update
+ sp<ABuffer> csdBuffer = ABuffer::CreateAsCopy(csd->m.value, csd->flexCount());
+ mFormat = mFormat->dup();
+ mFormat->setBuffer("csd-0", csdBuffer);
+ sp<MediaCodecBuffer> newBuffer = new MediaCodecBuffer(mFormat, csdBuffer);
mBuffers[ret] = { newBuffer, nullptr };
*index = ret;
*codecBuffer = newBuffer;
@@ -498,15 +710,17 @@
std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() override {
std::unique_ptr<OutputBuffersArray> array(new LinearOutputBuffersArray);
+ array->setFormat(mFormat);
const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+ mBuffers.resize(size);
for (size_t i = 0; i < size; ++i) {
sp<MediaCodecBuffer> clientBuffer = mBuffers[i].clientBuffer.promote();
std::shared_ptr<C2Buffer> compBuffer = mBuffers[i].bufferRef;
bool available = false;
if (clientBuffer == nullptr) {
// TODO: proper max input size
- clientBuffer = new MediaCodecBuffer(mFormat, new ABuffer(65536));
+ clientBuffer = new MediaCodecBuffer(mFormat, new ABuffer(kLinearBufferSize));
available = true;
compBuffer.reset();
}
@@ -526,8 +740,10 @@
std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() override {
std::unique_ptr<OutputBuffersArray> array(new GraphicOutputBuffersArray);
+ array->setFormat(mFormat);
const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+ mBuffers.resize(size);
for (size_t i = 0; i < size; ++i) {
sp<MediaCodecBuffer> clientBuffer = mBuffers[i].clientBuffer.promote();
std::shared_ptr<C2Buffer> compBuffer = mBuffers[i].bufferRef;
@@ -543,8 +759,163 @@
}
};
+class BufferQueueClient : public CCodecBufferChannel::InputBufferClient {
+public:
+ explicit BufferQueueClient(const sp<GraphicBufferSource> &source) : mSource(source) {}
+ virtual ~BufferQueueClient() = default;
+
+ void onInputBufferAdded(size_t index, const sp<MediaCodecBuffer> &buffer) override {
+ (void)buffer;
+ mSource->onInputBufferAdded(index & kMaskI32);
+ }
+
+ void onStart() override {
+ mSource->start();
+ }
+
+ void onStop() override {
+ mSource->stop();
+ }
+
+ void onRelease() override {
+ mSource->release();
+ }
+
+ void onInputBufferAvailable(size_t index, const sp<MediaCodecBuffer> &buffer) override {
+ ALOGV("onInputBufferEmptied index = %zu", index);
+ (void)buffer;
+ // TODO: can we really ignore fence here?
+ mSource->onInputBufferEmptied(index & kMaskI32, -1 /* fenceFd */);
+ }
+
+private:
+ sp<GraphicBufferSource> mSource;
+};
+
+class GraphicBlock : public C2GraphicBlock {
+ using C2GraphicBlock::C2GraphicBlock;
+ friend class ::android::CCodecBufferChannel;
+};
+
} // namespace
+class CCodecBufferChannel::C2ComponentWrapper : public ComponentWrapper {
+public:
+ explicit C2ComponentWrapper(
+ const std::shared_ptr<CCodecBufferChannel> &channel)
+ : mChannel(channel), mLastTimestamp(0) {}
+
+ virtual ~C2ComponentWrapper() {
+ for (const std::pair<int32_t, C2Handle *> &entry : mHandles) {
+ native_handle_delete(entry.second);
+ }
+ }
+
+ status_t submitBuffer(
+ int32_t bufferId, const sp<GraphicBuffer> &buffer,
+ int64_t timestamp, int fenceFd) override {
+ ALOGV("submitBuffer bufferId = %d", bufferId);
+ // TODO: Use fd to construct fence
+ (void)fenceFd;
+
+ std::shared_ptr<CCodecBufferChannel> channel = mChannel.lock();
+ if (!channel) {
+ return NO_INIT;
+ }
+
+ std::shared_ptr<C2Allocator> allocator = mAllocator.lock();
+ if (!allocator) {
+ c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+ C2AllocatorStore::PLATFORM_START + 1, // GRALLOC
+ &allocator);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ mAllocator = allocator;
+ }
+
+ std::shared_ptr<C2GraphicAllocation> alloc;
+ C2Handle *handle = WrapNativeCodec2GrallocHandle(
+ buffer->handle, buffer->width, buffer->height,
+ buffer->format, buffer->usage, buffer->stride);
+ c2_status_t err = allocator->priorGraphicAllocation(handle, &alloc);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ std::shared_ptr<C2GraphicBlock> block(new GraphicBlock(alloc));
+
+ std::unique_ptr<C2Work> work(new C2Work);
+ work->input.flags = (C2FrameData::flags_t)0;
+ work->input.ordinal.timestamp = timestamp;
+ work->input.ordinal.frameIndex = channel->mFrameIndex++;
+ work->input.buffers.clear();
+ work->input.buffers.emplace_back(new Buffer2D(
+ // TODO: fence
+ block->share(C2Rect(block->width(), block->height()), ::android::C2Fence())));
+ work->worklets.clear();
+ work->worklets.emplace_back(new C2Worklet);
+ std::list<std::unique_ptr<C2Work>> items;
+ items.push_back(std::move(work));
+
+ err = channel->mComponent->queue_nb(&items);
+ if (err != OK) {
+ native_handle_delete(handle);
+ return UNKNOWN_ERROR;
+ }
+
+ mLastTimestamp = timestamp;
+ if (mHandles.count(bufferId) > 0) {
+ native_handle_delete(mHandles[bufferId]);
+ }
+ mHandles[bufferId] = handle;
+
+ Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(channel->mInputBuffers);
+ ALOGV("releaseBufferIndex index = %d", bufferId);
+ (*buffers)->releaseBufferIndex(bufferId);
+
+ return OK;
+ }
+
+ status_t submitEos(int32_t bufferId) override {
+ std::shared_ptr<CCodecBufferChannel> channel = mChannel.lock();
+ if (!channel) {
+ return NO_INIT;
+ }
+
+ std::unique_ptr<C2Work> work(new C2Work);
+ work->input.flags = C2FrameData::FLAG_END_OF_STREAM;
+ work->input.ordinal.timestamp = mLastTimestamp;
+ work->input.ordinal.frameIndex = channel->mFrameIndex++;
+ work->input.buffers.clear();
+ work->input.buffers.push_back(nullptr);
+ work->worklets.clear();
+ work->worklets.emplace_back(new C2Worklet);
+ std::list<std::unique_ptr<C2Work>> items;
+ items.push_back(std::move(work));
+
+ c2_status_t err = channel->mComponent->queue_nb(&items);
+
+ Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(channel->mInputBuffers);
+ (*buffers)->releaseBufferIndex(bufferId);
+
+ return (err == C2_OK) ? OK : UNKNOWN_ERROR;
+ }
+
+ void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+ // TODO
+ (void)dataSpace;
+ (void)aspects;
+ (void)pixelFormat;
+ }
+
+private:
+ std::weak_ptr<CCodecBufferChannel> mChannel;
+ std::map<int32_t, C2Handle *> mHandles;
+ int64_t mLastTimestamp;
+ std::weak_ptr<C2Allocator> mAllocator;
+};
+
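
C2ComponentWrapper owns the gralloc handles it wraps: a handle stored under a reused buffer id replaces (and deletes) the previous one, queueing failures delete the freshly wrapped handle, and the destructor deletes whatever is left. A standalone sketch of that per-id ownership map, with a plain deleter standing in for native_handle_delete():

    #include <cstdint>
    #include <map>

    struct Handle { int fd = -1; };
    static void destroyHandle(Handle *h) { delete h; }   // stand-in for native_handle_delete()

    class HandleRegistry {
    public:
        ~HandleRegistry() {
            for (const std::pair<const int32_t, Handle *> &entry : mHandles) {
                destroyHandle(entry.second);
            }
        }

        // Takes ownership of `handle`; frees whatever was stored under the same id.
        void store(int32_t bufferId, Handle *handle) {
            auto it = mHandles.find(bufferId);
            if (it != mHandles.end()) {
                destroyHandle(it->second);
            }
            mHandles[bufferId] = handle;
        }

    private:
        std::map<int32_t, Handle *> mHandles;
    };
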
CCodecBufferChannel::QueueGuard::QueueGuard(
CCodecBufferChannel::QueueSync &sync) : mSync(sync) {
std::unique_lock<std::mutex> l(mSync.mMutex);
@@ -601,10 +972,14 @@
if (mCrypto != nullptr && mDealer != nullptr && mHeapSeqNum >= 0) {
mCrypto->unsetHeap(mHeapSeqNum);
}
+ // TODO: is this the right place?
+ mInputClient->onRelease();
}
void CCodecBufferChannel::setComponent(const std::shared_ptr<C2Component> &component) {
mComponent = component;
+ mInputClient.reset(new InputBufferClient(shared_from_this()));
+
C2StreamFormatConfig::input inputFormat(0u);
C2StreamFormatConfig::output outputFormat(0u);
c2_status_t err = mComponent->intf()->query_vb(
@@ -638,6 +1013,10 @@
} else {
// TODO: error
}
+ // TODO: remove once we switch to proper buffer pool.
+ if (!graphic) {
+ *buffers = (*buffers)->toArrayMode();
+ }
}
{
@@ -652,6 +1031,18 @@
}
}
+status_t CCodecBufferChannel::setGraphicBufferSource(
+ const sp<GraphicBufferSource> &source) {
+ ALOGV("setGraphicBufferSource");
+ mInputClient.reset(new BufferQueueClient(source));
+
+ // TODO: proper color aspect & dataspace
+ android_dataspace dataSpace = HAL_DATASPACE_BT709;
+ // TODO: read settings properly from the interface
+ return source->configure(new C2ComponentWrapper(
+ shared_from_this()), dataSpace, 16, 1080, 1920, GRALLOC_USAGE_SW_READ_OFTEN);
+}
+
status_t CCodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
QueueGuard guard(mSync);
if (!guard.isRunning()) {
@@ -708,9 +1099,24 @@
return -ENOSYS;
}
+void CCodecBufferChannel::feedInputBufferIfAvailable() {
+ sp<MediaCodecBuffer> inBuffer;
+ size_t index;
+ {
+ Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+ if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
+ ALOGW("no new buffer available");
+ inBuffer = nullptr;
+ }
+ }
+ ALOGV("new input index = %zu", index);
+ mInputClient->onInputBufferAvailable(index, inBuffer);
+}
+
status_t CCodecBufferChannel::renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
ALOGV("renderOutputBuffer");
+ feedInputBufferIfAvailable();
std::shared_ptr<C2Buffer> c2Buffer;
{
@@ -780,7 +1186,9 @@
}
{
Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
- (void)(*buffers)->releaseBuffer(buffer);
+ if ((*buffers)->releaseBuffer(buffer)) {
+ feedInputBufferIfAvailable();
+ }
}
return OK;
}
@@ -832,13 +1240,15 @@
return;
}
}
- mCallback->onInputBufferAvailable(index, buffer);
+ mInputClient->onInputBufferAdded(index, buffer);
}
+ mInputClient->onStart();
}
void CCodecBufferChannel::stop() {
mSync.stop();
mFirstValidFrameIndex = mFrameIndex.load();
+ mInputClient->onStop();
}
void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
@@ -852,106 +1262,110 @@
}
}
-void CCodecBufferChannel::onWorkDone(std::vector<std::unique_ptr<C2Work>> workItems) {
- for (const auto &work : workItems) {
- sp<MediaCodecBuffer> inBuffer;
- size_t index;
- {
- Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
- if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
- ALOGW("no new buffer available");
- inBuffer = nullptr;
- }
- }
- if (inBuffer != nullptr) {
- mCallback->onInputBufferAvailable(index, inBuffer);
- }
+void CCodecBufferChannel::onWorkDone(const std::unique_ptr<C2Work> &work) {
+ if (work->result != OK) {
+ ALOGE("work failed to complete: %d", work->result);
+ mOnError(work->result, ACTION_CODE_FATAL);
+ return;
+ }
- if (work->result != OK) {
- ALOGE("work failed to complete: %d", work->result);
- mOnError(work->result, ACTION_CODE_FATAL);
+ // NOTE: MediaCodec usage supposedly has only one worklet
+ if (work->worklets.size() != 1u) {
+ ALOGE("onWorkDone: incorrect number of worklets: %zu",
+ work->worklets.size());
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ return;
+ }
+
+ const std::unique_ptr<C2Worklet> &worklet = work->worklets.front();
+ if ((worklet->output.ordinal.frameIndex - mFirstValidFrameIndex.load()).peek() < 0) {
+ // Discard frames from previous generation.
+ return;
+ }
+ std::shared_ptr<C2Buffer> buffer;
+ // NOTE: MediaCodec usage supposedly has only one output stream.
+ if (worklet->output.buffers.size() > 1u) {
+ ALOGE("onWorkDone: incorrect number of output buffers: %zu",
+ worklet->output.buffers.size());
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ return;
+ } else if (worklet->output.buffers.size() == 1u) {
+ buffer = worklet->output.buffers[0];
+ if (!buffer) {
+ ALOGW("onWorkDone: nullptr found in buffers; ignored.");
+ }
+ }
+
+ const C2StreamCsdInfo::output *csdInfo = nullptr;
+ for (const std::unique_ptr<C2Param> &info : worklet->output.configUpdate) {
+ if (info->coreIndex() == C2StreamCsdInfo::output::CORE_INDEX) {
+ ALOGV("onWorkDone: csd found");
+ csdInfo = static_cast<const C2StreamCsdInfo::output *>(info.get());
+ }
+ }
+
+ int32_t flags = 0;
+ if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= MediaCodec::BUFFER_FLAG_EOS;
+ ALOGV("onWorkDone: output EOS");
+ }
+
+ sp<MediaCodecBuffer> outBuffer;
+ size_t index;
+ if (csdInfo != nullptr) {
+ Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+ if ((*buffers)->registerCsd(csdInfo, &index, &outBuffer)) {
+ outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp.peek());
+ outBuffer->meta()->setInt32("flags", flags | MediaCodec::BUFFER_FLAG_CODECCONFIG);
+ ALOGV("onWorkDone: csd index = %zu", index);
+
+ buffers.unlock();
+ mCallback->onOutputBufferAvailable(index, outBuffer);
+ buffers.lock();
+ } else {
+ ALOGE("onWorkDone: unable to register csd");
+ buffers.unlock();
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ buffers.lock();
return;
}
-
- // NOTE: MediaCodec usage supposedly have only one worklet
- if (work->worklets.size() != 1u) {
- ALOGE("incorrect number of worklets: %zu", work->worklets.size());
- mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
- continue;
- }
-
- const std::unique_ptr<C2Worklet> &worklet = work->worklets.front();
- if ((worklet->output.ordinal.frameIndex - mFirstValidFrameIndex.load()).peek() < 0) {
- // Discard frames from previous generation.
- continue;
- }
- // NOTE: MediaCodec usage supposedly have only one output stream.
- if (worklet->output.buffers.size() != 1u) {
- ALOGE("incorrect number of output buffers: %zu", worklet->output.buffers.size());
- mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
- continue;
- }
-
- const std::shared_ptr<C2Buffer> &buffer = worklet->output.buffers[0];
- const C2StreamCsdInfo::output *csdInfo = nullptr;
- if (buffer) {
- // TODO: transfer infos() into buffer metadata
- }
- for (const auto &info : worklet->output.configUpdate) {
- if (info->coreIndex() == C2StreamCsdInfo::output::CORE_INDEX) {
- ALOGV("csd found");
- csdInfo = static_cast<const C2StreamCsdInfo::output *>(info.get());
- }
- }
-
- int32_t flags = 0;
- if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
- flags |= MediaCodec::BUFFER_FLAG_EOS;
- ALOGV("output EOS");
- }
-
- sp<MediaCodecBuffer> outBuffer;
- if (csdInfo != nullptr) {
- Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
- if ((*buffers)->registerCsd(csdInfo, &index, &outBuffer)) {
- outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp.peek());
- outBuffer->meta()->setInt32("flags", flags | MediaCodec::BUFFER_FLAG_CODECCONFIG);
- ALOGV("csd index = %zu", index);
-
- buffers.unlock();
- mCallback->onOutputBufferAvailable(index, outBuffer);
- buffers.lock();
- } else {
- ALOGE("unable to register output buffer");
- buffers.unlock();
- mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
- buffers.lock();
- continue;
- }
- }
-
- if (!buffer && !flags) {
- ALOGV("Not reporting output buffer");
- continue;
- }
-
- {
- Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
- if (!(*buffers)->registerBuffer(buffer, &index, &outBuffer)) {
- ALOGE("unable to register output buffer");
-
- buffers.unlock();
- mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
- buffers.lock();
- continue;
- }
- }
-
- outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp.peek());
- outBuffer->meta()->setInt32("flags", flags);
- ALOGV("index = %zu", index);
- mCallback->onOutputBufferAvailable(index, outBuffer);
}
+
+ if (!buffer && !flags) {
+ ALOGV("onWorkDone: Not reporting output buffer");
+ return;
+ }
+
+ if (buffer) {
+ for (const std::shared_ptr<const C2Info> &info : buffer->info()) {
+ // TODO: properly translate these to metadata
+ switch (info->coreIndex().coreIndex()) {
+ case C2StreamPictureTypeMaskInfo::CORE_INDEX:
+ if (((C2StreamPictureTypeMaskInfo *)info.get())->value & C2PictureTypeKeyFrame) {
+ flags |= MediaCodec::BUFFER_FLAG_SYNCFRAME;
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
+ {
+ Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+ if (!(*buffers)->registerBuffer(buffer, &index, &outBuffer)) {
+ ALOGE("onWorkDone: unable to register output buffer");
+ buffers.unlock();
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ buffers.lock();
+ return;
+ }
+ }
+
+ outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp.peek());
+ outBuffer->meta()->setInt32("flags", flags);
+ ALOGV("onWorkDone: out buffer index = %zu", index);
+ mCallback->onOutputBufferAvailable(index, outBuffer);
}
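
onWorkDone() now folds component-side metadata into MediaCodec buffer flags: FLAG_END_OF_STREAM becomes BUFFER_FLAG_EOS and a key-frame bit in the new picture-type mask becomes BUFFER_FLAG_SYNCFRAME. A minimal standalone sketch of that translation (the constant values here are stand-ins, not the real definitions):

    #include <cstdint>

    // Illustrative stand-ins for the real constants.
    enum : uint32_t { kFrameFlagEos = 1u << 0, kPictureTypeKeyFrame = 1u << 0 };
    enum : int32_t  { kBufferFlagEos = 4, kBufferFlagSyncFrame = 1 };

    static int32_t translateOutputFlags(uint32_t frameFlags, uint32_t pictureTypeMask) {
        int32_t flags = 0;
        if (frameFlags & kFrameFlagEos) {
            flags |= kBufferFlagEos;         // end of stream -> EOS buffer flag
        }
        if (pictureTypeMask & kPictureTypeKeyFrame) {
            flags |= kBufferFlagSyncFrame;   // key frame -> sync-frame buffer flag
        }
        return flags;
    }
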
status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface) {
diff --git a/media/libstagefright/codec2/include/C2Config.h b/media/libstagefright/codec2/include/C2Config.h
index 83cb72c..2a2b9de 100644
--- a/media/libstagefright/codec2/include/C2Config.h
+++ b/media/libstagefright/codec2/include/C2Config.h
@@ -67,6 +67,7 @@
kParamIndexVideoSizeTuning,
kParamIndexCsd,
+ kParamIndexPictureTypeMask,
// video info
@@ -133,6 +134,12 @@
typedef C2StreamParam<C2Info, C2BlobValue, kParamIndexCsd> C2StreamCsdInfo;
+C2ENUM(C2PictureTypeMask, uint32_t,
+ C2PictureTypeKeyFrame = (1u << 0),
+)
+
+typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexPictureTypeMask> C2StreamPictureTypeMaskInfo;
+
/*
Component description fields:
diff --git a/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
index 18db3e9..a5ea511 100644
--- a/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
@@ -38,6 +38,15 @@
using ::android::hardware::hidl_handle;
using ::android::hardware::hidl_vec;
+namespace {
+
+struct BufferDescriptorInfo {
+ IMapper::BufferDescriptorInfo mapperInfo;
+ uint32_t stride;
+};
+
+}
+
/* ===================================== GRALLOC ALLOCATION ==================================== */
static c2_status_t maperr2error(Error maperr) {
switch (maperr) {
@@ -73,6 +82,7 @@
uint32_t format;
uint32_t usage_lo;
uint32_t usage_hi;
+ uint32_t stride;
uint32_t magic;
};
@@ -109,13 +119,16 @@
static C2HandleGralloc* WrapNativeHandle(
const native_handle_t *const handle,
- uint32_t width, uint32_t height, uint32_t format, uint64_t usage) {
+ uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride) {
//CHECK(handle != nullptr);
if (native_handle_is_invalid(handle) ||
handle->numInts > int((INT_MAX - handle->version) / sizeof(int)) - NUM_INTS - handle->numFds) {
return nullptr;
}
- ExtraData xd = { width, height, format, uint32_t(usage & 0xFFFFFFFF), uint32_t(usage >> 32), MAGIC };
+ ExtraData xd = {
+ width, height, format, uint32_t(usage & 0xFFFFFFFF), uint32_t(usage >> 32),
+ stride, MAGIC
+ };
native_handle_t *res = native_handle_create(handle->numFds, handle->numInts + NUM_INTS);
if (res != nullptr) {
memcpy(&res->data, &handle->data, sizeof(int) * (handle->numFds + handle->numInts));
@@ -138,7 +151,8 @@
static const C2HandleGralloc* Import(
const C2Handle *const handle,
- uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage) {
+ uint32_t *width, uint32_t *height, uint32_t *format,
+ uint64_t *usage, uint32_t *stride) {
const ExtraData *xd = getExtraData(handle);
if (xd == nullptr) {
return nullptr;
@@ -147,15 +161,22 @@
*height = xd->height;
*format = xd->format;
*usage = xd->usage_lo | (uint64_t(xd->usage_hi) << 32);
+ *stride = xd->stride;
return reinterpret_cast<const C2HandleGralloc *>(handle);
}
};
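
The extended ExtraData now carries the stride alongside width, height, format and usage; WrapNativeHandle() appends it to the extra ints of the cloned handle and Import() reads it back in the same order. A self-contained sketch of that pack/unpack idea using a plain int vector instead of native_handle_t (illustration only):

    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct ExtraData {                      // mirrors the on-handle layout
        uint32_t width, height, format;
        uint32_t usage_lo, usage_hi;
        uint32_t stride;
        uint32_t magic;
    };

    static std::vector<int> pack(const std::vector<int> &ints, const ExtraData &xd) {
        std::vector<int> out = ints;
        out.resize(ints.size() + sizeof(ExtraData) / sizeof(int));
        std::memcpy(out.data() + ints.size(), &xd, sizeof(xd));
        return out;
    }

    static ExtraData unpack(const std::vector<int> &ints) {
        ExtraData xd;
        std::memcpy(&xd, ints.data() + ints.size() - sizeof(ExtraData) / sizeof(int), sizeof(xd));
        return xd;
    }
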
-native_handle_t* UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
+native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
return C2HandleGralloc::UnwrapNativeHandle(handle);
}
+C2Handle *WrapNativeCodec2GrallocHandle(
+ const native_handle_t *const handle,
+ uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride) {
+ return C2HandleGralloc::WrapNativeHandle(handle, width, height, format, usage, stride);
+}
+
class C2AllocationGralloc : public C2GraphicAllocation {
public:
virtual ~C2AllocationGralloc() override;
@@ -171,7 +192,7 @@
// internal methods
// |handle| will be moved.
C2AllocationGralloc(
- const IMapper::BufferDescriptorInfo &info,
+ const BufferDescriptorInfo &info,
const sp<IMapper> &mapper,
hidl_handle &hidlHandle,
const C2HandleGralloc *const handle);
@@ -179,7 +200,7 @@
c2_status_t status() const;
private:
- const IMapper::BufferDescriptorInfo mInfo;
+ const BufferDescriptorInfo mInfo;
const sp<IMapper> mMapper;
const hidl_handle mHidlHandle;
const C2HandleGralloc *mHandle;
@@ -189,11 +210,11 @@
};
C2AllocationGralloc::C2AllocationGralloc(
- const IMapper::BufferDescriptorInfo &info,
+ const BufferDescriptorInfo &info,
const sp<IMapper> &mapper,
hidl_handle &hidlHandle,
const C2HandleGralloc *const handle)
- : C2GraphicAllocation(info.width, info.height),
+ : C2GraphicAllocation(info.mapperInfo.width, info.mapperInfo.height),
mInfo(info),
mMapper(mapper),
mHidlHandle(std::move(hidlHandle)),
@@ -241,83 +262,133 @@
return C2_CORRUPTED;
}
mLockedHandle = C2HandleGralloc::WrapNativeHandle(
- mBuffer, mInfo.width, mInfo.height, (uint32_t)mInfo.format, mInfo.usage);
+ mBuffer, mInfo.mapperInfo.width, mInfo.mapperInfo.height,
+ (uint32_t)mInfo.mapperInfo.format, mInfo.mapperInfo.usage, mInfo.stride);
}
- if (mInfo.format == PixelFormat::YCBCR_420_888 || mInfo.format == PixelFormat::YV12) {
- YCbCrLayout ycbcrLayout;
- mMapper->lockYCbCr(
- const_cast<native_handle_t *>(mBuffer),
- BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
- { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
- // TODO: fence
- hidl_handle(),
- [&err, &ycbcrLayout](const auto &maperr, const auto &mapLayout) {
- err = maperr2error(maperr);
- if (err == C2_OK) {
- ycbcrLayout = mapLayout;
- }
- });
- if (err != C2_OK) {
- return err;
+ switch (mInfo.mapperInfo.format) {
+ case PixelFormat::YCBCR_420_888:
+ case PixelFormat::YV12: {
+ YCbCrLayout ycbcrLayout;
+ mMapper->lockYCbCr(
+ const_cast<native_handle_t *>(mBuffer),
+ BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+ { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
+ // TODO: fence
+ hidl_handle(),
+ [&err, &ycbcrLayout](const auto &maperr, const auto &mapLayout) {
+ err = maperr2error(maperr);
+ if (err == C2_OK) {
+ ycbcrLayout = mapLayout;
+ }
+ });
+ if (err != C2_OK) {
+ return err;
+ }
+ addr[C2PlanarLayout::PLANE_Y] = (uint8_t *)ycbcrLayout.y;
+ addr[C2PlanarLayout::PLANE_U] = (uint8_t *)ycbcrLayout.cb;
+ addr[C2PlanarLayout::PLANE_V] = (uint8_t *)ycbcrLayout.cr;
+ layout->type = C2PlanarLayout::TYPE_YUV;
+ layout->numPlanes = 3;
+ layout->planes[C2PlanarLayout::PLANE_Y] = {
+ C2PlaneInfo::CHANNEL_Y, // channel
+ 1, // colInc
+ (int32_t)ycbcrLayout.yStride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ layout->planes[C2PlanarLayout::PLANE_U] = {
+ C2PlaneInfo::CHANNEL_CB, // channel
+ (int32_t)ycbcrLayout.chromaStep, // colInc
+ (int32_t)ycbcrLayout.cStride, // rowInc
+ 2, // mColSampling
+ 2, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ layout->planes[C2PlanarLayout::PLANE_V] = {
+ C2PlaneInfo::CHANNEL_CR, // channel
+ (int32_t)ycbcrLayout.chromaStep, // colInc
+ (int32_t)ycbcrLayout.cStride, // rowInc
+ 2, // mColSampling
+ 2, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ break;
}
- addr[C2PlanarLayout::PLANE_Y] = (uint8_t *)ycbcrLayout.y;
- addr[C2PlanarLayout::PLANE_U] = (uint8_t *)ycbcrLayout.cb;
- addr[C2PlanarLayout::PLANE_V] = (uint8_t *)ycbcrLayout.cr;
- layout->type = C2PlanarLayout::TYPE_YUV;
- layout->numPlanes = 3;
- layout->planes[C2PlanarLayout::PLANE_Y] = {
- C2PlaneInfo::CHANNEL_Y, // channel
- 1, // colInc
- (int32_t)ycbcrLayout.yStride, // rowInc
- 1, // mColSampling
- 1, // mRowSampling
- 8, // allocatedDepth
- 8, // bitDepth
- 0, // rightShift
- C2PlaneInfo::NATIVE, // endianness
- };
- layout->planes[C2PlanarLayout::PLANE_U] = {
- C2PlaneInfo::CHANNEL_CB, // channel
- (int32_t)ycbcrLayout.chromaStep, // colInc
- (int32_t)ycbcrLayout.cStride, // rowInc
- 2, // mColSampling
- 2, // mRowSampling
- 8, // allocatedDepth
- 8, // bitDepth
- 0, // rightShift
- C2PlaneInfo::NATIVE, // endianness
- };
- layout->planes[C2PlanarLayout::PLANE_V] = {
- C2PlaneInfo::CHANNEL_CR, // channel
- (int32_t)ycbcrLayout.chromaStep, // colInc
- (int32_t)ycbcrLayout.cStride, // rowInc
- 2, // mColSampling
- 2, // mRowSampling
- 8, // allocatedDepth
- 8, // bitDepth
- 0, // rightShift
- C2PlaneInfo::NATIVE, // endianness
- };
- } else {
- void *pointer = nullptr;
- mMapper->lock(
- const_cast<native_handle_t *>(mBuffer),
- BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
- { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
- // TODO: fence
- hidl_handle(),
- [&err, &pointer](const auto &maperr, const auto &mapPointer) {
- err = maperr2error(maperr);
- if (err == C2_OK) {
- pointer = mapPointer;
- }
- });
- if (err != C2_OK) {
- return err;
+
+ case PixelFormat::RGBA_8888:
+ // TODO: alpha channel
+ // fall-through
+ case PixelFormat::RGBX_8888: {
+ void *pointer = nullptr;
+ mMapper->lock(
+ const_cast<native_handle_t *>(mBuffer),
+ BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+ { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
+ // TODO: fence
+ hidl_handle(),
+ [&err, &pointer](const auto &maperr, const auto &mapPointer) {
+ err = maperr2error(maperr);
+ if (err == C2_OK) {
+ pointer = mapPointer;
+ }
+ });
+ if (err != C2_OK) {
+ return err;
+ }
+ addr[C2PlanarLayout::PLANE_R] = (uint8_t *)pointer;
+ addr[C2PlanarLayout::PLANE_G] = (uint8_t *)pointer + 1;
+ addr[C2PlanarLayout::PLANE_B] = (uint8_t *)pointer + 2;
+ layout->type = C2PlanarLayout::TYPE_RGB;
+ layout->numPlanes = 3;
+ layout->planes[C2PlanarLayout::PLANE_R] = {
+ C2PlaneInfo::CHANNEL_R, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ layout->planes[C2PlanarLayout::PLANE_G] = {
+ C2PlaneInfo::CHANNEL_G, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ layout->planes[C2PlanarLayout::PLANE_B] = {
+ C2PlaneInfo::CHANNEL_B, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ break;
}
- // TODO
- return C2_OMITTED;
+ default: {
+ return C2_OMITTED;
+ }
}
mLocked = true;
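
In the RGBA/RGBX branch added above, the three colour planes share one interleaved backing buffer: each plane descriptor offsets the base pointer by the channel position and steps through the data with colInc = 4 bytes and rowInc = 4 * stride bytes. A small standalone sketch of that addressing (plain pointers, no gralloc types):

    #include <cstddef>
    #include <cstdint>

    // Returns the address of one channel sample in an interleaved RGBA buffer.
    static const uint8_t *samplePtr(
            const uint8_t *base, uint32_t stride /* pixels per row */,
            uint32_t x, uint32_t y, uint32_t channel /* 0=R, 1=G, 2=B */) {
        const int32_t colInc = 4;                       // 4 bytes per pixel
        const int32_t rowInc = 4 * (int32_t)stride;     // bytes per row
        return base + channel + (ptrdiff_t)y * rowInc + (ptrdiff_t)x * colInc;
    }
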
@@ -396,17 +467,20 @@
// TODO: buffer usage should be determined according to |usage|
(void) usage;
- IMapper::BufferDescriptorInfo info = {
- width,
- height,
- 1u, // layerCount
- (PixelFormat)format,
- BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+ BufferDescriptorInfo info = {
+ {
+ width,
+ height,
+ 1u, // layerCount
+ (PixelFormat)format,
+ BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+ },
+ 0u, // stride placeholder
};
c2_status_t err = C2_OK;
BufferDescriptor desc;
mMapper->createDescriptor(
- info, [&err, &desc](const auto &maperr, const auto &descriptor) {
+ info.mapperInfo, [&err, &desc](const auto &maperr, const auto &descriptor) {
err = maperr2error(maperr);
if (err == C2_OK) {
desc = descriptor;
@@ -421,8 +495,7 @@
mAllocator->allocate(
desc,
1u,
- [&err, &buffer](const auto &maperr, const auto &stride, auto &buffers) {
- (void) stride;
+ [&err, &buffer, &info](const auto &maperr, const auto &stride, auto &buffers) {
err = maperr2error(maperr);
if (err != C2_OK) {
return;
@@ -431,6 +504,7 @@
err = C2_CORRUPTED;
return;
}
+ info.stride = stride;
buffer = std::move(buffers[0]);
});
if (err != C2_OK) {
@@ -442,18 +516,20 @@
info, mMapper, buffer,
C2HandleGralloc::WrapNativeHandle(
buffer.getNativeHandle(),
- info.width, info.height, (uint32_t)info.format, info.usage)));
+ info.mapperInfo.width, info.mapperInfo.height,
+ (uint32_t)info.mapperInfo.format, info.mapperInfo.usage, info.stride)));
return C2_OK;
}
c2_status_t C2AllocatorGralloc::Impl::priorGraphicAllocation(
const C2Handle *handle,
std::shared_ptr<C2GraphicAllocation> *allocation) {
- IMapper::BufferDescriptorInfo info;
- info.layerCount = 1u;
+ BufferDescriptorInfo info;
+ info.mapperInfo.layerCount = 1u;
const C2HandleGralloc *grallocHandle = C2HandleGralloc::Import(
handle,
- &info.width, &info.height, (uint32_t *)&info.format, (uint64_t *)&info.usage);
+ &info.mapperInfo.width, &info.mapperInfo.height,
+ (uint32_t *)&info.mapperInfo.format, (uint64_t *)&info.mapperInfo.usage, &info.stride);
if (grallocHandle == nullptr) {
return C2_BAD_VALUE;
}
@@ -461,7 +537,7 @@
hidl_handle hidlHandle = C2HandleGralloc::UnwrapNativeHandle(grallocHandle);
allocation->reset(new C2AllocationGralloc(info, mMapper, hidlHandle, grallocHandle));
- return C2_OMITTED;
+ return C2_OK;
}
C2AllocatorGralloc::C2AllocatorGralloc() : mImpl(new Impl) {}
diff --git a/media/libstagefright/codec2/vndk/C2Store.cpp b/media/libstagefright/codec2/vndk/C2Store.cpp
index 555e77b..496cbe1 100644
--- a/media/libstagefright/codec2/vndk/C2Store.cpp
+++ b/media/libstagefright/codec2/vndk/C2Store.cpp
@@ -404,6 +404,7 @@
C2PlatformComponentStore::C2PlatformComponentStore() {
// TODO: move this also into a .so so it can be updated
mComponents.emplace("c2.google.avc.decoder", "libstagefright_soft_c2avcdec.so");
+ mComponents.emplace("c2.google.avc.encoder", "libstagefright_soft_c2avcenc.so");
mComponents.emplace("c2.google.aac.decoder", "libstagefright_soft_c2aacdec.so");
mComponents.emplace("c2.google.aac.encoder", "libstagefright_soft_c2aacenc.so");
}
diff --git a/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
index 5311747..56fa317 100644
--- a/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
@@ -1,4 +1,3 @@
-
/*
* Copyright (C) 2016 The Android Open Source Project
*
@@ -32,7 +31,17 @@
*
* @return a new NON-OWNING native handle that must be deleted using native_handle_delete.
*/
-native_handle_t*UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle);
+native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle);
+
+/**
+ * Wrap the gralloc handle and metadata into Codec2 handle recognized by
+ * C2AllocatorGralloc.
+ *
+ * @return a new NON-OWNING C2Handle that must be deleted using native_handle_delete.
+ */
+C2Handle *WrapNativeCodec2GrallocHandle(
+ const native_handle_t *const handle,
+ uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride);
class C2AllocatorGralloc : public C2Allocator {
public:
diff --git a/media/libstagefright/codecs/aacdec/C2SoftAac.cpp b/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
index 5ddfc14..b57c2aa 100644
--- a/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
+++ b/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
@@ -375,7 +375,6 @@
work->worklets.front()->output.ordinal = work->input.ordinal;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.push_back(nullptr);
return;
}
@@ -602,7 +601,6 @@
auto fillEmptyWork = [](const std::unique_ptr<C2Work> &work) {
work->worklets.front()->output.flags = work->input.flags;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.emplace_back(nullptr);
work->worklets.front()->output.ordinal = work->input.ordinal;
work->workletsProcessed = 1u;
};
diff --git a/media/libstagefright/codecs/aacenc/C2SoftAacEnc.cpp b/media/libstagefright/codecs/aacenc/C2SoftAacEnc.cpp
index 7bce21d..6f1b325 100644
--- a/media/libstagefright/codecs/aacenc/C2SoftAacEnc.cpp
+++ b/media/libstagefright/codecs/aacenc/C2SoftAacEnc.cpp
@@ -344,8 +344,6 @@
if (nOutputBytes) {
work->worklets.front()->output.buffers.push_back(
createLinearBuffer(block, 0, nOutputBytes));
- } else {
- work->worklets.front()->output.buffers.emplace_back(nullptr);
}
#if 0
diff --git a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
index cc12d3c..4f64f6e 100644
--- a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
+++ b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
@@ -211,7 +211,6 @@
}
work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.emplace_back(nullptr);
work->worklets.front()->output.ordinal = work->input.ordinal;
work->workletsProcessed = 1u;
}
diff --git a/media/libstagefright/codecs/avcenc/Android.bp b/media/libstagefright/codecs/avcenc/Android.bp
index cefe77c..67a2fdc 100644
--- a/media/libstagefright/codecs/avcenc/Android.bp
+++ b/media/libstagefright/codecs/avcenc/Android.bp
@@ -1,4 +1,48 @@
cc_library_shared {
+ name: "libstagefright_soft_c2avcenc",
+// vendor_available: true,
+// vndk: {
+// enabled: true,
+// },
+
+ static_libs: [ "libavcenc" ],
+ srcs: ["C2SoftAvcEnc.cpp"],
+
+ include_dirs: [
+ "external/libavc/encoder",
+ "external/libavc/common",
+ "frameworks/av/media/libstagefright/include",
+ "frameworks/native/include/media/hardware",
+ ],
+
+ shared_libs: [
+ "libstagefright_codec2",
+ "libstagefright_codec2_vndk",
+ "libstagefright_foundation",
+ "libstagefright_simple_c2component",
+ "libutils",
+ "liblog",
+ ],
+
+ sanitize: {
+ misc_undefined: [
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ diag: {
+ cfi: true,
+ },
+ },
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wno-unused-variable",
+ ],
+ ldflags: ["-Wl,-Bsymbolic"],
+}
+
+cc_library_shared {
name: "libstagefright_soft_avcenc",
vendor_available: true,
vndk: {
diff --git a/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.cpp b/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.cpp
new file mode 100644
index 0000000..7c281e3
--- /dev/null
+++ b/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.cpp
@@ -0,0 +1,1239 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAvcEnc"
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "ih264_typedefs.h"
+#include "ih264e.h"
+#include "ih264e_error.h"
+#include "iv2.h"
+#include "ive2.h"
+#include "C2SoftAvcEnc.h"
+
+namespace android {
+
+#define ive_api_function ih264e_api_function
+
+namespace {
+
+// From external/libavc/encoder/ih264e_bitstream.h
+constexpr uint32_t MIN_STREAM_SIZE = 0x800;
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+void ConvertRGBToPlanarYUV(
+ uint8_t *dstY, size_t dstStride, size_t dstVStride,
+ const C2GraphicView &src) {
+ CHECK((src.width() & 1) == 0);
+ CHECK((src.height() & 1) == 0);
+
+ uint8_t *dstU = dstY + dstStride * dstVStride;
+ uint8_t *dstV = dstU + (dstStride >> 1) * (dstVStride >> 1);
+
+ const C2PlanarLayout &layout = src.layout();
+ const uint8_t *pRed = src.data()[C2PlanarLayout::PLANE_R];
+ const uint8_t *pGreen = src.data()[C2PlanarLayout::PLANE_G];
+ const uint8_t *pBlue = src.data()[C2PlanarLayout::PLANE_B];
+
+ for (size_t y = 0; y < src.height(); ++y) {
+ for (size_t x = 0; x < src.width(); ++x) {
+ unsigned red = *pRed;
+ unsigned green = *pGreen;
+ unsigned blue = *pBlue;
+
+ // using ITU-R BT.601 conversion matrix
+ unsigned luma =
+ ((red * 66 + green * 129 + blue * 25) >> 8) + 16;
+
+ dstY[x] = luma;
+
+ if ((x & 1) == 0 && (y & 1) == 0) {
+ unsigned U =
+ ((-red * 38 - green * 74 + blue * 112) >> 8) + 128;
+
+ unsigned V =
+ ((red * 112 - green * 94 - blue * 18) >> 8) + 128;
+
+ dstU[x >> 1] = U;
+ dstV[x >> 1] = V;
+ }
+ pRed += layout.planes[C2PlanarLayout::PLANE_R].colInc;
+ pGreen += layout.planes[C2PlanarLayout::PLANE_G].colInc;
+ pBlue += layout.planes[C2PlanarLayout::PLANE_B].colInc;
+ }
+
+ if ((y & 1) == 0) {
+ dstU += dstStride >> 1;
+ dstV += dstStride >> 1;
+ }
+
+ pRed -= layout.planes[C2PlanarLayout::PLANE_R].colInc * src.width();
+ pGreen -= layout.planes[C2PlanarLayout::PLANE_G].colInc * src.width();
+ pBlue -= layout.planes[C2PlanarLayout::PLANE_B].colInc * src.width();
+ pRed += layout.planes[C2PlanarLayout::PLANE_R].rowInc;
+ pGreen += layout.planes[C2PlanarLayout::PLANE_G].rowInc;
+ pBlue += layout.planes[C2PlanarLayout::PLANE_B].rowInc;
+
+ dstY += dstStride;
+ }
+}
+
+} // namespace
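
ConvertRGBToPlanarYUV() uses the fixed-point ITU-R BT.601 coefficients (66/129/25 for luma, scaled by 256, with the usual 16 and 128 offsets). As a worked check of those constants, a pure white pixel should map to roughly Y=235, U=V=128; a standalone sketch with signed intermediates for clarity:

    #include <cassert>
    #include <cstdint>

    // Same BT.601 coefficients as above, computed per pixel with signed intermediates.
    static void rgbToYuv601(int r, int g, int b, uint8_t *y, uint8_t *u, uint8_t *v) {
        *y = (uint8_t)(((r * 66 + g * 129 + b * 25) >> 8) + 16);
        *u = (uint8_t)(((-r * 38 - g * 74 + b * 112) >> 8) + 128);
        *v = (uint8_t)(((r * 112 - g * 94 - b * 18) >> 8) + 128);
    }

    int main() {
        uint8_t y, u, v;
        rgbToYuv601(255, 255, 255, &y, &u, &v);   // pure white
        assert(y == 235 && u == 128 && v == 128); // studio-range white point
        return 0;
    }
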
+
+C2SoftAvcEnc::C2SoftAvcEnc(const char *name, c2_node_id_t id)
+ : SimpleC2Component(
+ SimpleC2Interface::Builder(name, id)
+ .inputFormat(C2FormatVideo)
+ .outputFormat(C2FormatCompressed)
+ .build()),
+ mUpdateFlag(0),
+ mIvVideoColorFormat(IV_YUV_420P),
+ mAVCEncProfile(IV_PROFILE_BASE),
+ mAVCEncLevel(41),
+ mStarted(false),
+ mSawInputEOS(false),
+ mSawOutputEOS(false),
+ mSignalledError(false),
+ mCodecCtx(NULL),
+ mWidth(1080),
+ mHeight(1920),
+ mFramerate(60),
+ mBitrate(20000),
+ // TODO: output buffer size
+ mOutBufferSize(524288) {
+
+ // If dump is enabled, then create an empty file
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+ CREATE_DUMP_FILE(mOutFile);
+
+ initEncParams();
+}
+
+C2SoftAvcEnc::~C2SoftAvcEnc() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftAvcEnc::onInit() {
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::onStop() {
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::onReset() {
+ // TODO: use IVE_CMD_CTL_RESET?
+ releaseEncoder();
+ initEncParams();
+}
+
+void C2SoftAvcEnc::onRelease() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftAvcEnc::onFlush_sm() {
+ // TODO: use IVE_CMD_CTL_FLUSH?
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::initEncParams() {
+ mCodecCtx = NULL;
+ mMemRecords = NULL;
+ mNumMemRecords = DEFAULT_MEM_REC_CNT;
+ mHeaderGenerated = 0;
+ mNumCores = GetCPUCoreCount();
+ mArch = DEFAULT_ARCH;
+ mSliceMode = DEFAULT_SLICE_MODE;
+ mSliceParam = DEFAULT_SLICE_PARAM;
+ mHalfPelEnable = DEFAULT_HPEL;
+ mIInterval = DEFAULT_I_INTERVAL;
+ mIDRInterval = DEFAULT_IDR_INTERVAL;
+ mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL;
+ mEnableFastSad = DEFAULT_ENABLE_FAST_SAD;
+ mEnableAltRef = DEFAULT_ENABLE_ALT_REF;
+ mEncSpeed = DEFAULT_ENC_SPEED;
+ mIntra4x4 = DEFAULT_INTRA4x4;
+ mConstrainedIntraFlag = DEFAULT_CONSTRAINED_INTRA;
+ mAIRMode = DEFAULT_AIR;
+ mAIRRefreshPeriod = DEFAULT_AIR_REFRESH_PERIOD;
+ mPSNREnable = DEFAULT_PSNR_ENABLE;
+ mReconEnable = DEFAULT_RECON_ENABLE;
+ mEntropyMode = DEFAULT_ENTROPY_MODE;
+ mBframes = DEFAULT_B_FRAMES;
+
+ gettimeofday(&mTimeStart, NULL);
+ gettimeofday(&mTimeEnd, NULL);
+}
+
+c2_status_t C2SoftAvcEnc::setDimensions() {
+ ive_ctl_set_dimensions_ip_t s_dimensions_ip;
+ ive_ctl_set_dimensions_op_t s_dimensions_op;
+ IV_STATUS_T status;
+
+ s_dimensions_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS;
+ s_dimensions_ip.u4_ht = mHeight;
+ s_dimensions_ip.u4_wd = mWidth;
+
+ s_dimensions_ip.u4_timestamp_high = -1;
+ s_dimensions_ip.u4_timestamp_low = -1;
+
+ s_dimensions_ip.u4_size = sizeof(ive_ctl_set_dimensions_ip_t);
+ s_dimensions_op.u4_size = sizeof(ive_ctl_set_dimensions_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_dimensions_ip, &s_dimensions_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame dimensions = 0x%x\n",
+ s_dimensions_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setNumCores() {
+ IV_STATUS_T status;
+ ive_ctl_set_num_cores_ip_t s_num_cores_ip;
+ ive_ctl_set_num_cores_op_t s_num_cores_op;
+ s_num_cores_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_num_cores_ip.e_sub_cmd = IVE_CMD_CTL_SET_NUM_CORES;
+ s_num_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_CORES);
+ s_num_cores_ip.u4_timestamp_high = -1;
+ s_num_cores_ip.u4_timestamp_low = -1;
+ s_num_cores_ip.u4_size = sizeof(ive_ctl_set_num_cores_ip_t);
+
+ s_num_cores_op.u4_size = sizeof(ive_ctl_set_num_cores_op_t);
+
+ status = ive_api_function(
+ mCodecCtx, (void *) &s_num_cores_ip, (void *) &s_num_cores_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set processor params = 0x%x\n",
+ s_num_cores_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setFrameRate() {
+ ive_ctl_set_frame_rate_ip_t s_frame_rate_ip;
+ ive_ctl_set_frame_rate_op_t s_frame_rate_op;
+ IV_STATUS_T status;
+
+ s_frame_rate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_frame_rate_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE;
+
+ s_frame_rate_ip.u4_src_frame_rate = mFramerate;
+ s_frame_rate_ip.u4_tgt_frame_rate = mFramerate;
+
+ s_frame_rate_ip.u4_timestamp_high = -1;
+ s_frame_rate_ip.u4_timestamp_low = -1;
+
+ s_frame_rate_ip.u4_size = sizeof(ive_ctl_set_frame_rate_ip_t);
+ s_frame_rate_op.u4_size = sizeof(ive_ctl_set_frame_rate_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_frame_rate_ip, &s_frame_rate_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame rate = 0x%x\n",
+ s_frame_rate_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setIpeParams() {
+ ive_ctl_set_ipe_params_ip_t s_ipe_params_ip;
+ ive_ctl_set_ipe_params_op_t s_ipe_params_op;
+ IV_STATUS_T status;
+
+ s_ipe_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_ipe_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_IPE_PARAMS;
+
+ s_ipe_params_ip.u4_enable_intra_4x4 = mIntra4x4;
+ s_ipe_params_ip.u4_enc_speed_preset = mEncSpeed;
+ s_ipe_params_ip.u4_constrained_intra_pred = mConstrainedIntraFlag;
+
+ s_ipe_params_ip.u4_timestamp_high = -1;
+ s_ipe_params_ip.u4_timestamp_low = -1;
+
+ s_ipe_params_ip.u4_size = sizeof(ive_ctl_set_ipe_params_ip_t);
+ s_ipe_params_op.u4_size = sizeof(ive_ctl_set_ipe_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_ipe_params_ip, &s_ipe_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set ipe params = 0x%x\n",
+ s_ipe_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setBitRate() {
+ ive_ctl_set_bitrate_ip_t s_bitrate_ip;
+ ive_ctl_set_bitrate_op_t s_bitrate_op;
+ IV_STATUS_T status;
+
+ s_bitrate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_bitrate_ip.e_sub_cmd = IVE_CMD_CTL_SET_BITRATE;
+
+ s_bitrate_ip.u4_target_bitrate = mBitrate;
+
+ s_bitrate_ip.u4_timestamp_high = -1;
+ s_bitrate_ip.u4_timestamp_low = -1;
+
+ s_bitrate_ip.u4_size = sizeof(ive_ctl_set_bitrate_ip_t);
+ s_bitrate_op.u4_size = sizeof(ive_ctl_set_bitrate_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_bitrate_ip, &s_bitrate_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set bit rate = 0x%x\n", s_bitrate_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) {
+ ive_ctl_set_frame_type_ip_t s_frame_type_ip;
+ ive_ctl_set_frame_type_op_t s_frame_type_op;
+ IV_STATUS_T status;
+ s_frame_type_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_frame_type_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE;
+
+ s_frame_type_ip.e_frame_type = e_frame_type;
+
+ s_frame_type_ip.u4_timestamp_high = -1;
+ s_frame_type_ip.u4_timestamp_low = -1;
+
+ s_frame_type_ip.u4_size = sizeof(ive_ctl_set_frame_type_ip_t);
+ s_frame_type_op.u4_size = sizeof(ive_ctl_set_frame_type_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_frame_type_ip, &s_frame_type_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame type = 0x%x\n",
+ s_frame_type_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setQp() {
+ ive_ctl_set_qp_ip_t s_qp_ip;
+ ive_ctl_set_qp_op_t s_qp_op;
+ IV_STATUS_T status;
+
+ s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
+
+ s_qp_ip.u4_i_qp = DEFAULT_I_QP;
+ s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_p_qp = DEFAULT_P_QP;
+ s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_b_qp = DEFAULT_B_QP;
+ s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_timestamp_high = -1;
+ s_qp_ip.u4_timestamp_low = -1;
+
+ s_qp_ip.u4_size = sizeof(ive_ctl_set_qp_ip_t);
+ s_qp_op.u4_size = sizeof(ive_ctl_set_qp_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_qp_ip, &s_qp_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set qp 0x%x\n", s_qp_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setEncMode(IVE_ENC_MODE_T e_enc_mode) {
+ IV_STATUS_T status;
+ ive_ctl_set_enc_mode_ip_t s_enc_mode_ip;
+ ive_ctl_set_enc_mode_op_t s_enc_mode_op;
+
+ s_enc_mode_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_enc_mode_ip.e_sub_cmd = IVE_CMD_CTL_SET_ENC_MODE;
+
+ s_enc_mode_ip.e_enc_mode = e_enc_mode;
+
+ s_enc_mode_ip.u4_timestamp_high = -1;
+ s_enc_mode_ip.u4_timestamp_low = -1;
+
+ s_enc_mode_ip.u4_size = sizeof(ive_ctl_set_enc_mode_ip_t);
+ s_enc_mode_op.u4_size = sizeof(ive_ctl_set_enc_mode_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_enc_mode_ip, &s_enc_mode_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set in header encode mode = 0x%x\n",
+ s_enc_mode_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setVbvParams() {
+ ive_ctl_set_vbv_params_ip_t s_vbv_ip;
+ ive_ctl_set_vbv_params_op_t s_vbv_op;
+ IV_STATUS_T status;
+
+ s_vbv_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_vbv_ip.e_sub_cmd = IVE_CMD_CTL_SET_VBV_PARAMS;
+
+ s_vbv_ip.u4_vbv_buf_size = 0;
+ s_vbv_ip.u4_vbv_buffer_delay = 1000;
+
+ s_vbv_ip.u4_timestamp_high = -1;
+ s_vbv_ip.u4_timestamp_low = -1;
+
+ s_vbv_ip.u4_size = sizeof(ive_ctl_set_vbv_params_ip_t);
+ s_vbv_op.u4_size = sizeof(ive_ctl_set_vbv_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_vbv_ip, &s_vbv_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set VBV params = 0x%x\n", s_vbv_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setAirParams() {
+ ive_ctl_set_air_params_ip_t s_air_ip;
+ ive_ctl_set_air_params_op_t s_air_op;
+ IV_STATUS_T status;
+
+ s_air_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_air_ip.e_sub_cmd = IVE_CMD_CTL_SET_AIR_PARAMS;
+
+ s_air_ip.e_air_mode = mAIRMode;
+ s_air_ip.u4_air_refresh_period = mAIRRefreshPeriod;
+
+ s_air_ip.u4_timestamp_high = -1;
+ s_air_ip.u4_timestamp_low = -1;
+
+ s_air_ip.u4_size = sizeof(ive_ctl_set_air_params_ip_t);
+ s_air_op.u4_size = sizeof(ive_ctl_set_air_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_air_ip, &s_air_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set air params = 0x%x\n", s_air_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setMeParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_me_params_ip_t s_me_params_ip;
+ ive_ctl_set_me_params_op_t s_me_params_op;
+
+ s_me_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_me_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_ME_PARAMS;
+
+ s_me_params_ip.u4_enable_fast_sad = mEnableFastSad;
+ s_me_params_ip.u4_enable_alt_ref = mEnableAltRef;
+
+ s_me_params_ip.u4_enable_hpel = mHalfPelEnable;
+ s_me_params_ip.u4_enable_qpel = DEFAULT_QPEL;
+ s_me_params_ip.u4_me_speed_preset = DEFAULT_ME_SPEED;
+ s_me_params_ip.u4_srch_rng_x = DEFAULT_SRCH_RNG_X;
+ s_me_params_ip.u4_srch_rng_y = DEFAULT_SRCH_RNG_Y;
+
+ s_me_params_ip.u4_timestamp_high = -1;
+ s_me_params_ip.u4_timestamp_low = -1;
+
+ s_me_params_ip.u4_size = sizeof(ive_ctl_set_me_params_ip_t);
+ s_me_params_op.u4_size = sizeof(ive_ctl_set_me_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_me_params_ip, &s_me_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set me params = 0x%x\n", s_me_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setGopParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_gop_params_ip_t s_gop_params_ip;
+ ive_ctl_set_gop_params_op_t s_gop_params_op;
+
+ s_gop_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_gop_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_GOP_PARAMS;
+
+ s_gop_params_ip.u4_i_frm_interval = mIInterval;
+ s_gop_params_ip.u4_idr_frm_interval = mIDRInterval;
+
+ s_gop_params_ip.u4_timestamp_high = -1;
+ s_gop_params_ip.u4_timestamp_low = -1;
+
+ s_gop_params_ip.u4_size = sizeof(ive_ctl_set_gop_params_ip_t);
+ s_gop_params_op.u4_size = sizeof(ive_ctl_set_gop_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_gop_params_ip, &s_gop_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set GOP params = 0x%x\n",
+ s_gop_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setProfileParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_profile_params_ip_t s_profile_params_ip;
+ ive_ctl_set_profile_params_op_t s_profile_params_op;
+
+ s_profile_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS;
+
+ s_profile_params_ip.e_profile = DEFAULT_EPROFILE;
+ s_profile_params_ip.u4_entropy_coding_mode = mEntropyMode;
+ s_profile_params_ip.u4_timestamp_high = -1;
+ s_profile_params_ip.u4_timestamp_low = -1;
+
+ s_profile_params_ip.u4_size = sizeof(ive_ctl_set_profile_params_ip_t);
+ s_profile_params_op.u4_size = sizeof(ive_ctl_set_profile_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_profile_params_ip, &s_profile_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set profile params = 0x%x\n",
+ s_profile_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setDeblockParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_deblock_params_ip_t s_deblock_params_ip;
+ ive_ctl_set_deblock_params_op_t s_deblock_params_op;
+
+ s_deblock_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_deblock_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_DEBLOCK_PARAMS;
+
+ s_deblock_params_ip.u4_disable_deblock_level = mDisableDeblkLevel;
+
+ s_deblock_params_ip.u4_timestamp_high = -1;
+ s_deblock_params_ip.u4_timestamp_low = -1;
+
+ s_deblock_params_ip.u4_size = sizeof(ive_ctl_set_deblock_params_ip_t);
+ s_deblock_params_op.u4_size = sizeof(ive_ctl_set_deblock_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_deblock_params_ip, &s_deblock_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to enable/disable deblock params = 0x%x\n",
+ s_deblock_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::logVersion() {
+ ive_ctl_getversioninfo_ip_t s_ctl_ip;
+ ive_ctl_getversioninfo_op_t s_ctl_op;
+ UWORD8 au1_buf[512];
+ IV_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVE_CMD_CTL_GETVERSION;
+ s_ctl_ip.u4_size = sizeof(ive_ctl_getversioninfo_ip_t);
+ s_ctl_op.u4_size = sizeof(ive_ctl_getversioninfo_op_t);
+ s_ctl_ip.pu1_version = au1_buf;
+ s_ctl_ip.u4_version_bufsize = sizeof(au1_buf);
+
+ status = ive_api_function(mCodecCtx, (void *) &s_ctl_ip, (void *) &s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in getting version: 0x%x", s_ctl_op.u4_error_code);
+ } else {
+ ALOGV("Ittiam encoder version: %s", (char *)s_ctl_ip.pu1_version);
+ }
+ return;
+}
+
+c2_status_t C2SoftAvcEnc::initEncoder() {
+ IV_STATUS_T status;
+ WORD32 level;
+ uint32_t displaySizeY;
+
+ CHECK(!mStarted);
+
+ c2_status_t errType = C2_OK;
+
+ displaySizeY = mWidth * mHeight;
+ if (displaySizeY > (1920 * 1088)) {
+ level = 50;
+ } else if (displaySizeY > (1280 * 720)) {
+ level = 40;
+ } else if (displaySizeY > (720 * 576)) {
+ level = 31;
+ } else if (displaySizeY > (624 * 320)) {
+ level = 30;
+ } else if (displaySizeY > (352 * 288)) {
+ level = 21;
+ } else if (displaySizeY > (176 * 144)) {
+ level = 20;
+ } else {
+ level = 10;
+ }
+ mAVCEncLevel = MAX(level, mAVCEncLevel);
+
+ mStride = mWidth;
+
+ // TODO
+ mIvVideoColorFormat = IV_YUV_420P;
+
+ ALOGD("Params width %d height %d level %d colorFormat %d", mWidth,
+ mHeight, mAVCEncLevel, mIvVideoColorFormat);
+
+ /* Getting Number of MemRecords */
+ {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(iv_num_mem_rec_ip_t);
+ s_num_mem_rec_op.u4_size = sizeof(iv_num_mem_rec_op_t);
+
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+ status = ive_api_function(0, &s_num_mem_rec_ip, &s_num_mem_rec_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Get number of memory records failed = 0x%x\n",
+ s_num_mem_rec_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+ }
+
+ /* Allocate array to hold memory records */
+ if (mNumMemRecords > SIZE_MAX / sizeof(iv_mem_rec_t)) {
+ ALOGE("requested memory size is too big.");
+ return C2_CORRUPTED;
+ }
+ mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (NULL == mMemRecords) {
+ ALOGE("Unable to allocate memory for hold memory records: Size %zu",
+ mNumMemRecords * sizeof(iv_mem_rec_t));
+ mSignalledError = true;
+ // TODO: notify(error, C2_CORRUPTED, 0, 0);
+ return C2_CORRUPTED;
+ }
+
+ {
+ iv_mem_rec_t *ps_mem_rec;
+ ps_mem_rec = mMemRecords;
+ for (size_t i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->u4_size = sizeof(iv_mem_rec_t);
+ ps_mem_rec->pv_base = NULL;
+ ps_mem_rec->u4_mem_size = 0;
+ ps_mem_rec->u4_mem_alignment = 0;
+ ps_mem_rec->e_mem_type = IV_NA_MEM_TYPE;
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Getting MemRecords Attributes */
+ {
+ iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
+ iv_fill_mem_rec_op_t s_fill_mem_rec_op;
+
+ s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
+ s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
+
+ s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
+ s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
+ s_fill_mem_rec_ip.u4_max_wd = mWidth;
+ s_fill_mem_rec_ip.u4_max_ht = mHeight;
+ s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
+ s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
+ s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+
+ status = ive_api_function(0, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Fill memory records failed = 0x%x\n",
+ s_fill_mem_rec_op.u4_error_code);
+ mSignalledError = true;
+ // TODO: notify(error, C2_CORRUPTED, 0, 0);
+ return C2_CORRUPTED;
+ }
+ }
+
+ /* Allocating Memory for Mem Records */
+ {
+ WORD32 total_size;
+ iv_mem_rec_t *ps_mem_rec;
+ total_size = 0;
+ ps_mem_rec = mMemRecords;
+
+ for (size_t i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->pv_base = ive_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (ps_mem_rec->pv_base == NULL) {
+ ALOGE("Allocation failure for mem record id %zu size %u\n", i,
+ ps_mem_rec->u4_mem_size);
+ mSignalledError = true;
+ // TODO: notify(error, C2_CORRUPTED, 0, 0);
+ return C2_CORRUPTED;
+
+ }
+ total_size += ps_mem_rec->u4_mem_size;
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Codec Instance Creation */
+ {
+ ive_init_ip_t s_init_ip;
+ ive_init_op_t s_init_op;
+
+ mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
+ mCodecCtx->u4_size = sizeof(iv_obj_t);
+ mCodecCtx->pv_fxns = (void *)ive_api_function;
+
+ s_init_ip.u4_size = sizeof(ive_init_ip_t);
+ s_init_op.u4_size = sizeof(ive_init_op_t);
+
+ s_init_ip.e_cmd = IV_CMD_INIT;
+ s_init_ip.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.ps_mem_rec = mMemRecords;
+ s_init_ip.u4_max_wd = mWidth;
+ s_init_ip.u4_max_ht = mHeight;
+ s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ s_init_ip.u4_max_level = mAVCEncLevel;
+ s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
+
+ if (mReconEnable || mPSNREnable) {
+ s_init_ip.u4_enable_recon = 1;
+ } else {
+ s_init_ip.u4_enable_recon = 0;
+ }
+ s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
+ s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
+ s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
+ s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
+ s_init_ip.u4_num_bframes = mBframes;
+ s_init_ip.e_content_type = IV_PROGRESSIVE;
+ s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+ s_init_ip.e_slice_mode = mSliceMode;
+ s_init_ip.u4_slice_param = mSliceParam;
+ s_init_ip.e_arch = mArch;
+ s_init_ip.e_soc = DEFAULT_SOC;
+
+ status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Init encoder failed = 0x%x\n", s_init_op.u4_error_code);
+ mSignalledError = true;
+ // TODO: notify(error, C2_CORRUPTED, 0 /* arg2 */, NULL /* data */);
+ return C2_CORRUPTED;
+ }
+ }
+
+ /* Get Codec Version */
+ logVersion();
+
+ /* set processor details */
+ setNumCores();
+
+ /* Video control Set Frame dimensions */
+ setDimensions();
+
+ /* Video control Set Frame rates */
+ setFrameRate();
+
+ /* Video control Set IPE Params */
+ setIpeParams();
+
+ /* Video control Set Bitrate */
+ setBitRate();
+
+ /* Video control Set QP */
+ setQp();
+
+ /* Video control Set AIR params */
+ setAirParams();
+
+ /* Video control Set VBV params */
+ setVbvParams();
+
+ /* Video control Set Motion estimation params */
+ setMeParams();
+
+ /* Video control Set GOP params */
+ setGopParams();
+
+ /* Video control Set Deblock params */
+ setDeblockParams();
+
+ /* Video control Set Profile params */
+ setProfileParams();
+
+ /* Video control Set in Encode header mode */
+ setEncMode(IVE_ENC_MODE_HEADER);
+
+ ALOGV("init_codec successfull");
+
+ mSpsPpsHeaderReceived = false;
+ mStarted = true;
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::releaseEncoder() {
+ IV_STATUS_T status = IV_SUCCESS;
+ iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
+ iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
+ iv_mem_rec_t *ps_mem_rec;
+
+ if (!mStarted) {
+ return C2_OK;
+ }
+
+ s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
+ s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
+ s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
+ s_retrieve_mem_ip.ps_mem_rec = mMemRecords;
+
+ status = ive_api_function(mCodecCtx, &s_retrieve_mem_ip, &s_retrieve_mem_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to retrieve memory records = 0x%x\n",
+ s_retrieve_mem_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+
+ /* Free memory records */
+ ps_mem_rec = mMemRecords;
+ for (size_t i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) {
+ ive_aligned_free(ps_mem_rec->pv_base);
+ ps_mem_rec++;
+ }
+
+ free(mMemRecords);
+
+ // clear other pointers into the space being free()d
+ mCodecCtx = NULL;
+
+ mStarted = false;
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setEncodeArgs(
+ ive_video_encode_ip_t *ps_encode_ip,
+ ive_video_encode_op_t *ps_encode_op,
+ const C2GraphicView *const input,
+ uint8_t *base,
+ uint32_t capacity,
+ uint64_t timestamp) {
+ iv_raw_buf_t *ps_inp_raw_buf;
+
+ ps_inp_raw_buf = &ps_encode_ip->s_inp_buf;
+ ps_encode_ip->s_out_buf.pv_buf = base;
+ ps_encode_ip->s_out_buf.u4_bytes = 0;
+ ps_encode_ip->s_out_buf.u4_bufsize = capacity;
+ ps_encode_ip->u4_size = sizeof(ive_video_encode_ip_t);
+ ps_encode_op->u4_size = sizeof(ive_video_encode_op_t);
+
+ ps_encode_ip->e_cmd = IVE_CMD_VIDEO_ENCODE;
+ ps_encode_ip->pv_bufs = NULL;
+ ps_encode_ip->pv_mb_info = NULL;
+ ps_encode_ip->pv_pic_info = NULL;
+ ps_encode_ip->u4_mb_info_type = 0;
+ ps_encode_ip->u4_pic_info_type = 0;
+ ps_encode_ip->u4_is_last = 0;
+ ps_encode_ip->u4_timestamp_high = timestamp >> 32;
+ ps_encode_ip->u4_timestamp_low = timestamp & 0xFFFFFFFF;
+ ps_encode_op->s_out_buf.pv_buf = NULL;
+
+ /* Initialize color formats */
+ memset(ps_inp_raw_buf, 0, sizeof(iv_raw_buf_t));
+ ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t);
+ ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
+ if (input == nullptr) {
+ if (mSawInputEOS){
+ ps_encode_ip->u4_is_last = 1;
+ }
+ return C2_OK;
+ }
+
+ ALOGV("width = %d, height = %d", input->width(), input->height());
+ const C2PlanarLayout &layout = input->layout();
+ uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
+ uint8_t *vPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_V]);
+ int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_RGB:
+ // fall-through
+ case C2PlanarLayout::TYPE_RGBA: {
+ size_t yPlaneSize = input->width() * input->height();
+ std::unique_ptr<uint8_t[]> freeBuffer;
+ if (mFreeConversionBuffers.empty()) {
+ freeBuffer.reset(new uint8_t[yPlaneSize * 3 / 2]);
+ } else {
+ freeBuffer.swap(mFreeConversionBuffers.front());
+ mFreeConversionBuffers.pop_front();
+ }
+ yPlane = freeBuffer.get();
+ mConversionBuffersInUse.push_back(std::move(freeBuffer));
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ yStride = input->width();
+ uStride = vStride = input->width() / 2;
+ ConvertRGBToPlanarYUV(yPlane, yStride, input->height(), *input);
+ break;
+ }
+ case C2PlanarLayout::TYPE_YUV:
+ // fall-through
+ case C2PlanarLayout::TYPE_YUVA:
+ // Do nothing
+ break;
+ default:
+ ALOGE("Unrecognized plane type: %d", layout.type);
+ return C2_BAD_VALUE;
+ }
+
+ switch (mIvVideoColorFormat) {
+ case IV_YUV_420P:
+ {
+ // input buffer is supposed to be const but Ittiam API wants bare pointer.
+ ps_inp_raw_buf->apv_bufs[0] = yPlane;
+ ps_inp_raw_buf->apv_bufs[1] = uPlane;
+ ps_inp_raw_buf->apv_bufs[2] = vPlane;
+
+ ps_inp_raw_buf->au4_wd[0] = input->width();
+ ps_inp_raw_buf->au4_wd[1] = input->width() / 2;
+ ps_inp_raw_buf->au4_wd[2] = input->width() / 2;
+
+ ps_inp_raw_buf->au4_ht[0] = input->height();
+ ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+ ps_inp_raw_buf->au4_ht[2] = input->height() / 2;
+
+ ps_inp_raw_buf->au4_strd[0] = yStride;
+ ps_inp_raw_buf->au4_strd[1] = uStride;
+ ps_inp_raw_buf->au4_strd[2] = vStride;
+ break;
+ }
+
+ case IV_YUV_422ILE:
+ {
+ // TODO
+ // ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
+ // ps_inp_raw_buf->au4_wd[0] = mWidth * 2;
+ // ps_inp_raw_buf->au4_ht[0] = mHeight;
+ // ps_inp_raw_buf->au4_strd[0] = mStride * 2;
+ break;
+ }
+
+ case IV_YUV_420SP_UV:
+ case IV_YUV_420SP_VU:
+ default:
+ {
+ ps_inp_raw_buf->apv_bufs[0] = yPlane;
+ ps_inp_raw_buf->apv_bufs[1] = uPlane;
+
+ ps_inp_raw_buf->au4_wd[0] = input->width();
+ ps_inp_raw_buf->au4_wd[1] = input->width();
+
+ ps_inp_raw_buf->au4_ht[0] = input->height();
+ ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+
+ ps_inp_raw_buf->au4_strd[0] = yStride;
+ ps_inp_raw_buf->au4_strd[1] = uStride;
+ break;
+ }
+ }
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ work->workletsProcessed = 0u;
+
+ IV_STATUS_T status;
+ WORD32 timeDelay, timeTaken;
+ uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+
+ // Initialize encoder if not already initialized
+ if (mCodecCtx == NULL) {
+ if (C2_OK != initEncoder()) {
+ ALOGE("Failed to initialize encoder");
+ // TODO: notify(error, C2_CORRUPTED, 0 /* arg2 */, NULL /* data */);
+ return;
+ }
+ }
+ if (mSignalledError) {
+ return;
+ }
+
+ // while (!mSawOutputEOS && !outQueue.empty()) {
+ c2_status_t error;
+ ive_video_encode_ip_t s_encode_ip;
+ ive_video_encode_op_t s_encode_op;
+
+ if (!mSpsPpsHeaderReceived) {
+ constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
+ uint8_t header[kHeaderLength];
+ error = setEncodeArgs(
+ &s_encode_ip, &s_encode_op, NULL, header, kHeaderLength, timestamp);
+ if (error != C2_OK) {
+ mSignalledError = true;
+ // TODO: notify(error, C2_CORRUPTED, 0, 0);
+ return;
+ }
+ status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Encode header failed = 0x%x\n",
+ s_encode_op.u4_error_code);
+ return;
+ } else {
+ ALOGV("Bytes Generated in header %d\n",
+ s_encode_op.s_out_buf.u4_bytes);
+ }
+
+ mSpsPpsHeaderReceived = true;
+
+ std::unique_ptr<C2StreamCsdInfo::output> csd =
+ C2StreamCsdInfo::output::alloc_unique(s_encode_op.s_out_buf.u4_bytes, 0u);
+ memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+
+ DUMP_TO_FILE(
+ mOutFile, csd->m.value, csd->flexCount());
+ }
+
+ if (mUpdateFlag) {
+ if (mUpdateFlag & kUpdateBitrate) {
+ setBitRate();
+ }
+ if (mUpdateFlag & kRequestKeyFrame) {
+ setFrameType(IV_IDR_FRAME);
+ }
+ if (mUpdateFlag & kUpdateAIRMode) {
+ setAirParams();
+ // notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
+ // OMX_IndexConfigAndroidIntraRefresh, NULL);
+ }
+ mUpdateFlag = 0;
+ }
+
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ mSawInputEOS = true;
+ }
+
+ /* In normal mode, store inputBufferInfo and this will be returned
+ when encoder consumes this input */
+ // if (!mInputDataIsMeta && (inputBufferInfo != NULL)) {
+ // for (size_t i = 0; i < MAX_INPUT_BUFFER_HEADERS; i++) {
+ // if (NULL == mInputBufferInfo[i]) {
+ // mInputBufferInfo[i] = inputBufferInfo;
+ // break;
+ // }
+ // }
+ // }
+ const C2GraphicView view =
+ work->input.buffers[0]->data().graphicBlocks().front().map().get();
+ if (view.error() != C2_OK) {
+ ALOGE("graphic view map err = %d", view.error());
+ return;
+ }
+
+ std::shared_ptr<C2LinearBlock> block;
+
+ do {
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ // TODO: error handling, proper usage, etc.
+ c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetch linear block err = %d", err);
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error() != C2_OK) {
+ ALOGE("write view map err = %d", wView.error());
+ return;
+ }
+
+ error = setEncodeArgs(
+ &s_encode_ip, &s_encode_op, &view, wView.base(), wView.capacity(), timestamp);
+ if (error != C2_OK) {
+ mSignalledError = true;
+ ALOGE("setEncodeArgs failed : %d", error);
+ // TODO: notify(error, C2_CORRUPTED, 0, 0);
+ return;
+ }
+
+ // DUMP_TO_FILE(
+ // mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],
+ // (mHeight * mStride * 3 / 2));
+
+ GETTIME(&mTimeStart, NULL);
+ /* Compute time elapsed between the end of the previous encode()
+ * call and the start of the current one */
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+ status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+ if (IV_SUCCESS != status) {
+ if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
+ // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
+ mOutBufferSize *= 2;
+ continue;
+ }
+ ALOGE("Encode Frame failed = 0x%x\n",
+ s_encode_op.u4_error_code);
+ mSignalledError = true;
+ // TODO: notify(error, C2_CORRUPTED, 0, 0);
+ return;
+ }
+ } while (IV_SUCCESS != status);
+
+ // Hold input buffer reference
+ mBuffers[s_encode_ip.s_inp_buf.apv_bufs[0]] = work->input.buffers[0];
+
+ GETTIME(&mTimeEnd, NULL);
+ /* Compute time taken for encode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ s_encode_op.s_out_buf.u4_bytes);
+
+ void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+ /* If encoder frees up an input buffer, mark it as free */
+ if (freed != NULL) {
+ if (mBuffers.count(freed) == 0u) {
+ // TODO: error
+ return;
+ }
+ // Release input buffer reference
+ mBuffers.erase(freed);
+
+ auto it = std::find_if(
+ mConversionBuffersInUse.begin(), mConversionBuffersInUse.end(),
+ [freed](const auto &elem) { return elem.get() == freed; });
+ if (it != mConversionBuffersInUse.end()) {
+ mFreeConversionBuffers.push_back(std::move(*it));
+ mConversionBuffersInUse.erase(it);
+ }
+ }
+
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.ordinal.timestamp =
+ ((uint64_t)s_encode_op.u4_timestamp_high << 32) | s_encode_op.u4_timestamp_low;
+ work->worklets.front()->output.buffers.clear();
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(block, 0, s_encode_op.s_out_buf.u4_bytes);
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->workletsProcessed = 1u;
+
+ if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) {
+ buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2PictureTypeKeyFrame));
+ }
+
+ if (s_encode_op.u4_is_last) {
+ // outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+ mSawOutputEOS = true;
+ } else {
+ // outputBufferHeader->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ }
+}
+
+c2_status_t C2SoftAvcEnc::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // TODO: use IVE_CMD_CTL_FLUSH?
+ (void)drainMode;
+ (void)pool;
+ return C2_OK;
+}
+
+
+class C2SoftAvcEncFactory : public C2ComponentFactory {
+public:
+ virtual c2_status_t createComponent(
+ c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+ std::function<void(::android::C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(new C2SoftAvcEnc("avcenc", id), deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(::android::C2ComponentInterface*)> deleter) override {
+ *interface =
+ SimpleC2Interface::Builder("avcenc", id, deleter)
+ .inputFormat(C2FormatVideo)
+ .outputFormat(C2FormatCompressed)
+ .build();
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAvcEncFactory() override = default;
+};
+
+} // namespace android
+
+extern "C" ::android::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAvcEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::android::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
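
A minimal usage sketch for the two extern "C" entry points above (not part of the patch; the library name and node id are assumptions): a Codec2 client would typically dlopen the plugin, resolve the factory functions, and build the component through C2ComponentFactory::createComponent().

    #include <dlfcn.h>
    #include <memory>

    #include <C2Component.h>  // C2Component / C2ComponentFactory declarations

    std::shared_ptr<::android::C2Component> loadSoftAvcEnc() {
        // Assumed library name, for illustration only.
        void *lib = dlopen("libstagefright_soft_c2avcenc.so", RTLD_NOW);
        if (lib == nullptr) return nullptr;

        typedef ::android::C2ComponentFactory *(*CreateFn)();
        typedef void (*DestroyFn)(::android::C2ComponentFactory *);
        CreateFn create = (CreateFn)dlsym(lib, "CreateCodec2Factory");
        DestroyFn destroy = (DestroyFn)dlsym(lib, "DestroyCodec2Factory");
        if (create == nullptr || destroy == nullptr) return nullptr;

        ::android::C2ComponentFactory *factory = create();
        std::shared_ptr<::android::C2Component> component;
        factory->createComponent(0 /* node id */, &component,
                                 [](::android::C2Component *p) { delete p; });
        destroy(factory);
        return component;  // the dlopen handle is intentionally kept loaded
    }
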
diff --git a/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.h b/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.h
new file mode 100644
index 0000000..5b2a514
--- /dev/null
+++ b/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.h
@@ -0,0 +1,293 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2_SOFT_AVC_ENC_H__
+#define C2_SOFT_AVC_ENC_H__
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Vector.h>
+
+#include <SimpleC2Component.h>
+
+namespace android {
+
+#define CODEC_MAX_CORES 4
+#define LEN_STATUS_BUFFER (10 * 1024)
+#define MAX_VBV_BUFF_SIZE (120 * 16384)
+#define MAX_NUM_IO_BUFS 3
+
+#define DEFAULT_MAX_REF_FRM 2
+#define DEFAULT_MAX_REORDER_FRM 0
+#define DEFAULT_QP_MIN 10
+#define DEFAULT_QP_MAX 40
+#define DEFAULT_MAX_BITRATE 20000000
+#define DEFAULT_MAX_SRCH_RANGE_X 256
+#define DEFAULT_MAX_SRCH_RANGE_Y 256
+#define DEFAULT_MAX_FRAMERATE 120000
+#define DEFAULT_NUM_CORES 1
+#define DEFAULT_NUM_CORES_PRE_ENC 0
+#define DEFAULT_FPS 30
+#define DEFAULT_ENC_SPEED IVE_NORMAL
+
+#define DEFAULT_MEM_REC_CNT 0
+#define DEFAULT_RECON_ENABLE 0
+#define DEFAULT_CHKSUM_ENABLE 0
+#define DEFAULT_START_FRM 0
+#define DEFAULT_NUM_FRMS 0xFFFFFFFF
+#define DEFAULT_INP_COLOR_FORMAT IV_YUV_420SP_VU
+#define DEFAULT_RECON_COLOR_FORMAT IV_YUV_420P
+#define DEFAULT_LOOPBACK 0
+#define DEFAULT_SRC_FRAME_RATE 30
+#define DEFAULT_TGT_FRAME_RATE 30
+#define DEFAULT_MAX_WD 1920
+#define DEFAULT_MAX_HT 1920
+#define DEFAULT_MAX_LEVEL 41
+#define DEFAULT_STRIDE 0
+#define DEFAULT_WD 1280
+#define DEFAULT_HT 720
+#define DEFAULT_PSNR_ENABLE 0
+#define DEFAULT_ME_SPEED 100
+#define DEFAULT_ENABLE_FAST_SAD 0
+#define DEFAULT_ENABLE_ALT_REF 0
+#define DEFAULT_RC_MODE IVE_RC_STORAGE
+#define DEFAULT_BITRATE 6000000
+#define DEFAULT_I_QP 22
+#define DEFAULT_I_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_I_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_P_QP 28
+#define DEFAULT_P_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_P_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_B_QP 22
+#define DEFAULT_B_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_B_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_AIR IVE_AIR_MODE_NONE
+#define DEFAULT_AIR_REFRESH_PERIOD 30
+#define DEFAULT_SRCH_RNG_X 64
+#define DEFAULT_SRCH_RNG_Y 48
+#define DEFAULT_I_INTERVAL 30
+#define DEFAULT_IDR_INTERVAL 1000
+#define DEFAULT_B_FRAMES 0
+#define DEFAULT_DISABLE_DEBLK_LEVEL 0
+#define DEFAULT_HPEL 1
+#define DEFAULT_QPEL 1
+#define DEFAULT_I4 1
+#define DEFAULT_EPROFILE IV_PROFILE_BASE
+#define DEFAULT_ENTROPY_MODE 0
+#define DEFAULT_SLICE_MODE IVE_SLICE_MODE_NONE
+#define DEFAULT_SLICE_PARAM 256
+#define DEFAULT_ARCH ARCH_ARM_A9Q
+#define DEFAULT_SOC SOC_GENERIC
+#define DEFAULT_INTRA4x4 0
+#define STRLENGTH 500
+#define DEFAULT_CONSTRAINED_INTRA 0
+
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+#define MAX(a, b) (((a) > (b)) ? (a) : (b))
+#define ALIGN16(x) ((((x) + 15) >> 4) << 4)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
+#define ALIGN4096(x) ((((x) + 4095) >> 12) << 12)
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+#define ive_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ive_aligned_free(buf) free(buf)
+
+struct C2SoftAvcEnc : public SimpleC2Component {
+ C2SoftAvcEnc(const char *name, c2_node_id_t id);
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+protected:
+ virtual ~C2SoftAvcEnc();
+
+private:
+ enum {
+ kUpdateBitrate = 1 << 0,
+ kRequestKeyFrame = 1 << 1,
+ kUpdateAIRMode = 1 << 2,
+ };
+
+ // OMX input buffer's timestamp and flags
+ typedef struct {
+ int64_t mTimeUs;
+ int32_t mFlags;
+ } InputBufferInfo;
+
+ int32_t mStride;
+
+ struct timeval mTimeStart; // Time at the start of encode()
+ struct timeval mTimeEnd; // Time at the end of encode()
+
+ int mUpdateFlag;
+
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+ char mOutFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ IV_COLOR_FORMAT_T mIvVideoColorFormat;
+
+ IV_PROFILE_T mAVCEncProfile;
+ WORD32 mAVCEncLevel;
+ bool mStarted;
+ bool mSpsPpsHeaderReceived;
+
+ bool mSawInputEOS;
+ bool mSawOutputEOS;
+ bool mSignalledError;
+ bool mIntra4x4;
+ bool mEnableFastSad;
+ bool mEnableAltRef;
+ bool mReconEnable;
+ bool mPSNREnable;
+ bool mEntropyMode;
+ bool mConstrainedIntraFlag;
+ IVE_SPEED_CONFIG mEncSpeed;
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by codec
+ size_t mNumCores; // Number of cores used by the codec
+
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mFramerate;
+ uint32_t mBitrate;
+ uint32_t mOutBufferSize;
+ UWORD32 mHeaderGenerated;
+ UWORD32 mBframes;
+ IV_ARCH_T mArch;
+ IVE_SLICE_MODE_T mSliceMode;
+ UWORD32 mSliceParam;
+ bool mHalfPelEnable;
+ UWORD32 mIInterval;
+ UWORD32 mIDRInterval;
+ UWORD32 mDisableDeblkLevel;
+ IVE_AIR_MODE_T mAIRMode;
+ UWORD32 mAIRRefreshPeriod;
+ std::map<const void *, std::shared_ptr<C2Buffer>> mBuffers;
+ std::list<std::unique_ptr<uint8_t[]>> mFreeConversionBuffers;
+ std::list<std::unique_ptr<uint8_t[]>> mConversionBuffersInUse;
+
+ void initEncParams();
+ c2_status_t initEncoder();
+ c2_status_t releaseEncoder();
+
+ c2_status_t setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type);
+ c2_status_t setQp();
+ c2_status_t setEncMode(IVE_ENC_MODE_T e_enc_mode);
+ c2_status_t setDimensions();
+ c2_status_t setNumCores();
+ c2_status_t setFrameRate();
+ c2_status_t setIpeParams();
+ c2_status_t setBitRate();
+ c2_status_t setAirParams();
+ c2_status_t setMeParams();
+ c2_status_t setGopParams();
+ c2_status_t setProfileParams();
+ c2_status_t setDeblockParams();
+ c2_status_t setVbvParams();
+ void logVersion();
+ c2_status_t setEncodeArgs(
+ ive_video_encode_ip_t *ps_encode_ip,
+ ive_video_encode_op_t *ps_encode_op,
+ const C2GraphicView *const input,
+ uint8_t *base,
+ uint32_t capacity,
+ uint64_t timestamp);
+
+ DISALLOW_EVIL_CONSTRUCTORS(C2SoftAvcEnc);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/sdcard/media/avce_input"
+#define INPUT_DUMP_EXT "yuv"
+#define OUTPUT_DUMP_PATH "/sdcard/media/avce_output"
+#define OUTPUT_DUMP_EXT "h264"
+
+#define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+ strcpy(mOutFile, ""); \
+ sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ OUTPUT_DUMP_EXT); \
+}
+
+#define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ ALOGD("Opened file %s", m_filename); \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+{ \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (int)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename);\
+ if (fp != NULL) \
+ fclose(fp); \
+ } \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif // C2_SOFT_AVC_ENC_H__
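
The alignment helpers defined above round a value up to the next multiple of 16, 128, or 4096; this is why a 1080-line picture is treated as 1088 coded luma lines, matching the 1920 * 1088 bound used for the level decision in C2SoftAvcEnc.cpp. A standalone sketch (not part of the patch) that checks the arithmetic at compile time:

    #include <cassert>

    #define ALIGN16(x) ((((x) + 15) >> 4) << 4)
    #define ALIGN128(x) ((((x) + 127) >> 7) << 7)

    int main() {
        static_assert(ALIGN16(1080) == 1088, "1080 rounds up to 1088 (next multiple of 16)");
        static_assert(ALIGN128(1080) == 1152, "1080 rounds up to 1152 (next multiple of 128)");
        assert(ALIGN16(1088) == 1088);  // already-aligned values are unchanged
        return 0;
    }
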
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h
index dc4125f..a434f81 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h
@@ -22,8 +22,20 @@
namespace android {
-#define FOURCC(c1, c2, c3, c4) \
- ((c1) << 24 | (c2) << 16 | (c3) << 8 | (c4))
+constexpr int FOURCC(unsigned char c1, unsigned char c2, unsigned char c3, unsigned char c4) {
+ return ((c1) << 24 | (c2) << 16 | (c3) << 8 | (c4));
+}
+
+template <size_t N>
+constexpr int32_t FOURCC(const char (&s) [N]) {
+ static_assert(N == 5, "fourcc: wrong length");
+ return
+ (unsigned char) s[0] << 24 |
+ (unsigned char) s[1] << 16 |
+ (unsigned char) s[2] << 8 |
+ (unsigned char) s[3] << 0;
+}
+
uint16_t U16_AT(const uint8_t *ptr);
uint32_t U32_AT(const uint8_t *ptr);
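
The two constexpr FOURCC() overloads introduced above pack four characters big-endian into a 32-bit value; the string-literal form additionally rejects anything that is not exactly four characters at compile time. A self-contained sketch (reproducing the overloads so it builds outside ByteUtils.h):

    #include <cstddef>
    #include <cstdint>

    constexpr int FOURCC(unsigned char c1, unsigned char c2, unsigned char c3, unsigned char c4) {
        return ((c1) << 24 | (c2) << 16 | (c3) << 8 | (c4));
    }

    template <size_t N>
    constexpr int32_t FOURCC(const char (&s) [N]) {
        static_assert(N == 5, "fourcc: wrong length");
        return (unsigned char) s[0] << 24 |
               (unsigned char) s[1] << 16 |
               (unsigned char) s[2] << 8 |
               (unsigned char) s[3] << 0;
    }

    static_assert(FOURCC('a', 'v', 'c', '1') == 0x61766331, "character overload packs big-endian");
    static_assert(FOURCC("avc1") == 0x61766331, "string-literal overload matches");
    // FOURCC("avc");   // would fail to compile: "fourcc: wrong length"
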
diff --git a/media/libstagefright/include/CCodecBufferChannel.h b/media/libstagefright/include/CCodecBufferChannel.h
index c5062d6..57a1374 100644
--- a/media/libstagefright/include/CCodecBufferChannel.h
+++ b/media/libstagefright/include/CCodecBufferChannel.h
@@ -27,6 +27,7 @@
#include <C2Component.h>
#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/gbs/GraphicBufferSource.h>
#include <media/stagefright/CodecBase.h>
#include <media/ICrypto.h>
@@ -35,134 +36,9 @@
/**
* BufferChannelBase implementation for CCodec.
*/
-class CCodecBufferChannel : public BufferChannelBase {
+class CCodecBufferChannel
+ : public BufferChannelBase, public std::enable_shared_from_this<CCodecBufferChannel> {
public:
- /**
- * Base class for representation of buffers at one port.
- */
- class Buffers {
- public:
- Buffers() = default;
- virtual ~Buffers() = default;
-
- /**
- * Set format for MediaCodec-facing buffers.
- */
- inline void setFormat(const sp<AMessage> &format) { mFormat = format; }
-
- /**
- * Returns true if the buffers are operating under array mode.
- */
- virtual bool isArrayMode() { return false; }
-
- /**
- * Fills the vector with MediaCodecBuffer's if in array mode; otherwise,
- * no-op.
- */
- virtual void getArray(Vector<sp<MediaCodecBuffer>> *) {}
-
- protected:
- // Format to be used for creating MediaCodec-facing buffers.
- sp<AMessage> mFormat;
-
- private:
- DISALLOW_EVIL_CONSTRUCTORS(Buffers);
- };
-
- class InputBuffers : public Buffers {
- public:
- using Buffers::Buffers;
- virtual ~InputBuffers() = default;
-
- /**
- * Set a block pool to obtain input memory blocks.
- */
- inline void setPool(const std::shared_ptr<C2BlockPool> &pool) { mPool = pool; }
-
- /**
- * Get a new MediaCodecBuffer for input and its corresponding index.
- * Returns false if no new buffer can be obtained at the moment.
- */
- virtual bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) = 0;
-
- /**
- * Release the buffer obtained from requestNewBuffer() and get the
- * associated C2Buffer object back. Returns empty shared_ptr if the
- * buffer is not on file.
- */
- virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
-
- /**
- * Flush internal state. After this call, no index or buffer previously
- * returned from requestNewBuffer() is valid.
- */
- virtual void flush() = 0;
-
- /**
- * Return array-backed version of input buffers. The returned object
- * shall retain the internal state so that it will honor index and
- * buffer from previous calls of requestNewBuffer().
- */
- virtual std::unique_ptr<InputBuffers> toArrayMode() = 0;
-
- protected:
- // Pool to obtain blocks for input buffers.
- std::shared_ptr<C2BlockPool> mPool;
-
- private:
- DISALLOW_EVIL_CONSTRUCTORS(InputBuffers);
- };
-
- class OutputBuffers : public Buffers {
- public:
- using Buffers::Buffers;
- virtual ~OutputBuffers() = default;
-
- /**
- * Register output C2Buffer from the component and obtain corresponding
- * index and MediaCodecBuffer object. Returns false if registration
- * fails.
- */
- virtual bool registerBuffer(
- const std::shared_ptr<C2Buffer> &buffer,
- size_t *index,
- sp<MediaCodecBuffer> *codecBuffer) = 0;
-
- /**
- * Register codec specific data as a buffer to be consistent with
- * MediaCodec behavior.
- */
- virtual bool registerCsd(
- const C2StreamCsdInfo::output * /* csd */,
- size_t * /* index */,
- sp<MediaCodecBuffer> * /* codecBuffer */) {
- return false;
- }
-
- /**
- * Release the buffer obtained from registerBuffer() and get the
- * associated C2Buffer object back. Returns empty shared_ptr if the
- * buffer is not on file.
- */
- virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
-
- /**
- * Flush internal state. After this call, no index or buffer previously
- * returned from registerBuffer() is valid.
- */
- virtual void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) = 0;
-
- /**
- * Return array-backed version of output buffers. The returned object
- * shall retain the internal state so that it will honor index and
- * buffer from previous calls of registerBuffer().
- */
- virtual std::unique_ptr<OutputBuffers> toArrayMode() = 0;
-
- private:
- DISALLOW_EVIL_CONSTRUCTORS(OutputBuffers);
- };
-
CCodecBufferChannel(const std::function<void(status_t, enum ActionCode)> &onError);
virtual ~CCodecBufferChannel();
@@ -186,23 +62,21 @@
// Methods below are interface for CCodec to use.
+ /**
+ * Set the component object for buffer processing.
+ */
void setComponent(const std::shared_ptr<C2Component> &component);
+
+ /**
+ * Set output graphic surface for rendering.
+ */
status_t setSurface(const sp<Surface> &surface);
/**
- * Set C2BlockPool for input buffers.
- *
- * TODO: start timestamp?
+ * Set the GraphicBufferSource object from which the component extracts input
+ * buffers.
*/
- void setInputBufferAllocator(const sp<C2BlockPool> &inAlloc);
-
- /**
- * Set C2BlockPool for output buffers. This object shall never use the
- * allocator itself; it's just passed
- *
- * TODO: start timestamp?
- */
- void setOutputBufferAllocator(const sp<C2BlockPool> &outAlloc);
+ status_t setGraphicBufferSource(const sp<GraphicBufferSource> &source);
/**
* Start queueing buffers to the component. This object should never queue
@@ -219,11 +93,17 @@
void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork);
/**
- * Notify MediaCodec about work done.
+ * Notify input client about work done.
*
- * @param workItems finished work items.
+ * @param work finished work item.
*/
- void onWorkDone(std::vector<std::unique_ptr<C2Work>> workItems);
+ void onWorkDone(const std::unique_ptr<C2Work> &work);
+
+ // Internal classes
+ class Buffers;
+ class InputBuffers;
+ class OutputBuffers;
+ class InputBufferClient;
private:
class QueueGuard;
@@ -276,12 +156,17 @@
bool mRunning;
};
+ class C2ComponentWrapper;
+
+ void feedInputBufferIfAvailable();
+
QueueSync mSync;
sp<MemoryDealer> mDealer;
sp<IMemory> mDecryptDestination;
int32_t mHeapSeqNum;
std::shared_ptr<C2Component> mComponent;
+ std::shared_ptr<InputBufferClient> mInputClient;
std::function<void(status_t, enum ActionCode)> mOnError;
std::shared_ptr<C2BlockPool> mInputAllocator;
QueueSync mQueueSync;
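
Deriving CCodecBufferChannel from std::enable_shared_from_this lets the channel hand out owning references to itself from asynchronous callbacks (for example, on the GraphicBufferSource input path), so a pending callback cannot outlive the object it targets. A generic sketch of the idiom with hypothetical names (not the actual CCodec code):

    #include <functional>
    #include <memory>

    class Channel : public std::enable_shared_from_this<Channel> {
    public:
        // Returns a callback that keeps this Channel alive for as long as
        // the callback itself is held.
        std::function<void()> makeInputCallback() {
            std::shared_ptr<Channel> self = shared_from_this();
            return [self] { self->feedInput(); };
        }

    private:
        void feedInput() { /* queue the next input buffer */ }
    };

    // Note: the object must already be managed by a shared_ptr before
    // shared_from_this() is called, e.g.:
    //   auto channel = std::make_shared<Channel>();
    //   auto cb = channel->makeInputCallback();
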
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 3196b10..fa22003 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -495,7 +495,7 @@
status_t verifySupportForProfileAndLevel(int32_t profile, int32_t level);
status_t configureBitrate(
- int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode, int32_t bitrate, int32_t quality = 0);
void configureEncoderLatency(const sp<AMessage> &msg);
status_t setupErrorCorrectionParameters();
diff --git a/media/libstagefright/include/media/stagefright/CCodec.h b/media/libstagefright/include/media/stagefright/CCodec.h
index 3e24bbe..c689761 100644
--- a/media/libstagefright/include/media/stagefright/CCodec.h
+++ b/media/libstagefright/include/media/stagefright/CCodec.h
@@ -24,6 +24,7 @@
#include <android/native_window.h>
#include <media/hardware/MetadataBufferType.h>
#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/gbs/GraphicBufferSource.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/FrameRenderTracker.h>
#include <media/stagefright/MediaDefs.h>
@@ -58,6 +59,7 @@
virtual void signalRequestIDRFrame() override;
void initiateReleaseIfStuck();
+ void onWorkDone(std::vector<std::unique_ptr<C2Work>> &workItems);
protected:
virtual ~CCodec();
@@ -77,6 +79,10 @@
void flush();
void release(bool sendCallback);
+ void createInputSurface();
+ void setInputSurface(const sp<PersistentSurface> &surface);
+ status_t setupInputSurface(const sp<GraphicBufferSource> &source);
+
void setDeadline(const TimePoint &deadline);
enum {
@@ -86,6 +92,9 @@
kWhatFlush,
kWhatStop,
kWhatRelease,
+ kWhatCreateInputSurface,
+ kWhatSetInputSurface,
+ kWhatWorkDone,
};
enum {
@@ -104,14 +113,17 @@
struct State {
inline State() : mState(RELEASED) {}
+ inline int get() const { return mState; }
+ inline void set(int newState) { mState = newState; }
+ std::shared_ptr<C2Component> comp;
+ private:
int mState;
- std::shared_ptr<C2Component> mComp;
};
struct Formats {
- sp<AMessage> mInputFormat;
- sp<AMessage> mOutputFormat;
+ sp<AMessage> inputFormat;
+ sp<AMessage> outputFormat;
};
Mutexed<State> mState;
@@ -119,6 +131,7 @@
std::shared_ptr<C2Component::Listener> mListener;
Mutexed<TimePoint> mDeadline;
Mutexed<Formats> mFormats;
+ Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
DISALLOW_EVIL_CONSTRUCTORS(CCodec);
};
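
The new mWorkDoneQueue member and kWhatWorkDone message suggest a queue-then-notify handoff: the Codec2 listener thread enqueues finished work and pokes the looper, which drains the queue on its own thread. A simplified sketch of that pattern (plain std::mutex instead of the Mutexed<> helper, hypothetical names, not the actual CCodec implementation):

    #include <list>
    #include <memory>
    #include <mutex>

    struct Work {};  // stand-in for C2Work in this sketch

    class WorkDoneQueue {
    public:
        // Listener thread: stash finished work; returns true if the consumer
        // should be notified (i.e. the queue was previously empty).
        bool push(std::unique_ptr<Work> work) {
            std::lock_guard<std::mutex> lock(mLock);
            bool wasEmpty = mQueue.empty();
            mQueue.push_back(std::move(work));
            return wasEmpty;
        }

        // Looper thread: drain everything accumulated since the last notification.
        std::list<std::unique_ptr<Work>> drain() {
            std::lock_guard<std::mutex> lock(mLock);
            std::list<std::unique_ptr<Work>> out;
            out.swap(mQueue);
            return out;
        }

    private:
        std::mutex mLock;
        std::list<std::unique_ptr<Work>> mQueue;
    };
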