Merge "Fix typos in KM4 interface definition documentation"
diff --git a/audio/2.0/default/StreamOut.cpp b/audio/2.0/default/StreamOut.cpp
index 0bedc74..49a6b12 100644
--- a/audio/2.0/default/StreamOut.cpp
+++ b/audio/2.0/default/StreamOut.cpp
@@ -164,6 +164,9 @@
}
mCallback.clear();
mDevice->closeOutputStream(mStream);
+ // Closing the output stream in the HAL waits for the callback to finish,
+ // and joins the callback thread. Thus it is guaranteed that the callback
+ // thread will not be accessing our object anymore.
mStream = nullptr;
}
@@ -404,6 +407,8 @@
Return<Result> StreamOut::setCallback(const sp<IStreamOutCallback>& callback) {
if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
+ // Safe to pass 'this' because it is guaranteed that the callback thread
+ // is joined prior to exit from StreamOut's destructor.
int result = mStream->set_callback(mStream, StreamOut::asyncCallback, this);
if (result == 0) {
mCallback = callback;
@@ -420,19 +425,27 @@
// static
int StreamOut::asyncCallback(stream_callback_event_t event, void*,
void* cookie) {
- wp<StreamOut> weakSelf(reinterpret_cast<StreamOut*>(cookie));
- sp<StreamOut> self = weakSelf.promote();
- if (self == nullptr || self->mCallback == nullptr) return 0;
+ // It is guaranteed that the callback thread is joined prior
+ // to exiting from StreamOut's destructor. Must *not* use sp<StreamOut>
+ // here because it can make this code the last owner of StreamOut,
+ // and an attempt to run the destructor on the callback thread
+ // will cause a deadlock in the legacy HAL code.
+ StreamOut *self = reinterpret_cast<StreamOut*>(cookie);
+ // It is safe to hold a local sp<> to the callback here: even if the
+ // reference in the StreamOut instance is cleared in the meantime, it does
+ // not matter on which thread IStreamOutCallback's destructor runs.
+ sp<IStreamOutCallback> callback = self->mCallback;
+ if (callback.get() == nullptr) return 0;
ALOGV("asyncCallback() event %d", event);
switch (event) {
case STREAM_CBK_EVENT_WRITE_READY:
- self->mCallback->onWriteReady();
+ callback->onWriteReady();
break;
case STREAM_CBK_EVENT_DRAIN_READY:
- self->mCallback->onDrainReady();
+ callback->onDrainReady();
break;
case STREAM_CBK_EVENT_ERROR:
- self->mCallback->onError();
+ callback->onError();
break;
default:
ALOGW("asyncCallback() unknown event %d", event);
diff --git a/automotive/vehicle/2.0/types.hal b/automotive/vehicle/2.0/types.hal
index e07917e..8e1b164 100644
--- a/automotive/vehicle/2.0/types.hal
+++ b/automotive/vehicle/2.0/types.hal
@@ -1942,7 +1942,7 @@
0x0BC0
| VehiclePropertyGroup:SYSTEM
| VehiclePropertyType:INT32
- | VehicleArea:GLOBAL),
+ | VehicleArea:WINDOW),
/**
* Window Move
@@ -1959,7 +1959,7 @@
0x0BC1
| VehiclePropertyGroup:SYSTEM
| VehiclePropertyType:INT32
- | VehicleArea:GLOBAL),
+ | VehicleArea:WINDOW),
/**
* Window Vent Position
@@ -1976,7 +1976,7 @@
0x0BC2
| VehiclePropertyGroup:SYSTEM
| VehiclePropertyType:INT32
- | VehicleArea:GLOBAL),
+ | VehicleArea:WINDOW),
/**
* Window Vent Move
@@ -1993,7 +1993,7 @@
0x0BC3
| VehiclePropertyGroup:SYSTEM
| VehiclePropertyType:INT32
- | VehicleArea:GLOBAL),
+ | VehicleArea:WINDOW),
/**
* Window Lock
@@ -2007,7 +2007,7 @@
0x0BC4
| VehiclePropertyGroup:SYSTEM
| VehiclePropertyType:BOOLEAN
- | VehicleArea:GLOBAL),
+ | VehicleArea:WINDOW),
/**
diff --git a/broadcastradio/2.0/Android.bp b/broadcastradio/2.0/Android.bp
index afbd6d4..1d7861e 100644
--- a/broadcastradio/2.0/Android.bp
+++ b/broadcastradio/2.0/Android.bp
@@ -21,9 +21,11 @@
"IdentifierType",
"Metadata",
"MetadataKey",
+ "ProgramFilter",
"ProgramIdentifier",
"ProgramInfo",
"ProgramInfoFlags",
+ "ProgramListChunk",
"ProgramSelector",
"Properties",
"Result",
diff --git a/broadcastradio/2.0/ITunerCallback.hal b/broadcastradio/2.0/ITunerCallback.hal
index 1aefc4e..ede8350 100644
--- a/broadcastradio/2.0/ITunerCallback.hal
+++ b/broadcastradio/2.0/ITunerCallback.hal
@@ -39,6 +39,21 @@
oneway onCurrentProgramInfoChanged(ProgramInfo info);
/**
+ * A delta update of the program list, called whenever there's a change in
+ * the list.
+ *
+ * If there are frequent changes, the HAL implementation must throttle
+ * the rate of the updates.
+ *
+ * There is a hard limit on the binder transaction buffer size, and the
+ * list must not exceed it. For large lists, the HAL implementation must
+ * split them into multiple chunks, no larger than 500kiB each.
+ *
+ * @param chunk A chunk of the program list update.
+ */
+ oneway onProgramListUpdated(ProgramListChunk chunk);
+
+ /**
* Method called by the HAL when the antenna gets connected or disconnected.
*
* For a new tuner session, client must assume the antenna is connected.
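The chunked protocol above is meant to be consumed with the ProgramInfoSet and updateProgramList helpers added to broadcastradio-utils-2x later in this change. A minimal client-side sketch (the ProgramListCache wrapper itself is illustrative; only the utils helpers come from this change):

    // Sketch: folding ProgramListChunk updates into a client-side list.
    struct ProgramListCache {
        std::mutex mMut;
        utils::ProgramInfoSet mPrograms;  // keyed by the selector's primaryId

        Return<void> onProgramListUpdated(const ProgramListChunk& chunk) {
            std::lock_guard<std::mutex> lk(mMut);
            utils::updateProgramList(mPrograms, chunk);  // applies purge/modified/removed
            if (chunk.complete) {
                // The list is now consistent and can be presented to the user.
            }
            return {};
        }
    };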
diff --git a/broadcastradio/2.0/ITunerSession.hal b/broadcastradio/2.0/ITunerSession.hal
index 8a21768..a3f93fd 100644
--- a/broadcastradio/2.0/ITunerSession.hal
+++ b/broadcastradio/2.0/ITunerSession.hal
@@ -77,6 +77,32 @@
cancel();
/**
+ * Applies a filter to the program list and starts sending program list
+ * updates over onProgramListUpdated callback.
+ *
+ * There may be only one update stream active at a time. Calling this
+ * method again must result in cancelling the previous update request.
+ *
+ * This call clears the program list on the client side; the HAL must send
+ * the whole list again.
+ *
+ * If the program list scanning hardware (i.e. background tuner) is
+ * unavailable at the moment, the call must succeed and start updates
+ * when it becomes available.
+ *
+ * @param filter Filter to apply on the fetched program list.
+ * @return result OK successfully started fetching list updates.
+ * NOT_SUPPORTED program list scanning is not supported
+ * by the hardware.
+ */
+ startProgramListUpdates(ProgramFilter filter) generates (Result result);
+
+ /**
+ * Stops sending program list updates.
+ */
+ stopProgramListUpdates();
+
+ /**
* Fetches the current setting of a given config flag.
*
* The success/failure result must be consistent with setConfigFlag.
diff --git a/broadcastradio/2.0/default/TunerSession.cpp b/broadcastradio/2.0/default/TunerSession.cpp
index 54af3389..244544a 100644
--- a/broadcastradio/2.0/default/TunerSession.cpp
+++ b/broadcastradio/2.0/default/TunerSession.cpp
@@ -45,6 +45,7 @@
static constexpr auto scan = 200ms;
static constexpr auto step = 100ms;
static constexpr auto tune = 150ms;
+static constexpr auto list = 1s;
} // namespace delay
@@ -55,6 +56,9 @@
static ProgramInfo makeDummyProgramInfo(const ProgramSelector& selector) {
ProgramInfo info = {};
info.selector = selector;
+ info.logicallyTunedTo = utils::make_identifier(
+ IdentifierType::AMFM_FREQUENCY, utils::getId(selector, IdentifierType::AMFM_FREQUENCY));
+ info.physicallyTunedTo = info.logicallyTunedTo;
return info;
}
@@ -205,6 +209,38 @@
return {};
}
+Return<Result> TunerSession::startProgramListUpdates(const ProgramFilter& filter) {
+ ALOGV("%s(%s)", __func__, toString(filter).c_str());
+ lock_guard<mutex> lk(mMut);
+ if (mIsClosed) return Result::INVALID_STATE;
+
+ auto list = virtualRadio().getProgramList();
+ vector<VirtualProgram> filteredList;
+ auto filterCb = [&filter](const VirtualProgram& program) {
+ return utils::satisfies(filter, program.selector);
+ };
+ std::copy_if(list.begin(), list.end(), std::back_inserter(filteredList), filterCb);
+
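+ // The default implementation sends the entire filtered list in a single
+ // purge + complete chunk, after a simulated scan delay (delay::list).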
+ auto task = [this, filteredList]() {
+ lock_guard<mutex> lk(mMut);
+
+ ProgramListChunk chunk = {};
+ chunk.purge = true;
+ chunk.complete = true;
+ chunk.modified = hidl_vec<ProgramInfo>(filteredList.begin(), filteredList.end());
+
+ mCallback->onProgramListUpdated(chunk);
+ };
+ mThread.schedule(task, delay::list);
+
+ return Result::OK;
+}
+
+Return<void> TunerSession::stopProgramListUpdates() {
+ ALOGV("%s", __func__);
+ return {};
+}
+
Return<void> TunerSession::getConfigFlag(ConfigFlag flag, getConfigFlag_cb _hidl_cb) {
ALOGV("%s(%s)", __func__, toString(flag).c_str());
diff --git a/broadcastradio/2.0/default/TunerSession.h b/broadcastradio/2.0/default/TunerSession.h
index 9a72182..a58aa19 100644
--- a/broadcastradio/2.0/default/TunerSession.h
+++ b/broadcastradio/2.0/default/TunerSession.h
@@ -38,6 +38,8 @@
virtual Return<Result> scan(bool directionUp, bool skipSubChannel) override;
virtual Return<Result> step(bool directionUp) override;
virtual Return<void> cancel() override;
+ virtual Return<Result> startProgramListUpdates(const ProgramFilter& filter);
+ virtual Return<void> stopProgramListUpdates();
virtual Return<void> getConfigFlag(ConfigFlag flag, getConfigFlag_cb _hidl_cb);
virtual Return<Result> setConfigFlag(ConfigFlag flag, bool value);
virtual Return<void> setParameters(const hidl_vec<VendorKeyValue>& parameters,
diff --git a/broadcastradio/2.0/default/VirtualProgram.cpp b/broadcastradio/2.0/default/VirtualProgram.cpp
index 1acd4d3..acde704 100644
--- a/broadcastradio/2.0/default/VirtualProgram.cpp
+++ b/broadcastradio/2.0/default/VirtualProgram.cpp
@@ -13,11 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+#define LOG_TAG "BcRadioDef.VirtualProgram"
+
#include "VirtualProgram.h"
#include "resources.h"
+#include <android-base/logging.h>
#include <broadcastradio-utils-2x/Utils.h>
+#include <log/log.h>
namespace android {
namespace hardware {
@@ -38,6 +42,39 @@
auto pType = getType(selector.primaryId);
auto isDigital = (pType != IdentifierType::AMFM_FREQUENCY && pType != IdentifierType::RDS_PI);
+ auto selectId = [&info](IdentifierType type) {
+ return utils::make_identifier(type, utils::getId(info.selector, type));
+ };
+
+ switch (pType) {
+ case IdentifierType::AMFM_FREQUENCY:
+ info.logicallyTunedTo = info.physicallyTunedTo =
+ selectId(IdentifierType::AMFM_FREQUENCY);
+ break;
+ case IdentifierType::RDS_PI:
+ info.logicallyTunedTo = selectId(IdentifierType::RDS_PI);
+ info.physicallyTunedTo = selectId(IdentifierType::AMFM_FREQUENCY);
+ break;
+ case IdentifierType::HD_STATION_ID_EXT:
+ info.logicallyTunedTo = selectId(IdentifierType::HD_STATION_ID_EXT);
+ info.physicallyTunedTo = selectId(IdentifierType::AMFM_FREQUENCY);
+ break;
+ case IdentifierType::DAB_SID_EXT:
+ info.logicallyTunedTo = selectId(IdentifierType::DAB_SID_EXT);
+ info.physicallyTunedTo = selectId(IdentifierType::DAB_ENSEMBLE);
+ break;
+ case IdentifierType::DRMO_SERVICE_ID:
+ info.logicallyTunedTo = selectId(IdentifierType::DRMO_SERVICE_ID);
+ info.physicallyTunedTo = selectId(IdentifierType::DRMO_FREQUENCY);
+ break;
+ case IdentifierType::SXM_SERVICE_ID:
+ info.logicallyTunedTo = selectId(IdentifierType::SXM_SERVICE_ID);
+ info.physicallyTunedTo = selectId(IdentifierType::SXM_CHANNEL);
+ break;
+ default:
+ LOG(FATAL) << "Unsupported program type: " << toString(pType);
+ }
+
info.infoFlags |= ProgramInfoFlags::TUNED;
info.infoFlags |= ProgramInfoFlags::STEREO;
info.signalQuality = isDigital ? 100 : 80;
diff --git a/broadcastradio/2.0/types.hal b/broadcastradio/2.0/types.hal
index fc5809f..b5264f4 100644
--- a/broadcastradio/2.0/types.hal
+++ b/broadcastradio/2.0/types.hal
@@ -25,6 +25,8 @@
* onAntennaStateChange callback must be called within this time.
*/
ANTENNA_DISCONNECTED_TIMEOUT_MS = 100,
+
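+ /**
+ * The complete program list (the chunk with the complete flag set) must be
+ * transmitted within this time of the startProgramListUpdates call.
+ * See ProgramListChunk::complete.
+ */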
+ LIST_COMPLETE_TIMEOUT_MS = 300000,
};
enum Result : int32_t {
@@ -86,21 +88,17 @@
*/
RDS_REG,
- /**
- * Enables DAB implicit linking, based on program identifiers
- * (DAB SId, RDS PI).
- */
- DAB_IMPLICIT_LINKING,
+ /** Enables DAB-DAB hard- and implicit-linking (the same content). */
+ DAB_DAB_LINKING,
- /**
- * Enables DAB hard linking (the same content).
- */
- DAB_HARD_LINKING,
+ /** Enables DAB-FM hard- and implicit-linking (the same content). */
+ DAB_FM_LINKING,
- /**
- * Enables DAB hard linking (related content).
- */
- DAB_SOFT_LINKING,
+ /** Enables DAB-DAB soft-linking (related content). */
+ DAB_DAB_SOFT_LINKING,
+
+ /** Enables DAB-FM soft-linking (related content). */
+ DAB_FM_SOFT_LINKING,
};
/**
@@ -187,6 +185,69 @@
*/
ProgramSelector selector;
+ /**
+ * Identifier currently used for program selection.
+ *
+ * It can be used to determine which radio technology is currently used
+ * for reception.
+ *
+ * Some program selectors contain tuning information for different radio
+ * technologies (i.e. FM RDS and DAB). For example, the user may tune using
+ * a ProgramSelector with an RDS_PI primary identifier, but the tuner
+ * hardware may choose DAB technology for the actual tuning. This identifier
+ * must reflect that.
+ *
+ * This field is optional, but must be set for the currently tuned program.
+ * If it's not set, its value must be initialized to all-zeros.
+ *
+ * Only primary identifiers for a given radio technology are valid:
+ * - AMFM_FREQUENCY for analog AM/FM;
+ * - RDS_PI for FM RDS;
+ * - HD_STATION_ID_EXT;
+ * - DAB_SID_EXT;
+ * - DRMO_SERVICE_ID;
+ * - SXM_SERVICE_ID;
+ * - VENDOR_*;
+ * - more might come in next minor versions of this HAL.
+ */
+ ProgramIdentifier logicallyTunedTo;
+
+ /**
+ * Identifier currently used by hardware to physically tune to a channel.
+ *
+ * Some radio technologies broadcast the same program on multiple channels,
+ * i.e. with RDS AF the same program may be broadcast on multiple
+ * alternative frequencies; the same DAB program may be broadcast on
+ * multiple ensembles. This identifier points to the channel to which
+ * the radio hardware is physically tuned.
+ *
+ * This field is optional, but must be set for the currently tuned program.
+ * If it's not set, its type field must be initialized to
+ * IdentifierType::INVALID.
+ *
+ * Only physical identifiers are valid:
+ * - AMFM_FREQUENCY;
+ * - DAB_ENSEMBLE;
+ * - DRMO_FREQUENCY;
+ * - SXM_CHANNEL;
+ * - VENDOR_*;
+ * - more might come in next minor versions of this HAL.
+ */
+ ProgramIdentifier physicallyTunedTo;
+
+ /**
+ * Primary identifiers of related contents.
+ *
+ * Some radio technologies provide pointers to other programs that carry
+ * related content (i.e. DAB soft-links). This field is a list of pointers
+ * to other programs on the program list.
+ *
+ * Please note that these identifiers do not have to exist on the program
+ * list - i.e. a DAB tuner may provide information on FM RDS alternatives
+ * despite not supporting FM RDS. If the system has multiple tuners, another
+ * one may have them on its list.
+ */
+ vec<ProgramIdentifier> relatedContent;
+
bitfield<ProgramInfoFlags> infoFlags;
/**
@@ -277,6 +338,8 @@
/** See VENDOR_START */
VENDOR_END = 1999,
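+
+ /**
+ * Undefined identifier type. Not a valid identifier on its own; used to
+ * mark an unset ProgramIdentifier (see ProgramInfo::physicallyTunedTo).
+ */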
+ INVALID = 0,
+
/**
* Primary identifier for analogue (without RDS) AM/FM stations:
* frequency in kHz.
@@ -287,7 +350,7 @@
* - 1.71MHz - 30MHz: AM SW;
* - >60MHz: FM.
*/
- AMFM_FREQUENCY = 1,
+ AMFM_FREQUENCY,
/**
* 16bit primary identifier for FM RDS station.
@@ -455,6 +518,42 @@
/** Album art (uint32_t, see IBroadcastRadio::getImage) */
ALBUM_ART,
+
+ /**
+ * Station name.
+ *
+ * This is a generic field to cover any radio technology.
+ *
+ * If the PROGRAM_NAME has the same content as DAB_*_NAME or RDS_PS,
+ * it may not be present, to preserve space - the framework must repopulate
+ * it on the client side.
+ */
+ PROGRAM_NAME,
+
+ /** DAB ensemble name (string) */
+ DAB_ENSEMBLE_NAME,
+
+ /**
+ * DAB ensemble name abbreviated (string).
+ *
+ * The string must be up to 8 characters long.
+ *
+ * If the short variant is present, the long (DAB_ENSEMBLE_NAME) one must be
+ * present as well.
+ */
+ DAB_ENSEMBLE_NAME_SHORT,
+
+ /** DAB service name (string) */
+ DAB_SERVICE_NAME,
+
+ /** DAB service name abbreviated (see DAB_ENSEMBLE_NAME_SHORT) (string) */
+ DAB_SERVICE_NAME_SHORT,
+
+ /** DAB component name (string) */
+ DAB_COMPONENT_NAME,
+
+ /** DAB component name abbreviated (see DAB_ENSEMBLE_NAME_SHORT) (string) */
+ DAB_COMPONENT_NAME_SHORT,
};
/**
@@ -476,3 +575,102 @@
int64_t intValue;
string stringValue;
};
+
+/**
+ * An update packet of the program list.
+ *
+ * The order of entries in the vectors is unspecified.
+ */
+struct ProgramListChunk {
+ /**
+ * Treats all previously added entries as removed.
+ *
+ * This is meant to save binder transaction bandwidth on the 'removed'
+ * vector and provide a clear empty state.
+ *
+ * If set, the 'removed' vector must be empty.
+ *
+ * The client may delay taking action on this until it receives a chunk
+ * with the complete flag set (to avoid some stations temporarily
+ * disappearing from the list).
+ */
+ bool purge;
+
+ /**
+ * If false, it means there are still programs that have not been
+ * transmitted, due for transmission in following updates.
+ *
+ * Used by UIs that wait for a complete list instead of displaying
+ * programs while scanning.
+ *
+ * After the whole channel range has been scanned and all discovered
+ * programs have been transmitted, the last chunk must have this flag set
+ * to true. This must happen within Constants::LIST_COMPLETE_TIMEOUT_MS of
+ * the startProgramListUpdates call. If it doesn't, the client may assume
+ * the tuner entered a bad state and display an error message.
+ */
+ bool complete;
+
+ /**
+ * Added or modified program list entries.
+ *
+ * Two entries with the same primaryId (ProgramSelector member)
+ * are considered the same.
+ */
+ vec<ProgramInfo> modified;
+
+ /**
+ * Removed program list entries.
+ *
+ * Contains primaryId (ProgramSelector member) of a program to remove.
+ */
+ vec<ProgramIdentifier> removed;
+};
+
+/**
+ * Large-grain filter to the program list.
+ *
+ * This is meant to reduce binder transaction bandwidth, not to provide the
+ * fine-grained filtering a user might expect.
+ *
+ * The filter is designed as a conjunctive normal form: an entry that passes
+ * the filter must satisfy all the clauses (members of this struct). Vector
+ * clauses are disjunctions of literals. In other words, there is an AND
+ * between the high-level groups and an OR inside each of them.
+ */
+struct ProgramFilter {
+ /**
+ * List of identifier types that satisfy the filter.
+ *
+ * If the program list entry contains at least one identifier of the type
+ * listed, it satisfies this condition.
+ *
+ * Empty list means no filtering on identifier type.
+ */
+ vec<uint32_t> identifierTypes;
+
+ /**
+ * List of identifiers that satisfy the filter.
+ *
+ * If the program list entry contains at least one listed identifier,
+ * it satisfies this condition.
+ *
+ * Empty list means no filtering on identifier.
+ */
+ vec<ProgramIdentifier> identifiers;
+
+ /**
+ * Includes non-tunable entries that define tree structure on the
+ * program list (i.e. DAB ensembles).
+ */
+ bool includeCategories;
+
+ /**
+ * Disable updates on entry modifications.
+ *
+ * If true, the 'modified' vector of ProgramListChunk must contain list
+ * additions only. Once a program is added to the list, it's not
+ * updated anymore.
+ */
+ bool excludeModifications;
+};
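To make the AND/OR semantics concrete, here is a small sketch of how the default implementation's utils::satisfies() (added in Utils.cpp later in this change) would evaluate a filter; the frequency value is made up:

    // Sketch: keep only entries that carry an AM/FM frequency identifier.
    ProgramFilter filter = {};
    filter.identifierTypes = {static_cast<uint32_t>(IdentifierType::AMFM_FREQUENCY)};
    filter.includeCategories = false;      // drop non-tunable entries (e.g. DAB ensembles)
    filter.excludeModifications = false;   // keep sending updates for existing entries

    ProgramSelector fmStation = {};
    fmStation.primaryId = utils::make_identifier(IdentifierType::AMFM_FREQUENCY, 94900);

    bool keep = utils::satisfies(filter, fmStation);  // true: the type clause matches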
diff --git a/broadcastradio/2.0/vts/functional/VtsHalBroadcastradioV2_0TargetTest.cpp b/broadcastradio/2.0/vts/functional/VtsHalBroadcastradioV2_0TargetTest.cpp
index d0e4144..cbe6288 100644
--- a/broadcastradio/2.0/vts/functional/VtsHalBroadcastradioV2_0TargetTest.cpp
+++ b/broadcastradio/2.0/vts/functional/VtsHalBroadcastradioV2_0TargetTest.cpp
@@ -38,6 +38,7 @@
using namespace std::chrono_literals;
+using std::unordered_set;
using std::vector;
using testing::_;
using testing::AnyNumber;
@@ -54,25 +55,36 @@
namespace timeout {
static constexpr auto tune = 30s;
+static constexpr auto programListScan = 5min;
} // namespace timeout
static const ConfigFlag gConfigFlagValues[] = {
- ConfigFlag::FORCE_MONO, ConfigFlag::FORCE_ANALOG, ConfigFlag::FORCE_DIGITAL,
- ConfigFlag::RDS_AF, ConfigFlag::RDS_REG, ConfigFlag::DAB_IMPLICIT_LINKING,
- ConfigFlag::DAB_HARD_LINKING, ConfigFlag::DAB_SOFT_LINKING,
+ ConfigFlag::FORCE_MONO,
+ ConfigFlag::FORCE_ANALOG,
+ ConfigFlag::FORCE_DIGITAL,
+ ConfigFlag::RDS_AF,
+ ConfigFlag::RDS_REG,
+ ConfigFlag::DAB_DAB_LINKING,
+ ConfigFlag::DAB_FM_LINKING,
+ ConfigFlag::DAB_DAB_SOFT_LINKING,
+ ConfigFlag::DAB_FM_SOFT_LINKING,
};
-struct TunerCallbackMock : public ITunerCallback {
- TunerCallbackMock() {
- // we expect the antenna is connected through the whole test
- EXPECT_CALL(*this, onAntennaStateChange(false)).Times(0);
- }
+class TunerCallbackMock : public ITunerCallback {
+ public:
+ TunerCallbackMock();
MOCK_METHOD2(onTuneFailed, Return<void>(Result, const ProgramSelector&));
MOCK_TIMEOUT_METHOD1(onCurrentProgramInfoChanged, Return<void>(const ProgramInfo&));
+ Return<void> onProgramListUpdated(const ProgramListChunk& chunk);
MOCK_METHOD1(onAntennaStateChange, Return<void>(bool connected));
MOCK_METHOD1(onParametersUpdated, Return<void>(const hidl_vec<VendorKeyValue>& parameters));
+
+ MOCK_TIMEOUT_METHOD0(onProgramListReady, void());
+
+ std::mutex mLock;
+ utils::ProgramInfoSet mProgramList;
};
class BroadcastRadioHalTest : public ::testing::VtsHalHidlTargetTestBase {
@@ -88,6 +100,25 @@
sp<TunerCallbackMock> mCallback = new TunerCallbackMock();
};
+static void printSkipped(std::string msg) {
+ std::cout << "[ SKIPPED ] " << msg << std::endl;
+}
+
+TunerCallbackMock::TunerCallbackMock() {
+ // We expect the antenna to be connected throughout the whole test.
+ EXPECT_CALL(*this, onAntennaStateChange(false)).Times(0);
+}
+
+Return<void> TunerCallbackMock::onProgramListUpdated(const ProgramListChunk& chunk) {
+ std::lock_guard<std::mutex> lk(mLock);
+
+ updateProgramList(mProgramList, chunk);
+
+ if (chunk.complete) onProgramListReady();
+
+ return {};
+}
+
void BroadcastRadioHalTest::SetUp() {
EXPECT_EQ(nullptr, mModule.get()) << "Module is already open";
@@ -463,6 +494,35 @@
}
}
+/**
+ * Test getting program list.
+ *
+ * Verifies that:
+ * - startProgramListUpdates either succeeds or returns NOT_SUPPORTED;
+ * - the complete list is fetched within timeout::programListScan;
+ * - stopProgramListUpdates does not crash.
+ */
+TEST_F(BroadcastRadioHalTest, GetProgramList) {
+ ASSERT_TRUE(openSession());
+
+ EXPECT_TIMEOUT_CALL(*mCallback, onProgramListReady).Times(AnyNumber());
+
+ auto startResult = mSession->startProgramListUpdates({});
+ if (startResult == Result::NOT_SUPPORTED) {
+ printSkipped("Program list not supported");
+ return;
+ }
+ ASSERT_EQ(Result::OK, startResult);
+
+ EXPECT_TIMEOUT_CALL_WAIT(*mCallback, onProgramListReady, timeout::programListScan);
+
+ auto stopResult = mSession->stopProgramListUpdates();
+ EXPECT_TRUE(stopResult.isOk());
+}
+
+// TODO(b/70939328): test ProgramInfo's currentlyTunedId and
+// currentlyTunedChannel once the program list is implemented.
+
} // namespace vts
} // namespace V2_0
} // namespace broadcastradio
diff --git a/broadcastradio/common/tests/Android.bp b/broadcastradio/common/tests/Android.bp
index bbad527..512c02e 100644
--- a/broadcastradio/common/tests/Android.bp
+++ b/broadcastradio/common/tests/Android.bp
@@ -15,20 +15,6 @@
//
cc_test {
- name: "android.hardware.broadcastradio@common-utils-tests",
- vendor: true,
- cflags: [
- "-Wall",
- "-Wextra",
- "-Werror",
- ],
- srcs: [
- "WorkerThread_test.cpp",
- ],
- static_libs: ["android.hardware.broadcastradio@common-utils-lib"],
-}
-
-cc_test {
name: "android.hardware.broadcastradio@common-utils-xx-tests",
vendor: true,
cflags: [
@@ -48,3 +34,36 @@
"android.hardware.broadcastradio@2.0",
],
}
+
+cc_test {
+ name: "android.hardware.broadcastradio@common-utils-2x-tests",
+ vendor: true,
+ cflags: [
+ "-Wall",
+ "-Wextra",
+ "-Werror",
+ ],
+ srcs: [
+ "IdentifierIterator_test.cpp",
+ ],
+ static_libs: [
+ "android.hardware.broadcastradio@common-utils-2x-lib",
+ ],
+ shared_libs: [
+ "android.hardware.broadcastradio@2.0",
+ ],
+}
+
+cc_test {
+ name: "android.hardware.broadcastradio@common-utils-tests",
+ vendor: true,
+ cflags: [
+ "-Wall",
+ "-Wextra",
+ "-Werror",
+ ],
+ srcs: [
+ "WorkerThread_test.cpp",
+ ],
+ static_libs: ["android.hardware.broadcastradio@common-utils-lib"],
+}
diff --git a/broadcastradio/common/tests/IdentifierIterator_test.cpp b/broadcastradio/common/tests/IdentifierIterator_test.cpp
new file mode 100644
index 0000000..5bf222b
--- /dev/null
+++ b/broadcastradio/common/tests/IdentifierIterator_test.cpp
@@ -0,0 +1,123 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <broadcastradio-utils-2x/Utils.h>
+#include <gtest/gtest.h>
+
+namespace {
+
+namespace V2_0 = android::hardware::broadcastradio::V2_0;
+namespace utils = android::hardware::broadcastradio::utils;
+
+using V2_0::IdentifierType;
+using V2_0::ProgramSelector;
+
+TEST(IdentifierIteratorTest, singleSecondary) {
+ // clang-format off
+ V2_0::ProgramSelector sel {
+ utils::make_identifier(IdentifierType::RDS_PI, 0xBEEF),
+ {utils::make_identifier(IdentifierType::AMFM_FREQUENCY, 100100)}
+ };
+ // clang-format on
+
+ auto it = utils::begin(sel);
+ auto end = utils::end(sel);
+
+ ASSERT_NE(end, it);
+ EXPECT_EQ(sel.primaryId, *it);
+ ASSERT_NE(end, ++it);
+ EXPECT_EQ(sel.secondaryIds[0], *it);
+ ASSERT_EQ(end, ++it);
+}
+
+TEST(IdentifierIteratorTest, empty) {
+ V2_0::ProgramSelector sel{};
+
+ auto it = utils::begin(sel);
+ auto end = utils::end(sel);
+
+ ASSERT_NE(end, it++); // primary id is always present
+ ASSERT_EQ(end, it);
+}
+
+TEST(IdentifierIteratorTest, twoSelectors) {
+ V2_0::ProgramSelector sel1{};
+ V2_0::ProgramSelector sel2{};
+
+ auto it1 = utils::begin(sel1);
+ auto it2 = utils::begin(sel2);
+
+ EXPECT_NE(it1, it2);
+}
+
+TEST(IdentifierIteratorTest, increments) {
+ V2_0::ProgramSelector sel{{}, {{}, {}}};
+
+ auto it = utils::begin(sel);
+ auto end = utils::end(sel);
+ auto pre = it;
+ auto post = it;
+
+ EXPECT_NE(++pre, post++);
+ EXPECT_EQ(pre, post);
+ EXPECT_EQ(pre, it + 1);
+ ASSERT_NE(end, pre);
+}
+
+TEST(IdentifierIteratorTest, findType) {
+ using namespace std::placeholders;
+
+ uint64_t rds_pi1 = 0xDEAD;
+ uint64_t rds_pi2 = 0xBEEF;
+ uint64_t freq1 = 100100;
+ uint64_t freq2 = 107900;
+
+ // clang-format off
+ V2_0::ProgramSelector sel {
+ utils::make_identifier(IdentifierType::RDS_PI, rds_pi1),
+ {
+ utils::make_identifier(IdentifierType::AMFM_FREQUENCY, freq1),
+ utils::make_identifier(IdentifierType::RDS_PI, rds_pi2),
+ utils::make_identifier(IdentifierType::AMFM_FREQUENCY, freq2),
+ }
+ };
+ // clang-format on
+
+ auto typeEquals = [](const V2_0::ProgramIdentifier& id, V2_0::IdentifierType type) {
+ return utils::getType(id) == type;
+ };
+ auto isRdsPi = std::bind(typeEquals, _1, IdentifierType::RDS_PI);
+ auto isFreq = std::bind(typeEquals, _1, IdentifierType::AMFM_FREQUENCY);
+
+ auto end = utils::end(sel);
+ auto it = std::find_if(utils::begin(sel), end, isRdsPi);
+ ASSERT_NE(end, it);
+ EXPECT_EQ(rds_pi1, it->value);
+
+ it = std::find_if(it + 1, end, isRdsPi);
+ ASSERT_NE(end, it);
+ EXPECT_EQ(rds_pi2, it->value);
+
+ it = std::find_if(utils::begin(sel), end, isFreq);
+ ASSERT_NE(end, it);
+ EXPECT_EQ(freq1, it->value);
+
+ it = std::find_if(++it, end, isFreq);
+ ASSERT_NE(end, it);
+ EXPECT_EQ(freq2, it->value);
+}
+
+} // anonymous namespace
diff --git a/broadcastradio/common/utils2x/Utils.cpp b/broadcastradio/common/utils2x/Utils.cpp
index d157108..e0337b4 100644
--- a/broadcastradio/common/utils2x/Utils.cpp
+++ b/broadcastradio/common/utils2x/Utils.cpp
@@ -18,6 +18,7 @@
#include <broadcastradio-utils-2x/Utils.h>
+#include <android-base/logging.h>
#include <log/log.h>
namespace android {
@@ -28,14 +29,64 @@
using V2_0::IdentifierType;
using V2_0::Metadata;
using V2_0::MetadataKey;
+using V2_0::ProgramFilter;
using V2_0::ProgramIdentifier;
+using V2_0::ProgramInfo;
+using V2_0::ProgramListChunk;
using V2_0::ProgramSelector;
+using V2_0::Properties;
using std::string;
using std::vector;
+IdentifierType getType(uint32_t typeAsInt) {
+ return static_cast<IdentifierType>(typeAsInt);
+}
+
IdentifierType getType(const ProgramIdentifier& id) {
- return static_cast<IdentifierType>(id.type);
+ return getType(id.type);
+}
+
+IdentifierIterator::IdentifierIterator(const V2_0::ProgramSelector& sel)
+ : IdentifierIterator(sel, 0) {}
+
+IdentifierIterator::IdentifierIterator(const V2_0::ProgramSelector& sel, size_t pos)
+ : mSel(sel), mPos(pos) {}
+
+IdentifierIterator IdentifierIterator::operator++(int) {
+ auto i = *this;
+ mPos++;
+ return i;
+}
+
+IdentifierIterator& IdentifierIterator::operator++() {
+ ++mPos;
+ return *this;
+}
+
+IdentifierIterator::ref_type IdentifierIterator::operator*() const {
+ if (mPos == 0) return sel().primaryId;
+
+ // mPos is 1-based for secondary identifiers
+ DCHECK(mPos <= sel().secondaryIds.size());
+ return sel().secondaryIds[mPos - 1];
+}
+
+bool IdentifierIterator::operator==(const IdentifierIterator& rhs) const {
+ // Check if both iterators point at the same selector.
+ if (reinterpret_cast<uintptr_t>(&sel()) != reinterpret_cast<uintptr_t>(&rhs.sel())) {
+ return false;
+ }
+
+ return mPos == rhs.mPos;
+}
+
+IdentifierIterator begin(const V2_0::ProgramSelector& sel) {
+ return IdentifierIterator(sel);
+}
+
+IdentifierIterator end(const V2_0::ProgramSelector& sel) {
+ return IdentifierIterator(sel) + 1 /* primary id */ + sel.secondaryIds.size();
}
static bool bothHaveId(const ProgramSelector& a, const ProgramSelector& b,
@@ -88,6 +139,7 @@
return true;
}
+ // TODO(twasilczyk): use IdentifierIterator
// not optimal, but we don't care in default impl
for (auto&& id : sel.secondaryIds) {
if (id.type == itype) {
@@ -125,6 +177,7 @@
if (sel.primaryId.type == itype) ret.push_back(sel.primaryId.value);
+ // TODO(twasilczyk): use IdentifierIterator
for (auto&& id : sel.secondaryIds) {
if (id.type == itype) ret.push_back(id.value);
}
@@ -132,11 +185,11 @@
return ret;
}
-bool isSupported(const V2_0::Properties& prop, const V2_0::ProgramSelector& sel) {
+bool isSupported(const Properties& prop, const ProgramSelector& sel) {
+ // TODO(twasilczyk): use IdentifierIterator
// Not optimal, but it doesn't matter for default impl nor VTS tests.
- for (auto&& idTypeI : prop.supportedIdentifierTypes) {
- auto idType = static_cast<IdentifierType>(idTypeI);
- if (hasId(sel, idType)) return true;
+ for (auto&& idType : prop.supportedIdentifierTypes) {
+ if (hasId(sel, getType(idType))) return true;
}
return false;
}
@@ -152,7 +205,10 @@
}
};
- switch (static_cast<IdentifierType>(id.type)) {
+ switch (getType(id)) {
+ case IdentifierType::INVALID:
+ expect(false, "IdentifierType::INVALID");
+ break;
case IdentifierType::AMFM_FREQUENCY:
case IdentifierType::DAB_FREQUENCY:
case IdentifierType::DRMO_FREQUENCY:
@@ -211,8 +267,9 @@
return valid;
}
-bool isValid(const V2_0::ProgramSelector& sel) {
+bool isValid(const ProgramSelector& sel) {
if (!isValid(sel.primaryId)) return false;
+ // TODO(twasilczyk): use IdentifierIterator
for (auto&& id : sel.secondaryIds) {
if (!isValid(id)) return false;
}
@@ -243,6 +300,59 @@
return meta;
}
+bool satisfies(const ProgramFilter& filter, const ProgramSelector& sel) {
+ if (filter.identifierTypes.size() > 0) {
+ auto typeEquals = [](const V2_0::ProgramIdentifier& id, uint32_t type) {
+ return id.type == type;
+ };
+ auto it = std::find_first_of(begin(sel), end(sel), filter.identifierTypes.begin(),
+ filter.identifierTypes.end(), typeEquals);
+ if (it == end(sel)) return false;
+ }
+
+ if (filter.identifiers.size() > 0) {
+ auto it = std::find_first_of(begin(sel), end(sel), filter.identifiers.begin(),
+ filter.identifiers.end());
+ if (it == end(sel)) return false;
+ }
+
+ if (!filter.includeCategories) {
+ if (getType(sel.primaryId) == IdentifierType::DAB_ENSEMBLE) return false;
+ }
+
+ return true;
+}
+
+size_t ProgramInfoHasher::operator()(const ProgramInfo& info) const {
+ auto& id = info.selector.primaryId;
+
+ /* This is not the best hash implementation, but good enough for default HAL
+ * implementation and tests. */
+ auto h = std::hash<uint32_t>{}(id.type);
+ h += 0x9e3779b9;
+ h ^= std::hash<uint64_t>{}(id.value);
+
+ return h;
+}
+
+bool ProgramInfoKeyEqual::operator()(const ProgramInfo& info1, const ProgramInfo& info2) const {
+ auto& id1 = info1.selector.primaryId;
+ auto& id2 = info2.selector.primaryId;
+ return id1.type == id2.type && id1.value == id2.value;
+}
+
+void updateProgramList(ProgramInfoSet& list, const ProgramListChunk& chunk) {
+ if (chunk.purge) list.clear();
+
+ list.insert(chunk.modified.begin(), chunk.modified.end());
+
+ for (auto&& id : chunk.removed) {
+ ProgramInfo info = {};
+ info.selector.primaryId = id;
+ list.erase(info);
+ }
+}
+
} // namespace utils
} // namespace broadcastradio
} // namespace hardware
diff --git a/broadcastradio/common/utils2x/include/broadcastradio-utils-2x/Utils.h b/broadcastradio/common/utils2x/include/broadcastradio-utils-2x/Utils.h
index dd01852..bac11fd 100644
--- a/broadcastradio/common/utils2x/include/broadcastradio-utils-2x/Utils.h
+++ b/broadcastradio/common/utils2x/include/broadcastradio-utils-2x/Utils.h
@@ -20,14 +20,49 @@
#include <chrono>
#include <queue>
#include <thread>
+#include <unordered_set>
namespace android {
namespace hardware {
namespace broadcastradio {
namespace utils {
+V2_0::IdentifierType getType(uint32_t typeAsInt);
V2_0::IdentifierType getType(const V2_0::ProgramIdentifier& id);
+class IdentifierIterator
+ : public std::iterator<std::random_access_iterator_tag, V2_0::ProgramIdentifier, ssize_t,
+ const V2_0::ProgramIdentifier*, const V2_0::ProgramIdentifier&> {
+ using traits = std::iterator_traits<IdentifierIterator>;
+ using ptr_type = typename traits::pointer;
+ using ref_type = typename traits::reference;
+ using diff_type = typename traits::difference_type;
+
+ public:
+ explicit IdentifierIterator(const V2_0::ProgramSelector& sel);
+
+ IdentifierIterator operator++(int);
+ IdentifierIterator& operator++();
+ ref_type operator*() const;
+ inline ptr_type operator->() const { return &operator*(); }
+ IdentifierIterator operator+(diff_type v) const { return IdentifierIterator(mSel, mPos + v); }
+ bool operator==(const IdentifierIterator& rhs) const;
+ inline bool operator!=(const IdentifierIterator& rhs) const { return !operator==(rhs); };
+
+ private:
+ explicit IdentifierIterator(const V2_0::ProgramSelector& sel, size_t pos);
+
+ std::reference_wrapper<const V2_0::ProgramSelector> mSel;
+
+ const V2_0::ProgramSelector& sel() const { return mSel.get(); }
+
+ /** 0 is the primary identifier, 1-n are secondary identifiers. */
+ size_t mPos = 0;
+};
+
+IdentifierIterator begin(const V2_0::ProgramSelector& sel);
+IdentifierIterator end(const V2_0::ProgramSelector& sel);
+
/**
* Checks, if {@code pointer} tunes to {@channel}.
*
@@ -77,6 +112,21 @@
V2_0::Metadata make_metadata(V2_0::MetadataKey key, int64_t value);
V2_0::Metadata make_metadata(V2_0::MetadataKey key, std::string value);
+bool satisfies(const V2_0::ProgramFilter& filter, const V2_0::ProgramSelector& sel);
+
+struct ProgramInfoHasher {
+ size_t operator()(const V2_0::ProgramInfo& info) const;
+};
+
+struct ProgramInfoKeyEqual {
+ bool operator()(const V2_0::ProgramInfo& info1, const V2_0::ProgramInfo& info2) const;
+};
+
+typedef std::unordered_set<V2_0::ProgramInfo, ProgramInfoHasher, ProgramInfoKeyEqual>
+ ProgramInfoSet;
+
+void updateProgramList(ProgramInfoSet& list, const V2_0::ProgramListChunk& chunk);
+
} // namespace utils
} // namespace broadcastradio
} // namespace hardware
diff --git a/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h b/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h
index b0ce088..12453bb 100644
--- a/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h
+++ b/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h
@@ -30,18 +30,41 @@
std::condition_variable egmock_cond_##Method;
/**
+ * Function similar to the comma operator, making it possible to return any value returned by the
+ * mocked function (which may be void) while discarding the result of the other operation (the
+ * notification about a call).
+ *
+ * We need to invoke the mocked function (whose result is returned) before the notification (whose
+ * result is dropped) - that's exactly the opposite of the comma operator.
+ *
+ * INTERNAL IMPLEMENTATION - don't use in user code.
+ */
+template <typename T>
+static T EGMockFlippedComma_(std::function<T()> returned, std::function<void()> discarded) {
+ auto ret = returned();
+ discarded();
+ return ret;
+}
+
+template <>
+inline void EGMockFlippedComma_(std::function<void()> returned, std::function<void()> discarded) {
+ returned();
+ discarded();
+}
+
+/**
* Common method body for gmock timeout extension.
*
* INTERNAL IMPLEMENTATION - don't use in user code.
*/
-#define EGMOCK_TIMEOUT_METHOD_BODY_(Method, ...) \
- auto ret = egmock_##Method(__VA_ARGS__); \
- { \
- std::lock_guard<std::mutex> lk(egmock_mut_##Method); \
- egmock_called_##Method = true; \
- egmock_cond_##Method.notify_all(); \
- } \
- return ret;
+#define EGMOCK_TIMEOUT_METHOD_BODY_(Method, ...) \
+ auto invokeMock = [&]() { return egmock_##Method(__VA_ARGS__); }; \
+ auto notify = [&]() { \
+ std::lock_guard<std::mutex> lk(egmock_mut_##Method); \
+ egmock_called_##Method = true; \
+ egmock_cond_##Method.notify_all(); \
+ }; \
+ return EGMockFlippedComma_<decltype(invokeMock())>(invokeMock, notify);
/**
* Gmock MOCK_METHOD0 timeout-capable extension.
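For reference, the old macro body stored the mock's result in a local variable (auto ret = egmock_##Method(...)), which does not compile for a genuinely void-returning mock such as onProgramListReady in the VTS test above. Conceptually, the new body behaves like this hand expansion for a hypothetical mocked method Foo:

    // Hand expansion of EGMOCK_TIMEOUT_METHOD_BODY_(Foo, arg) - illustration only.
    auto invokeMock = [&]() { return egmock_Foo(arg); };   // may return void
    auto notify = [&]() {
        std::lock_guard<std::mutex> lk(egmock_mut_Foo);
        egmock_called_Foo = true;
        egmock_cond_Foo.notify_all();
    };
    // The mock runs first, the waiters are notified second, and the mock's
    // (possibly void) result is returned by the flipped-comma helper.
    return EGMockFlippedComma_<decltype(invokeMock())>(invokeMock, notify);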
diff --git a/camera/device/1.0/default/Android.bp b/camera/device/1.0/default/Android.bp
index e0b31f0..4a7fc9c 100644
--- a/camera/device/1.0/default/Android.bp
+++ b/camera/device/1.0/default/Android.bp
@@ -29,9 +29,6 @@
header_libs: [
"media_plugin_headers",
],
- include_dirs: [
- "frameworks/native/include/media/openmax"
- ],
export_include_dirs: ["."]
}
diff --git a/camera/device/3.2/default/CameraDeviceSession.cpp b/camera/device/3.2/default/CameraDeviceSession.cpp
index d6a04bc..631404e 100644
--- a/camera/device/3.2/default/CameraDeviceSession.cpp
+++ b/camera/device/3.2/default/CameraDeviceSession.cpp
@@ -803,6 +803,89 @@
return dataSpace;
}
+bool CameraDeviceSession::preProcessConfigurationLocked(
+ const StreamConfiguration& requestedConfiguration,
+ camera3_stream_configuration_t *stream_list /*out*/,
+ hidl_vec<camera3_stream_t*> *streams /*out*/) {
+
+ if ((stream_list == nullptr) || (streams == nullptr)) {
+ return false;
+ }
+
+ stream_list->operation_mode = (uint32_t) requestedConfiguration.operationMode;
+ stream_list->num_streams = requestedConfiguration.streams.size();
+ streams->resize(stream_list->num_streams);
+ stream_list->streams = streams->data();
+
+ for (uint32_t i = 0; i < stream_list->num_streams; i++) {
+ int id = requestedConfiguration.streams[i].id;
+
+ if (mStreamMap.count(id) == 0) {
+ Camera3Stream stream;
+ convertFromHidl(requestedConfiguration.streams[i], &stream);
+ mStreamMap[id] = stream;
+ mStreamMap[id].data_space = mapToLegacyDataspace(
+ mStreamMap[id].data_space);
+ mCirculatingBuffers.emplace(stream.mId, CirculatingBuffers{});
+ } else {
+ // width/height/format must not change, but usage/rotation might need to change
+ if (mStreamMap[id].stream_type !=
+ (int) requestedConfiguration.streams[i].streamType ||
+ mStreamMap[id].width != requestedConfiguration.streams[i].width ||
+ mStreamMap[id].height != requestedConfiguration.streams[i].height ||
+ mStreamMap[id].format != (int) requestedConfiguration.streams[i].format ||
+ mStreamMap[id].data_space !=
+ mapToLegacyDataspace( static_cast<android_dataspace_t> (
+ requestedConfiguration.streams[i].dataSpace))) {
+ ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id);
+ return false;
+ }
+ mStreamMap[id].rotation = (int) requestedConfiguration.streams[i].rotation;
+ mStreamMap[id].usage = (uint32_t) requestedConfiguration.streams[i].usage;
+ }
+ (*streams)[i] = &mStreamMap[id];
+ }
+
+ return true;
+}
+
+void CameraDeviceSession::postProcessConfigurationLocked(
+ const StreamConfiguration& requestedConfiguration) {
+ // Delete unused streams. Note we do this after adding new streams to ensure a new
+ // stream will not have the same address as a deleted stream, and the HAL has a
+ // chance to reference the to-be-deleted stream in the configure_streams call.
+ for(auto it = mStreamMap.begin(); it != mStreamMap.end();) {
+ int id = it->first;
+ bool found = false;
+ for (const auto& stream : requestedConfiguration.streams) {
+ if (id == stream.id) {
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ // Unmap all buffers of deleted stream
+ // in case the configuration call succeeds and HAL
+ // is able to release the corresponding resources too.
+ cleanupBuffersLocked(id);
+ it = mStreamMap.erase(it);
+ } else {
+ ++it;
+ }
+ }
+
+ // Track video streams
+ mVideoStreamIds.clear();
+ for (const auto& stream : requestedConfiguration.streams) {
+ if (stream.streamType == StreamType::OUTPUT &&
+ stream.usage &
+ graphics::common::V1_0::BufferUsage::VIDEO_ENCODER) {
+ mVideoStreamIds.push_back(stream.id);
+ }
+ }
+ mResultBatcher.setBatchedStreams(mVideoStreamIds);
+}
+
Return<void> CameraDeviceSession::configureStreams(
const StreamConfiguration& requestedConfiguration,
ICameraDeviceSession::configureStreams_cb _hidl_cb) {
@@ -840,42 +923,11 @@
return Void();
}
- camera3_stream_configuration_t stream_list;
+ camera3_stream_configuration_t stream_list{};
hidl_vec<camera3_stream_t*> streams;
-
- stream_list.operation_mode = (uint32_t) requestedConfiguration.operationMode;
- stream_list.num_streams = requestedConfiguration.streams.size();
- streams.resize(stream_list.num_streams);
- stream_list.streams = streams.data();
-
- for (uint32_t i = 0; i < stream_list.num_streams; i++) {
- int id = requestedConfiguration.streams[i].id;
-
- if (mStreamMap.count(id) == 0) {
- Camera3Stream stream;
- convertFromHidl(requestedConfiguration.streams[i], &stream);
- mStreamMap[id] = stream;
- mStreamMap[id].data_space = mapToLegacyDataspace(
- mStreamMap[id].data_space);
- mCirculatingBuffers.emplace(stream.mId, CirculatingBuffers{});
- } else {
- // width/height/format must not change, but usage/rotation might need to change
- if (mStreamMap[id].stream_type !=
- (int) requestedConfiguration.streams[i].streamType ||
- mStreamMap[id].width != requestedConfiguration.streams[i].width ||
- mStreamMap[id].height != requestedConfiguration.streams[i].height ||
- mStreamMap[id].format != (int) requestedConfiguration.streams[i].format ||
- mStreamMap[id].data_space !=
- mapToLegacyDataspace( static_cast<android_dataspace_t> (
- requestedConfiguration.streams[i].dataSpace))) {
- ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id);
- _hidl_cb(Status::INTERNAL_ERROR, outStreams);
- return Void();
- }
- mStreamMap[id].rotation = (int) requestedConfiguration.streams[i].rotation;
- mStreamMap[id].usage = (uint32_t) requestedConfiguration.streams[i].usage;
- }
- streams[i] = &mStreamMap[id];
+ if (!preProcessConfigurationLocked(requestedConfiguration, &stream_list, &streams)) {
+ _hidl_cb(Status::INTERNAL_ERROR, outStreams);
+ return Void();
}
ATRACE_BEGIN("camera3->configure_streams");
@@ -885,39 +937,7 @@
// In case Hal returns error most likely it was not able to release
// the corresponding resources of the deleted streams.
if (ret == OK) {
- // delete unused streams, note we do this after adding new streams to ensure new stream
- // will not have the same address as deleted stream, and HAL has a chance to reference
- // the to be deleted stream in configure_streams call
- for(auto it = mStreamMap.begin(); it != mStreamMap.end();) {
- int id = it->first;
- bool found = false;
- for (const auto& stream : requestedConfiguration.streams) {
- if (id == stream.id) {
- found = true;
- break;
- }
- }
- if (!found) {
- // Unmap all buffers of deleted stream
- // in case the configuration call succeeds and HAL
- // is able to release the corresponding resources too.
- cleanupBuffersLocked(id);
- it = mStreamMap.erase(it);
- } else {
- ++it;
- }
- }
-
- // Track video streams
- mVideoStreamIds.clear();
- for (const auto& stream : requestedConfiguration.streams) {
- if (stream.streamType == StreamType::OUTPUT &&
- stream.usage &
- graphics::common::V1_0::BufferUsage::VIDEO_ENCODER) {
- mVideoStreamIds.push_back(stream.id);
- }
- }
- mResultBatcher.setBatchedStreams(mVideoStreamIds);
+ postProcessConfigurationLocked(requestedConfiguration);
}
if (ret == -EINVAL) {
diff --git a/camera/device/3.2/default/CameraDeviceSession.h b/camera/device/3.2/default/CameraDeviceSession.h
index 69e2e2c..c5a63c8 100644
--- a/camera/device/3.2/default/CameraDeviceSession.h
+++ b/camera/device/3.2/default/CameraDeviceSession.h
@@ -112,6 +112,12 @@
Return<Status> flush();
Return<void> close();
+ //Helper methods
+ bool preProcessConfigurationLocked(const StreamConfiguration& requestedConfiguration,
+ camera3_stream_configuration_t *stream_list /*out*/,
+ hidl_vec<camera3_stream_t*> *streams /*out*/);
+ void postProcessConfigurationLocked(const StreamConfiguration& requestedConfiguration);
+
protected:
// protecting mClosed/mDisconnected/mInitFail
diff --git a/camera/device/3.3/default/CameraDeviceSession.cpp b/camera/device/3.3/default/CameraDeviceSession.cpp
index f877895..d36e9ed 100644
--- a/camera/device/3.3/default/CameraDeviceSession.cpp
+++ b/camera/device/3.3/default/CameraDeviceSession.cpp
@@ -77,42 +77,11 @@
return Void();
}
- camera3_stream_configuration_t stream_list;
+ camera3_stream_configuration_t stream_list{};
hidl_vec<camera3_stream_t*> streams;
-
- stream_list.operation_mode = (uint32_t) requestedConfiguration.operationMode;
- stream_list.num_streams = requestedConfiguration.streams.size();
- streams.resize(stream_list.num_streams);
- stream_list.streams = streams.data();
-
- for (uint32_t i = 0; i < stream_list.num_streams; i++) {
- int id = requestedConfiguration.streams[i].id;
-
- if (mStreamMap.count(id) == 0) {
- Camera3Stream stream;
- V3_2::implementation::convertFromHidl(requestedConfiguration.streams[i], &stream);
- mStreamMap[id] = stream;
- mStreamMap[id].data_space = mapToLegacyDataspace(
- mStreamMap[id].data_space);
- mCirculatingBuffers.emplace(stream.mId, CirculatingBuffers{});
- } else {
- // width/height/format must not change, but usage/rotation might need to change
- if (mStreamMap[id].stream_type !=
- (int) requestedConfiguration.streams[i].streamType ||
- mStreamMap[id].width != requestedConfiguration.streams[i].width ||
- mStreamMap[id].height != requestedConfiguration.streams[i].height ||
- mStreamMap[id].format != (int) requestedConfiguration.streams[i].format ||
- mStreamMap[id].data_space !=
- mapToLegacyDataspace( static_cast<android_dataspace_t> (
- requestedConfiguration.streams[i].dataSpace))) {
- ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id);
- _hidl_cb(Status::INTERNAL_ERROR, outStreams);
- return Void();
- }
- mStreamMap[id].rotation = (int) requestedConfiguration.streams[i].rotation;
- mStreamMap[id].usage = (uint32_t) requestedConfiguration.streams[i].usage;
- }
- streams[i] = &mStreamMap[id];
+ if (!preProcessConfigurationLocked(requestedConfiguration, &stream_list, &streams)) {
+ _hidl_cb(Status::INTERNAL_ERROR, outStreams);
+ return Void();
}
ATRACE_BEGIN("camera3->configure_streams");
@@ -122,39 +91,7 @@
// In case Hal returns error most likely it was not able to release
// the corresponding resources of the deleted streams.
if (ret == OK) {
- // delete unused streams, note we do this after adding new streams to ensure new stream
- // will not have the same address as deleted stream, and HAL has a chance to reference
- // the to be deleted stream in configure_streams call
- for(auto it = mStreamMap.begin(); it != mStreamMap.end();) {
- int id = it->first;
- bool found = false;
- for (const auto& stream : requestedConfiguration.streams) {
- if (id == stream.id) {
- found = true;
- break;
- }
- }
- if (!found) {
- // Unmap all buffers of deleted stream
- // in case the configuration call succeeds and HAL
- // is able to release the corresponding resources too.
- cleanupBuffersLocked(id);
- it = mStreamMap.erase(it);
- } else {
- ++it;
- }
- }
-
- // Track video streams
- mVideoStreamIds.clear();
- for (const auto& stream : requestedConfiguration.streams) {
- if (stream.streamType == V3_2::StreamType::OUTPUT &&
- stream.usage &
- graphics::common::V1_0::BufferUsage::VIDEO_ENCODER) {
- mVideoStreamIds.push_back(stream.id);
- }
- }
- mResultBatcher.setBatchedStreams(mVideoStreamIds);
+ postProcessConfigurationLocked(requestedConfiguration);
}
if (ret == -EINVAL) {
diff --git a/camera/device/3.4/Android.bp b/camera/device/3.4/Android.bp
new file mode 100644
index 0000000..2523fa8
--- /dev/null
+++ b/camera/device/3.4/Android.bp
@@ -0,0 +1,25 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+ name: "android.hardware.camera.device@3.4",
+ root: "android.hardware",
+ vndk: {
+ enabled: true,
+ },
+ srcs: [
+ "types.hal",
+ "ICameraDeviceSession.hal",
+ ],
+ interfaces: [
+ "android.hardware.camera.common@1.0",
+ "android.hardware.camera.device@3.2",
+ "android.hardware.camera.device@3.3",
+ "android.hardware.graphics.common@1.0",
+ "android.hidl.base@1.0",
+ ],
+ types: [
+ "StreamConfiguration",
+ ],
+ gen_java: false,
+}
+
diff --git a/camera/device/3.4/ICameraDeviceSession.hal b/camera/device/3.4/ICameraDeviceSession.hal
new file mode 100644
index 0000000..e5693b2
--- /dev/null
+++ b/camera/device/3.4/ICameraDeviceSession.hal
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera.device@3.4;
+
+import android.hardware.camera.common@1.0::Status;
+import @3.3::ICameraDeviceSession;
+import @3.3::HalStreamConfiguration;
+
+/**
+ * Camera device active session interface.
+ *
+ * Obtained via ICameraDevice::open(), this interface contains the methods to
+ * configure and request captures from an active camera device.
+ */
+interface ICameraDeviceSession extends @3.3::ICameraDeviceSession {
+
+ /**
+ * configureStreams_3_4:
+ *
+ * Identical to @3.3::ICameraDeviceSession.configureStreams, except that:
+ *
+ * - The requested configuration includes session parameters.
+ *
+ * @return Status Status code for the operation, one of:
+ * OK:
+ * On successful stream configuration.
+ * INTERNAL_ERROR:
+ * If there has been a fatal error and the device is no longer
+ * operational. Only close() can be called successfully by the
+ * framework after this error is returned.
+ * ILLEGAL_ARGUMENT:
+ * If the requested stream configuration is invalid. Some examples
+ * of invalid stream configurations include:
+ * - Including more than 1 INPUT stream
+ * - Not including any OUTPUT streams
+ * - Including streams with unsupported formats, or an unsupported
+ * size for that format.
+ * - Including too many output streams of a certain format.
+ * - Unsupported rotation configuration
+ * - Stream sizes/formats don't satisfy the
+ * camera3_stream_configuration_t->operation_mode requirements
+ * for non-NORMAL mode, or the requested operation_mode is not
+ * supported by the HAL.
+ * - Unsupported usage flag
+ * The camera service cannot filter out all possible illegal stream
+ * configurations, since some devices may support more simultaneous
+ * streams or larger stream resolutions than the minimum required
+ * for a given camera device hardware level. The HAL must return an
+ * ILLEGAL_ARGUMENT for any unsupported stream set, and then be
+ * ready to accept a future valid stream configuration in a later
+ * configureStreams call.
+ * @return halConfiguration The stream parameters desired by the HAL for
+ * each stream, including maximum buffers, the usage flags, and the
+ * override format.
+ */
+ configureStreams_3_4(@3.4::StreamConfiguration requestedConfiguration)
+ generates (Status status,
+ @3.3::HalStreamConfiguration halConfiguration);
+
+};
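The default implementation later in this change reads two fields from the new @3.4::StreamConfiguration: the embedded @3.2 configuration (v3_2) and the session parameters (sessionParams). From the framework side, a call would then look roughly like the sketch below (variable names and metadata contents are illustrative):

    // Sketch: configuring streams with session parameters on a @3.4 session.
    V3_4::StreamConfiguration config = {};
    config.v3_2 = config32;               // streams + operationMode, as in @3.2
    config.sessionParams = sessionMeta;   // serialized camera metadata, may be empty

    session->configureStreams_3_4(config,
        [&](auto status, const auto& halConfig) {
            // halConfig holds the per-stream overrides chosen by the HAL
            // (max buffers, usage flags, override format).
        });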
diff --git a/camera/device/3.4/default/Android.bp b/camera/device/3.4/default/Android.bp
new file mode 100644
index 0000000..c0ce838
--- /dev/null
+++ b/camera/device/3.4/default/Android.bp
@@ -0,0 +1,56 @@
+//
+// Copyright (C) 2017 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+cc_library_headers {
+ name: "camera.device@3.4-impl_headers",
+ vendor: true,
+ export_include_dirs: ["include/device_v3_4_impl"],
+}
+
+cc_library_shared {
+ name: "camera.device@3.4-impl",
+ defaults: ["hidl_defaults"],
+ proprietary: true,
+ vendor: true,
+ srcs: [
+ "CameraDevice.cpp",
+ "CameraDeviceSession.cpp",
+ ],
+ shared_libs: [
+ "libhidlbase",
+ "libhidltransport",
+ "libutils",
+ "libcutils",
+ "camera.device@3.2-impl",
+ "camera.device@3.3-impl",
+ "android.hardware.camera.device@3.2",
+ "android.hardware.camera.device@3.3",
+ "android.hardware.camera.device@3.4",
+ "android.hardware.camera.provider@2.4",
+ "android.hardware.graphics.mapper@2.0",
+ "liblog",
+ "libhardware",
+ "libcamera_metadata",
+ "libfmq",
+ ],
+ static_libs: [
+ "android.hardware.camera.common@1.0-helper",
+ ],
+ local_include_dirs: ["include/device_v3_4_impl"],
+ export_shared_lib_headers: [
+ "libfmq",
+ ],
+}
diff --git a/camera/device/3.4/default/CameraDevice.cpp b/camera/device/3.4/default/CameraDevice.cpp
new file mode 100644
index 0000000..d73833a
--- /dev/null
+++ b/camera/device/3.4/default/CameraDevice.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CamDev@3.4-impl"
+#include <log/log.h>
+
+#include <utils/Vector.h>
+#include <utils/Trace.h>
+#include "CameraDevice_3_4.h"
+#include <include/convert.h>
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace V3_4 {
+namespace implementation {
+
+using ::android::hardware::camera::common::V1_0::Status;
+using namespace ::android::hardware::camera::device;
+
+CameraDevice::CameraDevice(
+ sp<CameraModule> module, const std::string& cameraId,
+ const SortedVector<std::pair<std::string, std::string>>& cameraDeviceNames) :
+ V3_2::implementation::CameraDevice(module, cameraId, cameraDeviceNames) {
+}
+
+CameraDevice::~CameraDevice() {
+}
+
+sp<V3_2::implementation::CameraDeviceSession> CameraDevice::createSession(camera3_device_t* device,
+ const camera_metadata_t* deviceInfo,
+ const sp<V3_2::ICameraDeviceCallback>& callback) {
+ sp<CameraDeviceSession> session = new CameraDeviceSession(device, deviceInfo, callback);
+ IF_ALOGV() {
+ session->getInterface()->interfaceChain([](
+ ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) {
+ ALOGV("Session interface chain:");
+ for (auto iface : interfaceChain) {
+ ALOGV(" %s", iface.c_str());
+ }
+ });
+ }
+ return session;
+}
+
+// End of methods from ::android::hardware::camera::device::V3_2::ICameraDevice.
+
+} // namespace implementation
+} // namespace V3_4
+} // namespace device
+} // namespace camera
+} // namespace hardware
+} // namespace android
diff --git a/camera/device/3.4/default/CameraDeviceSession.cpp b/camera/device/3.4/default/CameraDeviceSession.cpp
new file mode 100644
index 0000000..0ae470f
--- /dev/null
+++ b/camera/device/3.4/default/CameraDeviceSession.cpp
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CamDevSession@3.4-impl"
+#include <android/log.h>
+
+#include <set>
+#include <utils/Trace.h>
+#include <hardware/gralloc.h>
+#include <hardware/gralloc1.h>
+#include "CameraDeviceSession.h"
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace V3_4 {
+namespace implementation {
+
+CameraDeviceSession::CameraDeviceSession(
+ camera3_device_t* device,
+ const camera_metadata_t* deviceInfo,
+ const sp<V3_2::ICameraDeviceCallback>& callback) :
+ V3_3::implementation::CameraDeviceSession(device, deviceInfo, callback) {
+}
+
+CameraDeviceSession::~CameraDeviceSession() {
+}
+
+Return<void> CameraDeviceSession::configureStreams_3_4(
+ const V3_4::StreamConfiguration& requestedConfiguration,
+ ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) {
+ Status status = initStatus();
+ HalStreamConfiguration outStreams;
+
+ // Hold the inflight lock for the entire configureStreams scope, since there must not be
+ // any inflight requests/results during stream configuration.
+ Mutex::Autolock _l(mInflightLock);
+ if (!mInflightBuffers.empty()) {
+ ALOGE("%s: trying to configureStreams while there are still %zu inflight buffers!",
+ __FUNCTION__, mInflightBuffers.size());
+ _hidl_cb(Status::INTERNAL_ERROR, outStreams);
+ return Void();
+ }
+
+ if (!mInflightAETriggerOverrides.empty()) {
+ ALOGE("%s: trying to configureStreams while there are still %zu inflight"
+ " trigger overrides!", __FUNCTION__,
+ mInflightAETriggerOverrides.size());
+ _hidl_cb(Status::INTERNAL_ERROR, outStreams);
+ return Void();
+ }
+
+ if (!mInflightRawBoostPresent.empty()) {
+ ALOGE("%s: trying to configureStreams while there are still %zu inflight"
+ " boost overrides!", __FUNCTION__,
+ mInflightRawBoostPresent.size());
+ _hidl_cb(Status::INTERNAL_ERROR, outStreams);
+ return Void();
+ }
+
+ if (status != Status::OK) {
+ _hidl_cb(status, outStreams);
+ return Void();
+ }
+
+ const camera_metadata_t *paramBuffer = nullptr;
+ if (0 < requestedConfiguration.sessionParams.size()) {
+ ::android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams;
+ V3_2::implementation::convertFromHidl(requestedConfiguration.sessionParams, &paramBuffer);
+ }
+
+ camera3_stream_configuration_t stream_list{};
+ hidl_vec<camera3_stream_t*> streams;
+ stream_list.session_parameters = paramBuffer;
+ if (!preProcessConfigurationLocked(requestedConfiguration.v3_2, &stream_list, &streams)) {
+ _hidl_cb(Status::INTERNAL_ERROR, outStreams);
+ return Void();
+ }
+
+ ATRACE_BEGIN("camera3->configure_streams");
+ status_t ret = mDevice->ops->configure_streams(mDevice, &stream_list);
+ ATRACE_END();
+
+ // If the HAL returns an error, it most likely was not able to release
+ // the resources corresponding to the deleted streams.
+ if (ret == OK) {
+ postProcessConfigurationLocked(requestedConfiguration.v3_2);
+ }
+
+ if (ret == -EINVAL) {
+ status = Status::ILLEGAL_ARGUMENT;
+ } else if (ret != OK) {
+ status = Status::INTERNAL_ERROR;
+ } else {
+ V3_3::implementation::convertToHidl(stream_list, &outStreams);
+ mFirstRequest = true;
+ }
+
+ _hidl_cb(status, outStreams);
+ return Void();
+}
+
+} // namespace implementation
+} // namespace V3_4
+} // namespace device
+} // namespace camera
+} // namespace hardware
+} // namespace android
diff --git a/camera/device/3.4/default/OWNERS b/camera/device/3.4/default/OWNERS
new file mode 100644
index 0000000..18acfee
--- /dev/null
+++ b/camera/device/3.4/default/OWNERS
@@ -0,0 +1,6 @@
+cychen@google.com
+epeev@google.com
+etalvala@google.com
+shuzhenwang@google.com
+yinchiayeh@google.com
+zhijunhe@google.com
diff --git a/camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h b/camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h
new file mode 100644
index 0000000..bff1734
--- /dev/null
+++ b/camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h
@@ -0,0 +1,146 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE3SESSION_H
+#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE3SESSION_H
+
+#include <android/hardware/camera/device/3.2/ICameraDevice.h>
+#include <android/hardware/camera/device/3.3/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <../../3.3/default/CameraDeviceSession.h>
+#include <../../3.3/default/include/convert.h>
+#include <fmq/MessageQueue.h>
+#include <hidl/MQDescriptor.h>
+#include <hidl/Status.h>
+#include <deque>
+#include <map>
+#include <unordered_map>
+#include "CameraMetadata.h"
+#include "HandleImporter.h"
+#include "hardware/camera3.h"
+#include "hardware/camera_common.h"
+#include "utils/Mutex.h"
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace V3_4 {
+namespace implementation {
+
+using namespace ::android::hardware::camera::device;
+using ::android::hardware::camera::device::V3_2::CaptureRequest;
+using ::android::hardware::camera::device::V3_2::StreamConfiguration;
+using ::android::hardware::camera::device::V3_3::HalStreamConfiguration;
+using ::android::hardware::camera::device::V3_4::ICameraDeviceSession;
+using ::android::hardware::camera::common::V1_0::Status;
+using ::android::hardware::camera::common::V1_0::helper::HandleImporter;
+using ::android::hardware::kSynchronizedReadWrite;
+using ::android::hardware::MessageQueue;
+using ::android::hardware::MQDescriptorSync;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::hidl_string;
+using ::android::sp;
+using ::android::Mutex;
+
+struct CameraDeviceSession : public V3_3::implementation::CameraDeviceSession {
+
+ CameraDeviceSession(camera3_device_t*,
+ const camera_metadata_t* deviceInfo,
+ const sp<V3_2::ICameraDeviceCallback>&);
+ virtual ~CameraDeviceSession();
+
+ virtual sp<V3_2::ICameraDeviceSession> getInterface() override {
+ return new TrampolineSessionInterface_3_4(this);
+ }
+
+protected:
+ // Methods from v3.3 and earlier will trampoline to inherited implementation
+
+ // New methods for v3.4
+
+ Return<void> configureStreams_3_4(
+ const V3_4::StreamConfiguration& requestedConfiguration,
+ ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb);
+private:
+
+ struct TrampolineSessionInterface_3_4 : public ICameraDeviceSession {
+ TrampolineSessionInterface_3_4(sp<CameraDeviceSession> parent) :
+ mParent(parent) {}
+
+ virtual Return<void> constructDefaultRequestSettings(
+ V3_2::RequestTemplate type,
+ V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override {
+ return mParent->constructDefaultRequestSettings(type, _hidl_cb);
+ }
+
+ virtual Return<void> configureStreams(
+ const StreamConfiguration& requestedConfiguration,
+ V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override {
+ return mParent->configureStreams(requestedConfiguration, _hidl_cb);
+ }
+
+ virtual Return<void> processCaptureRequest(const hidl_vec<V3_2::CaptureRequest>& requests,
+ const hidl_vec<V3_2::BufferCache>& cachesToRemove,
+ V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override {
+ return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb);
+ }
+
+ virtual Return<void> getCaptureRequestMetadataQueue(
+ V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override {
+ return mParent->getCaptureRequestMetadataQueue(_hidl_cb);
+ }
+
+ virtual Return<void> getCaptureResultMetadataQueue(
+ V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override {
+ return mParent->getCaptureResultMetadataQueue(_hidl_cb);
+ }
+
+ virtual Return<Status> flush() override {
+ return mParent->flush();
+ }
+
+ virtual Return<void> close() override {
+ return mParent->close();
+ }
+
+ virtual Return<void> configureStreams_3_3(
+ const StreamConfiguration& requestedConfiguration,
+ configureStreams_3_3_cb _hidl_cb) override {
+ return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb);
+ }
+
+ virtual Return<void> configureStreams_3_4(
+ const V3_4::StreamConfiguration& requestedConfiguration,
+ configureStreams_3_3_cb _hidl_cb) override {
+ return mParent->configureStreams_3_4(requestedConfiguration, _hidl_cb);
+ }
+
+ private:
+ sp<CameraDeviceSession> mParent;
+ };
+};
+
+} // namespace implementation
+} // namespace V3_4
+} // namespace device
+} // namespace camera
+} // namespace hardware
+} // namespace android
+
+#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE3SESSION_H
diff --git a/camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h b/camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h
new file mode 100644
index 0000000..95ee20e
--- /dev/null
+++ b/camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE_H
+#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE_H
+
+#include "utils/Mutex.h"
+#include "CameraModule.h"
+#include "CameraMetadata.h"
+#include "CameraDeviceSession.h"
+#include <../../3.2/default/CameraDevice_3_2.h>
+
+#include <android/hardware/camera/device/3.2/ICameraDevice.h>
+#include <hidl/Status.h>
+#include <hidl/MQDescriptor.h>
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace V3_4 {
+namespace implementation {
+
+using namespace ::android::hardware::camera::device;
+using ::android::hardware::camera::common::V1_0::helper::CameraModule;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::hidl_string;
+using ::android::sp;
+
+/*
+ * The camera device HAL implementation is opened lazily (via the open call)
+ */
+struct CameraDevice : public V3_2::implementation::CameraDevice {
+
+ // Called by provider HAL.
+ // The provider HAL must ensure the uniqueness of the CameraDevice object per cameraId, or
+ // multiple CameraDevice objects could end up accessing the same physical camera. The provider
+ // also has to keep track of all CameraDevice objects so that it can notify a CameraDevice
+ // when its underlying camera is detached.
+ // Delegates nearly all work to CameraDevice_3_2.
+ CameraDevice(sp<CameraModule> module,
+ const std::string& cameraId,
+ const SortedVector<std::pair<std::string, std::string>>& cameraDeviceNames);
+ ~CameraDevice();
+
+protected:
+ virtual sp<V3_2::implementation::CameraDeviceSession> createSession(camera3_device_t*,
+ const camera_metadata_t* deviceInfo,
+ const sp<V3_2::ICameraDeviceCallback>&) override;
+
+};
+
+} // namespace implementation
+} // namespace V3_4
+} // namespace device
+} // namespace camera
+} // namespace hardware
+} // namespace android
+
+#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE_H
diff --git a/camera/device/3.4/types.hal b/camera/device/3.4/types.hal
new file mode 100644
index 0000000..c822717
--- /dev/null
+++ b/camera/device/3.4/types.hal
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera.device@3.4;
+
+import @3.2::StreamConfiguration;
+import @3.2::types;
+
+/**
+ * StreamConfiguration:
+ *
+ * Identical to @3.2::StreamConfiguration, except that it contains session parameters.
+ */
+struct StreamConfiguration {
+ /**
+ * The definition of StreamConfiguration from the prior version.
+ */
+ @3.2::StreamConfiguration v3_2;
+
+ /**
+ * Session-wide camera parameters.
+ *
+ * The session parameters contain the initial values of any request keys that were
+ * made available via ANDROID_REQUEST_AVAILABLE_SESSION_KEYS. The HAL implementation
+ * can advertise any settings whose value changes could potentially introduce
+ * unexpected delays while capture requests are being processed. Typical examples are
+ * parameters that trigger time-consuming HW re-configurations or internal camera
+ * pipeline updates. The field is optional; clients can choose to ignore it and avoid
+ * including any initial settings. If parameters are present, the HAL must examine
+ * their values and configure its internal camera pipeline accordingly.
+ */
+ CameraMetadata sessionParams;
+};
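(Editor's note: the sessionParams field documented above ends up as the session_parameters pointer that configureStreams_3_4 places into camera3_stream_configuration_t before calling into the legacy HAL. The following is only a hedged sketch of the consuming side, not part of this change; the function name and the chosen metadata tag are examples.)

// Illustrative sketch: how a vendor camera3 HAL below this default implementation
// might consume the session_parameters forwarded by configureStreams_3_4.
#include <hardware/camera3.h>
#include <system/camera_metadata.h>

static int example_configure_streams(const struct camera3_device* /*dev*/,
                                     camera3_stream_configuration_t* stream_list) {
    // session_parameters is null when the client supplied no initial settings.
    const camera_metadata_t* session = stream_list->session_parameters;
    if (session != nullptr) {
        camera_metadata_ro_entry_t entry = {};
        // Example only: read the initial AE target FPS range so the pipeline can be
        // set up for it before the first capture request arrives.
        if (find_camera_metadata_ro_entry(session,
                ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry) == 0 && entry.count >= 2) {
            // ... configure sensor/ISP for entry.data.i32[0]..entry.data.i32[1] fps ...
        }
    }
    // ... proceed with normal per-stream configuration ...
    return 0;
}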
diff --git a/camera/device/README.md b/camera/device/README.md
index 9f60781..3709cb8 100644
--- a/camera/device/README.md
+++ b/camera/device/README.md
@@ -87,3 +87,11 @@
supported in the legacy camera HAL.
Added in Android 8.1.
+
+### ICameraDevice.hal@3.4:
+
+A minor revision of ICameraDevice.hal@3.3.
+
+ - Adds support for session parameters during stream configuration.
+
+Added in Android 9.
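(Editor's note: for the caller's side of the session-parameter support described above, here is a hedged sketch, not part of this change; the function name and the 'settings' blob are placeholders, and sessionParams is a serialized camera_metadata buffer.)

// Illustrative sketch: wrapping an existing @3.2 configuration plus initial
// session parameters into the new @3.4 StreamConfiguration.
#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>

using ::android::sp;
using ::android::hardware::hidl_vec;
using ::android::hardware::camera::common::V1_0::Status;
using ::android::hardware::camera::device::V3_3::HalStreamConfiguration;
using ::android::hardware::camera::device::V3_4::ICameraDeviceSession;

void configureWithSessionParams(
        const sp<ICameraDeviceSession>& session,
        const ::android::hardware::camera::device::V3_2::StreamConfiguration& baseConfig,
        const hidl_vec<uint8_t>& settings) {
    ::android::hardware::camera::device::V3_4::StreamConfiguration config;
    config.v3_2 = baseConfig;         // streams and operation mode as in @3.2
    config.sessionParams = settings;  // may be empty if no initial settings
    session->configureStreams_3_4(config,
            [](Status status, const HalStreamConfiguration& halConfig) {
                // Inspect 'status' and the per-stream HAL configuration here.
                (void) status; (void) halConfig;
            });
}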
diff --git a/camera/metadata/3.2/docs.html b/camera/metadata/3.2/docs.html
deleted file mode 100644
index 004ecae..0000000
--- a/camera/metadata/3.2/docs.html
+++ /dev/null
@@ -1,27340 +0,0 @@
-<!DOCTYPE html>
-<html>
-<!-- Copyright (C) 2012 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<head>
- <!-- automatically generated from html.mako. do NOT edit directly -->
- <meta charset="utf-8" />
- <title>Android Camera HAL3.4 Properties</title>
- <style type="text/css">
- body { background-color: #f7f7f7; font-family: Roboto, sans-serif;}
- h1 { color: #333333; }
- h2 { color: #333333; }
- a:link { color: #258aaf; text-decoration: none}
- a:hover { color: #459aaf; text-decoration: underline }
- a:visited { color: #154a5f; text-decoration: none}
- .section { color: #eeeeee; font-size: 1.5em; font-weight: bold; background-color: #888888; padding: 0.5em 0em 0.5em 0.5em; border-width: thick thin thin thin; border-color: #111111 #777777 #777777 #777777}
- .kind { color: #eeeeee; font-size: 1.2em; font-weight: bold; padding-left: 1.5em; background-color: #aaaaaa }
- .entry { background-color: #f0f0f0 }
- .entry_cont { background-color: #f0f0f0 }
- .entries_header { background-color: #dddddd; text-align: center}
-
- /* toc style */
- .toc_section_header { font-size:1.3em; }
- .toc_kind_header { font-size:1.2em; }
- .toc_deprecated { text-decoration:line-through; }
-
- /* table column sizes */
- table { border-collapse:collapse; table-layout: fixed; width: 100%; word-wrap: break-word }
- td,th { border: 1px solid; border-color: #aaaaaa; padding-left: 0.5em; padding-right: 0.5em }
- .th_name { width: 20% }
- .th_units { width: 10% }
- .th_tags { width: 5% }
- .th_details { width: 25% }
- .th_type { width: 20% }
- .th_description { width: 20% }
- .th_range { width: 10% }
- td { font-size: 0.9em; }
-
- /* hide the first thead, we need it there only to enforce column sizes */
- .thead_dummy { visibility: hidden; }
-
- /* Entry flair */
- .entry_name { color: #333333; padding-left:1.0em; font-size:1.1em; font-family: monospace; vertical-align:top; }
- .entry_name_deprecated { text-decoration:line-through; }
-
- /* Entry type flair */
- .entry_type_name { font-size:1.1em; color: #669900; font-weight: bold;}
- .entry_type_name_enum:after { color: #669900; font-weight: bold; content:" (enum)" }
- .entry_type_visibility { font-weight: bolder; padding-left:1em}
- .entry_type_synthetic { font-weight: bolder; color: #996600; }
- .entry_type_hwlevel { font-weight: bolder; color: #000066; }
- .entry_type_deprecated { font-weight: bolder; color: #4D4D4D; }
- .entry_type_enum_name { font-family: monospace; font-weight: bolder; }
- .entry_type_enum_notes:before { content:" - " }
- .entry_type_enum_notes>p:first-child { display:inline; }
- .entry_type_enum_value:before { content:" = " }
- .entry_type_enum_value { font-family: monospace; }
- .entry ul { margin: 0 0 0 0; list-style-position: inside; padding-left: 0.5em; }
- .entry ul li { padding: 0 0 0 0; margin: 0 0 0 0;}
- .entry_range_deprecated { font-weight: bolder; }
-
- /* Entry tags flair */
- .entry_tags ul { list-style-type: none; }
-
- /* Entry details (full docs) flair */
- .entry_details_header { font-weight: bold; background-color: #dddddd;
- text-align: center; font-size: 1.1em; margin-left: 0em; margin-right: 0em; }
-
- /* Entry spacer flair */
- .entry_spacer { background-color: transparent; border-style: none; height: 0.5em; }
-
- /* TODO: generate abbr element for each tag link? */
- /* TODO for each x.y.z try to link it to the entry */
-
- </style>
-
- <style>
-
- {
- /* broken...
- supposedly there is a bug in chrome that it lays out tables before
- it knows its being printed, so the page-break-* styles are ignored
- */
- tr { page-break-after: always; page-break-inside: avoid; }
- }
-
- </style>
-</head>
-
-
-
-<body>
- <h1>Android Camera HAL3.2 Properties</h1>
-
-
- <h2>Table of Contents</h2>
- <ul class="toc">
- <li><a href="#tag_index" class="toc_section_header">Tags</a></li>
- <li>
- <span class="toc_section_header"><a href="#section_colorCorrection">colorCorrection</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.colorCorrection.mode">android.colorCorrection.mode</a></li>
- <li
- ><a href="#controls_android.colorCorrection.transform">android.colorCorrection.transform</a></li>
- <li
- ><a href="#controls_android.colorCorrection.gains">android.colorCorrection.gains</a></li>
- <li
- ><a href="#controls_android.colorCorrection.aberrationMode">android.colorCorrection.aberrationMode</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.colorCorrection.mode">android.colorCorrection.mode</a></li>
- <li
- ><a href="#dynamic_android.colorCorrection.transform">android.colorCorrection.transform</a></li>
- <li
- ><a href="#dynamic_android.colorCorrection.gains">android.colorCorrection.gains</a></li>
- <li
- ><a href="#dynamic_android.colorCorrection.aberrationMode">android.colorCorrection.aberrationMode</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.colorCorrection.availableAberrationModes">android.colorCorrection.availableAberrationModes</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_control">control</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.control.aeAntibandingMode">android.control.aeAntibandingMode</a></li>
- <li
- ><a href="#controls_android.control.aeExposureCompensation">android.control.aeExposureCompensation</a></li>
- <li
- ><a href="#controls_android.control.aeLock">android.control.aeLock</a></li>
- <li
- ><a href="#controls_android.control.aeMode">android.control.aeMode</a></li>
- <li
- ><a href="#controls_android.control.aeRegions">android.control.aeRegions</a></li>
- <li
- ><a href="#controls_android.control.aeTargetFpsRange">android.control.aeTargetFpsRange</a></li>
- <li
- ><a href="#controls_android.control.aePrecaptureTrigger">android.control.aePrecaptureTrigger</a></li>
- <li
- ><a href="#controls_android.control.afMode">android.control.afMode</a></li>
- <li
- ><a href="#controls_android.control.afRegions">android.control.afRegions</a></li>
- <li
- ><a href="#controls_android.control.afTrigger">android.control.afTrigger</a></li>
- <li
- ><a href="#controls_android.control.awbLock">android.control.awbLock</a></li>
- <li
- ><a href="#controls_android.control.awbMode">android.control.awbMode</a></li>
- <li
- ><a href="#controls_android.control.awbRegions">android.control.awbRegions</a></li>
- <li
- ><a href="#controls_android.control.captureIntent">android.control.captureIntent</a></li>
- <li
- ><a href="#controls_android.control.effectMode">android.control.effectMode</a></li>
- <li
- ><a href="#controls_android.control.mode">android.control.mode</a></li>
- <li
- ><a href="#controls_android.control.sceneMode">android.control.sceneMode</a></li>
- <li
- ><a href="#controls_android.control.videoStabilizationMode">android.control.videoStabilizationMode</a></li>
- <li
- ><a href="#controls_android.control.postRawSensitivityBoost">android.control.postRawSensitivityBoost</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.control.aeAvailableAntibandingModes">android.control.aeAvailableAntibandingModes</a></li>
- <li
- ><a href="#static_android.control.aeAvailableModes">android.control.aeAvailableModes</a></li>
- <li
- ><a href="#static_android.control.aeAvailableTargetFpsRanges">android.control.aeAvailableTargetFpsRanges</a></li>
- <li
- ><a href="#static_android.control.aeCompensationRange">android.control.aeCompensationRange</a></li>
- <li
- ><a href="#static_android.control.aeCompensationStep">android.control.aeCompensationStep</a></li>
- <li
- ><a href="#static_android.control.afAvailableModes">android.control.afAvailableModes</a></li>
- <li
- ><a href="#static_android.control.availableEffects">android.control.availableEffects</a></li>
- <li
- ><a href="#static_android.control.availableSceneModes">android.control.availableSceneModes</a></li>
- <li
- ><a href="#static_android.control.availableVideoStabilizationModes">android.control.availableVideoStabilizationModes</a></li>
- <li
- ><a href="#static_android.control.awbAvailableModes">android.control.awbAvailableModes</a></li>
- <li
- ><a href="#static_android.control.maxRegions">android.control.maxRegions</a></li>
- <li
- ><a href="#static_android.control.maxRegionsAe">android.control.maxRegionsAe</a></li>
- <li
- ><a href="#static_android.control.maxRegionsAwb">android.control.maxRegionsAwb</a></li>
- <li
- ><a href="#static_android.control.maxRegionsAf">android.control.maxRegionsAf</a></li>
- <li
- ><a href="#static_android.control.sceneModeOverrides">android.control.sceneModeOverrides</a></li>
- <li
- ><a href="#static_android.control.availableHighSpeedVideoConfigurations">android.control.availableHighSpeedVideoConfigurations</a></li>
- <li
- ><a href="#static_android.control.aeLockAvailable">android.control.aeLockAvailable</a></li>
- <li
- ><a href="#static_android.control.awbLockAvailable">android.control.awbLockAvailable</a></li>
- <li
- ><a href="#static_android.control.availableModes">android.control.availableModes</a></li>
- <li
- ><a href="#static_android.control.postRawSensitivityBoostRange">android.control.postRawSensitivityBoostRange</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- class="toc_deprecated"
- ><a href="#dynamic_android.control.aePrecaptureId">android.control.aePrecaptureId</a></li>
- <li
- ><a href="#dynamic_android.control.aeAntibandingMode">android.control.aeAntibandingMode</a></li>
- <li
- ><a href="#dynamic_android.control.aeExposureCompensation">android.control.aeExposureCompensation</a></li>
- <li
- ><a href="#dynamic_android.control.aeLock">android.control.aeLock</a></li>
- <li
- ><a href="#dynamic_android.control.aeMode">android.control.aeMode</a></li>
- <li
- ><a href="#dynamic_android.control.aeRegions">android.control.aeRegions</a></li>
- <li
- ><a href="#dynamic_android.control.aeTargetFpsRange">android.control.aeTargetFpsRange</a></li>
- <li
- ><a href="#dynamic_android.control.aePrecaptureTrigger">android.control.aePrecaptureTrigger</a></li>
- <li
- ><a href="#dynamic_android.control.aeState">android.control.aeState</a></li>
- <li
- ><a href="#dynamic_android.control.afMode">android.control.afMode</a></li>
- <li
- ><a href="#dynamic_android.control.afRegions">android.control.afRegions</a></li>
- <li
- ><a href="#dynamic_android.control.afTrigger">android.control.afTrigger</a></li>
- <li
- ><a href="#dynamic_android.control.afState">android.control.afState</a></li>
- <li
- class="toc_deprecated"
- ><a href="#dynamic_android.control.afTriggerId">android.control.afTriggerId</a></li>
- <li
- ><a href="#dynamic_android.control.awbLock">android.control.awbLock</a></li>
- <li
- ><a href="#dynamic_android.control.awbMode">android.control.awbMode</a></li>
- <li
- ><a href="#dynamic_android.control.awbRegions">android.control.awbRegions</a></li>
- <li
- ><a href="#dynamic_android.control.captureIntent">android.control.captureIntent</a></li>
- <li
- ><a href="#dynamic_android.control.awbState">android.control.awbState</a></li>
- <li
- ><a href="#dynamic_android.control.effectMode">android.control.effectMode</a></li>
- <li
- ><a href="#dynamic_android.control.mode">android.control.mode</a></li>
- <li
- ><a href="#dynamic_android.control.sceneMode">android.control.sceneMode</a></li>
- <li
- ><a href="#dynamic_android.control.videoStabilizationMode">android.control.videoStabilizationMode</a></li>
- <li
- ><a href="#dynamic_android.control.postRawSensitivityBoost">android.control.postRawSensitivityBoost</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_demosaic">demosaic</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.demosaic.mode">android.demosaic.mode</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_edge">edge</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.edge.mode">android.edge.mode</a></li>
- <li
- ><a href="#controls_android.edge.strength">android.edge.strength</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.edge.availableEdgeModes">android.edge.availableEdgeModes</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.edge.mode">android.edge.mode</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_flash">flash</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.flash.firingPower">android.flash.firingPower</a></li>
- <li
- ><a href="#controls_android.flash.firingTime">android.flash.firingTime</a></li>
- <li
- ><a href="#controls_android.flash.mode">android.flash.mode</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
-
- <li
- ><a href="#static_android.flash.info.available">android.flash.info.available</a></li>
- <li
- ><a href="#static_android.flash.info.chargeDuration">android.flash.info.chargeDuration</a></li>
-
- <li
- ><a href="#static_android.flash.colorTemperature">android.flash.colorTemperature</a></li>
- <li
- ><a href="#static_android.flash.maxEnergy">android.flash.maxEnergy</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.flash.firingPower">android.flash.firingPower</a></li>
- <li
- ><a href="#dynamic_android.flash.firingTime">android.flash.firingTime</a></li>
- <li
- ><a href="#dynamic_android.flash.mode">android.flash.mode</a></li>
- <li
- ><a href="#dynamic_android.flash.state">android.flash.state</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_hotPixel">hotPixel</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.hotPixel.mode">android.hotPixel.mode</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.hotPixel.availableHotPixelModes">android.hotPixel.availableHotPixelModes</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.hotPixel.mode">android.hotPixel.mode</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_jpeg">jpeg</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.jpeg.gpsLocation">android.jpeg.gpsLocation</a></li>
- <li
- ><a href="#controls_android.jpeg.gpsCoordinates">android.jpeg.gpsCoordinates</a></li>
- <li
- ><a href="#controls_android.jpeg.gpsProcessingMethod">android.jpeg.gpsProcessingMethod</a></li>
- <li
- ><a href="#controls_android.jpeg.gpsTimestamp">android.jpeg.gpsTimestamp</a></li>
- <li
- ><a href="#controls_android.jpeg.orientation">android.jpeg.orientation</a></li>
- <li
- ><a href="#controls_android.jpeg.quality">android.jpeg.quality</a></li>
- <li
- ><a href="#controls_android.jpeg.thumbnailQuality">android.jpeg.thumbnailQuality</a></li>
- <li
- ><a href="#controls_android.jpeg.thumbnailSize">android.jpeg.thumbnailSize</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.jpeg.availableThumbnailSizes">android.jpeg.availableThumbnailSizes</a></li>
- <li
- ><a href="#static_android.jpeg.maxSize">android.jpeg.maxSize</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.jpeg.gpsLocation">android.jpeg.gpsLocation</a></li>
- <li
- ><a href="#dynamic_android.jpeg.gpsCoordinates">android.jpeg.gpsCoordinates</a></li>
- <li
- ><a href="#dynamic_android.jpeg.gpsProcessingMethod">android.jpeg.gpsProcessingMethod</a></li>
- <li
- ><a href="#dynamic_android.jpeg.gpsTimestamp">android.jpeg.gpsTimestamp</a></li>
- <li
- ><a href="#dynamic_android.jpeg.orientation">android.jpeg.orientation</a></li>
- <li
- ><a href="#dynamic_android.jpeg.quality">android.jpeg.quality</a></li>
- <li
- ><a href="#dynamic_android.jpeg.size">android.jpeg.size</a></li>
- <li
- ><a href="#dynamic_android.jpeg.thumbnailQuality">android.jpeg.thumbnailQuality</a></li>
- <li
- ><a href="#dynamic_android.jpeg.thumbnailSize">android.jpeg.thumbnailSize</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_lens">lens</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.lens.aperture">android.lens.aperture</a></li>
- <li
- ><a href="#controls_android.lens.filterDensity">android.lens.filterDensity</a></li>
- <li
- ><a href="#controls_android.lens.focalLength">android.lens.focalLength</a></li>
- <li
- ><a href="#controls_android.lens.focusDistance">android.lens.focusDistance</a></li>
- <li
- ><a href="#controls_android.lens.opticalStabilizationMode">android.lens.opticalStabilizationMode</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
-
- <li
- ><a href="#static_android.lens.info.availableApertures">android.lens.info.availableApertures</a></li>
- <li
- ><a href="#static_android.lens.info.availableFilterDensities">android.lens.info.availableFilterDensities</a></li>
- <li
- ><a href="#static_android.lens.info.availableFocalLengths">android.lens.info.availableFocalLengths</a></li>
- <li
- ><a href="#static_android.lens.info.availableOpticalStabilization">android.lens.info.availableOpticalStabilization</a></li>
- <li
- ><a href="#static_android.lens.info.hyperfocalDistance">android.lens.info.hyperfocalDistance</a></li>
- <li
- ><a href="#static_android.lens.info.minimumFocusDistance">android.lens.info.minimumFocusDistance</a></li>
- <li
- ><a href="#static_android.lens.info.shadingMapSize">android.lens.info.shadingMapSize</a></li>
- <li
- ><a href="#static_android.lens.info.focusDistanceCalibration">android.lens.info.focusDistanceCalibration</a></li>
-
- <li
- ><a href="#static_android.lens.facing">android.lens.facing</a></li>
- <li
- ><a href="#static_android.lens.poseRotation">android.lens.poseRotation</a></li>
- <li
- ><a href="#static_android.lens.poseTranslation">android.lens.poseTranslation</a></li>
- <li
- ><a href="#static_android.lens.intrinsicCalibration">android.lens.intrinsicCalibration</a></li>
- <li
- ><a href="#static_android.lens.radialDistortion">android.lens.radialDistortion</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.lens.aperture">android.lens.aperture</a></li>
- <li
- ><a href="#dynamic_android.lens.filterDensity">android.lens.filterDensity</a></li>
- <li
- ><a href="#dynamic_android.lens.focalLength">android.lens.focalLength</a></li>
- <li
- ><a href="#dynamic_android.lens.focusDistance">android.lens.focusDistance</a></li>
- <li
- ><a href="#dynamic_android.lens.focusRange">android.lens.focusRange</a></li>
- <li
- ><a href="#dynamic_android.lens.opticalStabilizationMode">android.lens.opticalStabilizationMode</a></li>
- <li
- ><a href="#dynamic_android.lens.state">android.lens.state</a></li>
- <li
- ><a href="#dynamic_android.lens.poseRotation">android.lens.poseRotation</a></li>
- <li
- ><a href="#dynamic_android.lens.poseTranslation">android.lens.poseTranslation</a></li>
- <li
- ><a href="#dynamic_android.lens.intrinsicCalibration">android.lens.intrinsicCalibration</a></li>
- <li
- ><a href="#dynamic_android.lens.radialDistortion">android.lens.radialDistortion</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_noiseReduction">noiseReduction</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.noiseReduction.mode">android.noiseReduction.mode</a></li>
- <li
- ><a href="#controls_android.noiseReduction.strength">android.noiseReduction.strength</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.noiseReduction.availableNoiseReductionModes</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.noiseReduction.mode">android.noiseReduction.mode</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_quirks">quirks</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- class="toc_deprecated"
- ><a href="#static_android.quirks.meteringCropRegion">android.quirks.meteringCropRegion</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.quirks.triggerAfWithAuto">android.quirks.triggerAfWithAuto</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.quirks.useZslFormat">android.quirks.useZslFormat</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.quirks.usePartialResult">android.quirks.usePartialResult</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- class="toc_deprecated"
- ><a href="#dynamic_android.quirks.partialResult">android.quirks.partialResult</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_request">request</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- class="toc_deprecated"
- ><a href="#controls_android.request.frameCount">android.request.frameCount</a></li>
- <li
- ><a href="#controls_android.request.id">android.request.id</a></li>
- <li
- class="toc_deprecated"
- ><a href="#controls_android.request.inputStreams">android.request.inputStreams</a></li>
- <li
- ><a href="#controls_android.request.metadataMode">android.request.metadataMode</a></li>
- <li
- class="toc_deprecated"
- ><a href="#controls_android.request.outputStreams">android.request.outputStreams</a></li>
- <li
- class="toc_deprecated"
- ><a href="#controls_android.request.type">android.request.type</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.request.maxNumOutputStreams">android.request.maxNumOutputStreams</a></li>
- <li
- ><a href="#static_android.request.maxNumOutputRaw">android.request.maxNumOutputRaw</a></li>
- <li
- ><a href="#static_android.request.maxNumOutputProc">android.request.maxNumOutputProc</a></li>
- <li
- ><a href="#static_android.request.maxNumOutputProcStalling">android.request.maxNumOutputProcStalling</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.request.maxNumReprocessStreams">android.request.maxNumReprocessStreams</a></li>
- <li
- ><a href="#static_android.request.maxNumInputStreams">android.request.maxNumInputStreams</a></li>
- <li
- ><a href="#static_android.request.pipelineMaxDepth">android.request.pipelineMaxDepth</a></li>
- <li
- ><a href="#static_android.request.partialResultCount">android.request.partialResultCount</a></li>
- <li
- ><a href="#static_android.request.availableCapabilities">android.request.availableCapabilities</a></li>
- <li
- ><a href="#static_android.request.availableRequestKeys">android.request.availableRequestKeys</a></li>
- <li
- ><a href="#static_android.request.availableResultKeys">android.request.availableResultKeys</a></li>
- <li
- ><a href="#static_android.request.availableCharacteristicsKeys">android.request.availableCharacteristicsKeys</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- class="toc_deprecated"
- ><a href="#dynamic_android.request.frameCount">android.request.frameCount</a></li>
- <li
- ><a href="#dynamic_android.request.id">android.request.id</a></li>
- <li
- ><a href="#dynamic_android.request.metadataMode">android.request.metadataMode</a></li>
- <li
- class="toc_deprecated"
- ><a href="#dynamic_android.request.outputStreams">android.request.outputStreams</a></li>
- <li
- ><a href="#dynamic_android.request.pipelineDepth">android.request.pipelineDepth</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_scaler">scaler</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.scaler.cropRegion">android.scaler.cropRegion</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- class="toc_deprecated"
- ><a href="#static_android.scaler.availableFormats">android.scaler.availableFormats</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.scaler.availableJpegMinDurations">android.scaler.availableJpegMinDurations</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.scaler.availableJpegSizes">android.scaler.availableJpegSizes</a></li>
- <li
- ><a href="#static_android.scaler.availableMaxDigitalZoom">android.scaler.availableMaxDigitalZoom</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.scaler.availableProcessedMinDurations">android.scaler.availableProcessedMinDurations</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.scaler.availableProcessedSizes">android.scaler.availableProcessedSizes</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.scaler.availableRawMinDurations">android.scaler.availableRawMinDurations</a></li>
- <li
- class="toc_deprecated"
- ><a href="#static_android.scaler.availableRawSizes">android.scaler.availableRawSizes</a></li>
- <li
- ><a href="#static_android.scaler.availableInputOutputFormatsMap">android.scaler.availableInputOutputFormatsMap</a></li>
- <li
- ><a href="#static_android.scaler.availableStreamConfigurations">android.scaler.availableStreamConfigurations</a></li>
- <li
- ><a href="#static_android.scaler.availableMinFrameDurations">android.scaler.availableMinFrameDurations</a></li>
- <li
- ><a href="#static_android.scaler.availableStallDurations">android.scaler.availableStallDurations</a></li>
- <li
- ><a href="#static_android.scaler.streamConfigurationMap">android.scaler.streamConfigurationMap</a></li>
- <li
- ><a href="#static_android.scaler.croppingType">android.scaler.croppingType</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.scaler.cropRegion">android.scaler.cropRegion</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_sensor">sensor</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.sensor.exposureTime">android.sensor.exposureTime</a></li>
- <li
- ><a href="#controls_android.sensor.frameDuration">android.sensor.frameDuration</a></li>
- <li
- ><a href="#controls_android.sensor.sensitivity">android.sensor.sensitivity</a></li>
- <li
- ><a href="#controls_android.sensor.testPatternData">android.sensor.testPatternData</a></li>
- <li
- ><a href="#controls_android.sensor.testPatternMode">android.sensor.testPatternMode</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
-
- <li
- ><a href="#static_android.sensor.info.activeArraySize">android.sensor.info.activeArraySize</a></li>
- <li
- ><a href="#static_android.sensor.info.sensitivityRange">android.sensor.info.sensitivityRange</a></li>
- <li
- ><a href="#static_android.sensor.info.colorFilterArrangement">android.sensor.info.colorFilterArrangement</a></li>
- <li
- ><a href="#static_android.sensor.info.exposureTimeRange">android.sensor.info.exposureTimeRange</a></li>
- <li
- ><a href="#static_android.sensor.info.maxFrameDuration">android.sensor.info.maxFrameDuration</a></li>
- <li
- ><a href="#static_android.sensor.info.physicalSize">android.sensor.info.physicalSize</a></li>
- <li
- ><a href="#static_android.sensor.info.pixelArraySize">android.sensor.info.pixelArraySize</a></li>
- <li
- ><a href="#static_android.sensor.info.whiteLevel">android.sensor.info.whiteLevel</a></li>
- <li
- ><a href="#static_android.sensor.info.timestampSource">android.sensor.info.timestampSource</a></li>
- <li
- ><a href="#static_android.sensor.info.lensShadingApplied">android.sensor.info.lensShadingApplied</a></li>
- <li
- ><a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.sensor.info.preCorrectionActiveArraySize</a></li>
-
- <li
- ><a href="#static_android.sensor.referenceIlluminant1">android.sensor.referenceIlluminant1</a></li>
- <li
- ><a href="#static_android.sensor.referenceIlluminant2">android.sensor.referenceIlluminant2</a></li>
- <li
- ><a href="#static_android.sensor.calibrationTransform1">android.sensor.calibrationTransform1</a></li>
- <li
- ><a href="#static_android.sensor.calibrationTransform2">android.sensor.calibrationTransform2</a></li>
- <li
- ><a href="#static_android.sensor.colorTransform1">android.sensor.colorTransform1</a></li>
- <li
- ><a href="#static_android.sensor.colorTransform2">android.sensor.colorTransform2</a></li>
- <li
- ><a href="#static_android.sensor.forwardMatrix1">android.sensor.forwardMatrix1</a></li>
- <li
- ><a href="#static_android.sensor.forwardMatrix2">android.sensor.forwardMatrix2</a></li>
- <li
- ><a href="#static_android.sensor.baseGainFactor">android.sensor.baseGainFactor</a></li>
- <li
- ><a href="#static_android.sensor.blackLevelPattern">android.sensor.blackLevelPattern</a></li>
- <li
- ><a href="#static_android.sensor.maxAnalogSensitivity">android.sensor.maxAnalogSensitivity</a></li>
- <li
- ><a href="#static_android.sensor.orientation">android.sensor.orientation</a></li>
- <li
- ><a href="#static_android.sensor.profileHueSatMapDimensions">android.sensor.profileHueSatMapDimensions</a></li>
- <li
- ><a href="#static_android.sensor.availableTestPatternModes">android.sensor.availableTestPatternModes</a></li>
- <li
- ><a href="#static_android.sensor.opticalBlackRegions">android.sensor.opticalBlackRegions</a></li>
- <li
- ><a href="#static_android.sensor.opaqueRawSize">android.sensor.opaqueRawSize</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.sensor.exposureTime">android.sensor.exposureTime</a></li>
- <li
- ><a href="#dynamic_android.sensor.frameDuration">android.sensor.frameDuration</a></li>
- <li
- ><a href="#dynamic_android.sensor.sensitivity">android.sensor.sensitivity</a></li>
- <li
- ><a href="#dynamic_android.sensor.timestamp">android.sensor.timestamp</a></li>
- <li
- ><a href="#dynamic_android.sensor.temperature">android.sensor.temperature</a></li>
- <li
- ><a href="#dynamic_android.sensor.neutralColorPoint">android.sensor.neutralColorPoint</a></li>
- <li
- ><a href="#dynamic_android.sensor.noiseProfile">android.sensor.noiseProfile</a></li>
- <li
- ><a href="#dynamic_android.sensor.profileHueSatMap">android.sensor.profileHueSatMap</a></li>
- <li
- ><a href="#dynamic_android.sensor.profileToneCurve">android.sensor.profileToneCurve</a></li>
- <li
- ><a href="#dynamic_android.sensor.greenSplit">android.sensor.greenSplit</a></li>
- <li
- ><a href="#dynamic_android.sensor.testPatternData">android.sensor.testPatternData</a></li>
- <li
- ><a href="#dynamic_android.sensor.testPatternMode">android.sensor.testPatternMode</a></li>
- <li
- ><a href="#dynamic_android.sensor.rollingShutterSkew">android.sensor.rollingShutterSkew</a></li>
- <li
- ><a href="#dynamic_android.sensor.dynamicBlackLevel">android.sensor.dynamicBlackLevel</a></li>
- <li
- ><a href="#dynamic_android.sensor.dynamicWhiteLevel">android.sensor.dynamicWhiteLevel</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_shading">shading</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.shading.mode">android.shading.mode</a></li>
- <li
- ><a href="#controls_android.shading.strength">android.shading.strength</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.shading.mode">android.shading.mode</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.shading.availableModes">android.shading.availableModes</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_statistics">statistics</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.statistics.faceDetectMode">android.statistics.faceDetectMode</a></li>
- <li
- ><a href="#controls_android.statistics.histogramMode">android.statistics.histogramMode</a></li>
- <li
- ><a href="#controls_android.statistics.sharpnessMapMode">android.statistics.sharpnessMapMode</a></li>
- <li
- ><a href="#controls_android.statistics.hotPixelMapMode">android.statistics.hotPixelMapMode</a></li>
- <li
- ><a href="#controls_android.statistics.lensShadingMapMode">android.statistics.lensShadingMapMode</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
-
- <li
- ><a href="#static_android.statistics.info.availableFaceDetectModes">android.statistics.info.availableFaceDetectModes</a></li>
- <li
- ><a href="#static_android.statistics.info.histogramBucketCount">android.statistics.info.histogramBucketCount</a></li>
- <li
- ><a href="#static_android.statistics.info.maxFaceCount">android.statistics.info.maxFaceCount</a></li>
- <li
- ><a href="#static_android.statistics.info.maxHistogramCount">android.statistics.info.maxHistogramCount</a></li>
- <li
- ><a href="#static_android.statistics.info.maxSharpnessMapValue">android.statistics.info.maxSharpnessMapValue</a></li>
- <li
- ><a href="#static_android.statistics.info.sharpnessMapSize">android.statistics.info.sharpnessMapSize</a></li>
- <li
- ><a href="#static_android.statistics.info.availableHotPixelMapModes">android.statistics.info.availableHotPixelMapModes</a></li>
- <li
- ><a href="#static_android.statistics.info.availableLensShadingMapModes">android.statistics.info.availableLensShadingMapModes</a></li>
-
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.statistics.faceDetectMode">android.statistics.faceDetectMode</a></li>
- <li
- ><a href="#dynamic_android.statistics.faceIds">android.statistics.faceIds</a></li>
- <li
- ><a href="#dynamic_android.statistics.faceLandmarks">android.statistics.faceLandmarks</a></li>
- <li
- ><a href="#dynamic_android.statistics.faceRectangles">android.statistics.faceRectangles</a></li>
- <li
- ><a href="#dynamic_android.statistics.faceScores">android.statistics.faceScores</a></li>
- <li
- ><a href="#dynamic_android.statistics.faces">android.statistics.faces</a></li>
- <li
- ><a href="#dynamic_android.statistics.histogram">android.statistics.histogram</a></li>
- <li
- ><a href="#dynamic_android.statistics.histogramMode">android.statistics.histogramMode</a></li>
- <li
- ><a href="#dynamic_android.statistics.sharpnessMap">android.statistics.sharpnessMap</a></li>
- <li
- ><a href="#dynamic_android.statistics.sharpnessMapMode">android.statistics.sharpnessMapMode</a></li>
- <li
- ><a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.statistics.lensShadingCorrectionMap</a></li>
- <li
- ><a href="#dynamic_android.statistics.lensShadingMap">android.statistics.lensShadingMap</a></li>
- <li
- class="toc_deprecated"
- ><a href="#dynamic_android.statistics.predictedColorGains">android.statistics.predictedColorGains</a></li>
- <li
- class="toc_deprecated"
- ><a href="#dynamic_android.statistics.predictedColorTransform">android.statistics.predictedColorTransform</a></li>
- <li
- ><a href="#dynamic_android.statistics.sceneFlicker">android.statistics.sceneFlicker</a></li>
- <li
- ><a href="#dynamic_android.statistics.hotPixelMapMode">android.statistics.hotPixelMapMode</a></li>
- <li
- ><a href="#dynamic_android.statistics.hotPixelMap">android.statistics.hotPixelMap</a></li>
- <li
- ><a href="#dynamic_android.statistics.lensShadingMapMode">android.statistics.lensShadingMapMode</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_tonemap">tonemap</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.tonemap.curveBlue">android.tonemap.curveBlue</a></li>
- <li
- ><a href="#controls_android.tonemap.curveGreen">android.tonemap.curveGreen</a></li>
- <li
- ><a href="#controls_android.tonemap.curveRed">android.tonemap.curveRed</a></li>
- <li
- ><a href="#controls_android.tonemap.curve">android.tonemap.curve</a></li>
- <li
- ><a href="#controls_android.tonemap.mode">android.tonemap.mode</a></li>
- <li
- ><a href="#controls_android.tonemap.gamma">android.tonemap.gamma</a></li>
- <li
- ><a href="#controls_android.tonemap.presetCurve">android.tonemap.presetCurve</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.tonemap.maxCurvePoints">android.tonemap.maxCurvePoints</a></li>
- <li
- ><a href="#static_android.tonemap.availableToneMapModes">android.tonemap.availableToneMapModes</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.tonemap.curveBlue">android.tonemap.curveBlue</a></li>
- <li
- ><a href="#dynamic_android.tonemap.curveGreen">android.tonemap.curveGreen</a></li>
- <li
- ><a href="#dynamic_android.tonemap.curveRed">android.tonemap.curveRed</a></li>
- <li
- ><a href="#dynamic_android.tonemap.curve">android.tonemap.curve</a></li>
- <li
- ><a href="#dynamic_android.tonemap.mode">android.tonemap.mode</a></li>
- <li
- ><a href="#dynamic_android.tonemap.gamma">android.tonemap.gamma</a></li>
- <li
- ><a href="#dynamic_android.tonemap.presetCurve">android.tonemap.presetCurve</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_led">led</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.led.transmit">android.led.transmit</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.led.transmit">android.led.transmit</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.led.availableLeds">android.led.availableLeds</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_info">info</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.info.supportedHardwareLevel">android.info.supportedHardwareLevel</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_blackLevel">blackLevel</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.blackLevel.lock">android.blackLevel.lock</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.blackLevel.lock">android.blackLevel.lock</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_sync">sync</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.sync.frameNumber">android.sync.frameNumber</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.sync.maxLatency">android.sync.maxLatency</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_reprocess">reprocess</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">controls</span>
- <ul class="toc_section">
- <li
- ><a href="#controls_android.reprocess.effectiveExposureFactor">android.reprocess.effectiveExposureFactor</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">dynamic</span>
- <ul class="toc_section">
- <li
- ><a href="#dynamic_android.reprocess.effectiveExposureFactor">android.reprocess.effectiveExposureFactor</a></li>
- </ul>
- </li>
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.reprocess.maxCaptureStall">android.reprocess.maxCaptureStall</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- <li>
- <span class="toc_section_header"><a href="#section_depth">depth</a></span>
- <ul class="toc_section">
- <li>
- <span class="toc_kind_header">static</span>
- <ul class="toc_section">
- <li
- ><a href="#static_android.depth.maxDepthSamples">android.depth.maxDepthSamples</a></li>
- <li
- ><a href="#static_android.depth.availableDepthStreamConfigurations">android.depth.availableDepthStreamConfigurations</a></li>
- <li
- ><a href="#static_android.depth.availableDepthMinFrameDurations">android.depth.availableDepthMinFrameDurations</a></li>
- <li
- ><a href="#static_android.depth.availableDepthStallDurations">android.depth.availableDepthStallDurations</a></li>
- <li
- ><a href="#static_android.depth.depthIsExclusive">android.depth.depthIsExclusive</a></li>
- </ul>
- </li>
- </ul> <!-- toc_section -->
- </li>
- </ul>
-
-
- <h1>Properties</h1>
- <table class="properties">
-
- <thead class="thead_dummy">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead> <!-- so that the first occurrence of thead is not
- above the first occurrence of tr -->
-<!-- <namespace name="android"> -->
- <tr><td colspan="6" id="section_colorCorrection" class="section">colorCorrection</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.colorCorrection.mode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>color<wbr/>Correction.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">TRANSFORM_MATRIX</span>
- <span class="entry_type_enum_notes"><p>Use the <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> matrix
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> to do color conversion.<wbr/></p>
-<p>All advanced white balance adjustments (not specified
-by our white balance pipeline) must be disabled.<wbr/></p>
-<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
-TRANSFORM_<wbr/>MATRIX is ignored.<wbr/> The camera device will override
-this value to either FAST or HIGH_<wbr/>QUALITY.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Color correction processing must not slow down
-capture rate relative to sensor raw output.<wbr/></p>
-<p>Advanced white balance adjustments above and beyond
-the specified white balance pipeline may be applied.<wbr/></p>
-<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
-the camera device uses the last frame's AWB values
-(or defaults if AWB has never been run).<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>Color correction processing operates at improved
-quality but the capture rate might be reduced (relative to sensor
-raw output rate).<wbr/></p>
-<p>Advanced white balance adjustments above and beyond
-the specified white balance pipeline may be applied.<wbr/></p>
-<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
-the camera device uses the last frame's AWB values
-(or defaults if AWB has never been run).<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The mode control selects how the image data is converted from the
-sensor's native color into linear sRGB color.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When auto-white balance (AWB) is enabled with <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> this
-control is overridden by the AWB routine.<wbr/> When AWB is disabled,<wbr/> the
-application controls how the color mapping is performed.<wbr/></p>
-<p>We define the expected processing pipeline below.<wbr/> For consistency
-across devices,<wbr/> this is always the case with TRANSFORM_<wbr/>MATRIX.<wbr/></p>
-<p>When either FAST or HIGH_<wbr/>QUALITY is used,<wbr/> the camera device may
-do additional processing but <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
-<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> will still be provided by the
-camera device (in the results) and be roughly correct.<wbr/></p>
-<p>Switching to TRANSFORM_<wbr/>MATRIX and using the data provided from
-FAST or HIGH_<wbr/>QUALITY will yield a picture with the same white point
-as what was produced by the camera device in the earlier frame.<wbr/></p>
-<p>The expected processing pipeline is as follows:</p>
-<p><img alt="White balance processing pipeline" src="images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png"/></p>
-<p>The white balance is encoded by two values,<wbr/> a 4-channel white-balance
-gain vector (applied in the Bayer domain),<wbr/> and a 3x3 color transform
-matrix (applied after demosaic).<wbr/></p>
-<p>The 4-channel white-balance gains are defined as:</p>
-<pre><code><a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> = [ R G_<wbr/>even G_<wbr/>odd B ]
-</code></pre>
-<p>where <code>G_<wbr/>even</code> is the gain for green pixels on even rows of the
-output,<wbr/> and <code>G_<wbr/>odd</code> is the gain for green pixels on the odd rows.<wbr/>
-These may be identical for a given camera device implementation; if
-the camera device does not support a separate gain for even/<wbr/>odd green
-channels,<wbr/> it will use the <code>G_<wbr/>even</code> value,<wbr/> and write <code>G_<wbr/>odd</code> equal to
-<code>G_<wbr/>even</code> in the output result metadata.<wbr/></p>
-<p>The matrices for color transforms are defined as a 9-entry vector:</p>
-<pre><code><a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
-</code></pre>
-<p>which define a transform from input sensor colors,<wbr/> <code>P_<wbr/>in = [ r g b ]</code>,<wbr/>
-to output linear sRGB,<wbr/> <code>P_<wbr/>out = [ r' g' b' ]</code>,<wbr/></p>
-<p>with colors as follows:</p>
-<pre><code>r' = I0r + I1g + I2b
-g' = I3r + I4g + I5b
-b' = I6r + I7g + I8b
-</code></pre>
-<p>Both the input and output value ranges must match.<wbr/> Overflow/<wbr/>underflow
-values are clipped to fit within the range.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if color correction control is available
-on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
-That is,<wbr/> if the highest quality implementation on the camera device does not slow down
-capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY should generate the same output.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
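-
-          <p>The per-pixel math above can be sketched in a few lines of C++.<wbr/> The snippet below is
-illustrative only (the struct and function names are hypothetical,<wbr/> and a real ISP applies the
-gains in the Bayer domain before demosaic rather than per output pixel):</p>
-<pre><code>// Apply the RGGB white-balance gains and the row-major 3x3 transform to one
-// pixel with all values normalized to [0, 1], then clip the result.
-struct Rgb { float r, g, b; };
-
-static float clip01(float v) {
-    return v < 0.0f ? 0.0f : (v > 1.0f ? 1.0f : v);
-}
-
-// gains     = [R, G_even, G_odd, B]    (android.colorCorrection.gains)
-// transform = [I0 .. I8], row-major    (android.colorCorrection.transform)
-Rgb applyColorCorrection(const float gains[4], const float transform[9],
-                         Rgb in, bool greenOnEvenRow) {
-    float r = in.r * gains[0];
-    float g = in.g * (greenOnEvenRow ? gains[1] : gains[2]);
-    float b = in.b * gains[3];
-    return Rgb{
-        clip01(transform[0] * r + transform[1] * g + transform[2] * b),
-        clip01(transform[3] * r + transform[4] * g + transform[5] * b),
-        clip01(transform[6] * r + transform[7] * g + transform[8] * b),
-    };
-}
-</code></pre>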
-
-
- <tr class="entry" id="controls_android.colorCorrection.transform">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>color<wbr/>Correction.<wbr/>transform
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x 3
- </span>
- <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">3x3 rational matrix in row-major order</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A color transform matrix to use to transform
-from sensor RGB color space to output linear sRGB color space.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Unitless scale factors
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This matrix is either set by the camera device when the request
-<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not TRANSFORM_<wbr/>MATRIX,<wbr/> or
-directly by the application in the request when the
-<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is TRANSFORM_<wbr/>MATRIX.<wbr/></p>
-<p>In the latter case,<wbr/> the camera device may round the matrix to account
-for precision issues; the final rounded matrix should be reported back
-in this matrix result metadata.<wbr/> The transform should keep the magnitude
-of the output color values within <code>[0,<wbr/> 1.<wbr/>0]</code> (assuming input color
-values are within the normalized range <code>[0,<wbr/> 1.<wbr/>0]</code>),<wbr/> or clipping may occur.<wbr/></p>
-<p>The valid range of each matrix element varies on different devices,<wbr/> but
-values within [-1.<wbr/>5,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.colorCorrection.gains">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>color<wbr/>Correction.<wbr/>gains
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public as rggbChannelVector]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">A 1D array of floats for 4 color channel gains</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Gains applying to Bayer raw color channels for
-white-balance.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Unitless gain factors
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>These per-channel gains are either set by the camera device
-when the request <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not
-TRANSFORM_<wbr/>MATRIX,<wbr/> or directly by the application in the
-request when the <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is
-TRANSFORM_<wbr/>MATRIX.<wbr/></p>
-<p>The gains in the result metadata are the gains actually
-applied by the camera device to the current frame.<wbr/></p>
-<p>The valid range of gains varies on different devices,<wbr/> but gains
-between [1.<wbr/>0,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wbr/> Even if a given
-device allows gains below 1.<wbr/>0,<wbr/> this is usually not recommended because
-this can create color artifacts.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The 4-channel white-balance gains are defined in
-the order of <code>[R G_<wbr/>even G_<wbr/>odd B]</code>,<wbr/> where <code>G_<wbr/>even</code> is the gain
-for green pixels on even rows of the output,<wbr/> and <code>G_<wbr/>odd</code>
-is the gain for green pixels on the odd rows.<wbr/></p>
-<p>If a HAL does not support a separate gain for even/<wbr/>odd green
-channels,<wbr/> it must use the <code>G_<wbr/>even</code> value,<wbr/> and write
-<code>G_<wbr/>odd</code> equal to <code>G_<wbr/>even</code> in the output result metadata.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.colorCorrection.aberrationMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No aberration correction is applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Aberration correction will not slow down capture rate
-relative to sensor raw output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>Aberration correction operates at improved quality but the capture rate might be
-reduced (relative to sensor raw output rate).<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Mode of operation for the chromatic aberration correction algorithm.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.colorCorrection.availableAberrationModes">android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-<p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
-cannot focus on the same point after exiting the lens.<wbr/> This metadata defines
-the high-level control of the chromatic aberration correction algorithm,<wbr/> which aims to
-minimize the chromatic artifacts that may occur along the object boundaries in an
-image.<wbr/></p>
-<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean that camera device determined aberration
-correction will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device will
-use the highest-quality aberration correction algorithms,<wbr/> even if it slows down
-capture rate.<wbr/> FAST means the camera device will not slow down capture rate when
-applying aberration correction.<wbr/></p>
-<p>LEGACY devices will always be in FAST mode.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.colorCorrection.mode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>color<wbr/>Correction.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">TRANSFORM_MATRIX</span>
- <span class="entry_type_enum_notes"><p>Use the <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> matrix
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> to do color conversion.<wbr/></p>
-<p>All advanced white balance adjustments (not specified
-by our white balance pipeline) must be disabled.<wbr/></p>
-<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
-TRANSFORM_<wbr/>MATRIX is ignored.<wbr/> The camera device will override
-this value to either FAST or HIGH_<wbr/>QUALITY.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Color correction processing must not slow down
-capture rate relative to sensor raw output.<wbr/></p>
-<p>Advanced white balance adjustments above and beyond
-the specified white balance pipeline may be applied.<wbr/></p>
-<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
-the camera device uses the last frame's AWB values
-(or defaults if AWB has never been run).<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>Color correction processing operates at improved
-quality but the capture rate might be reduced (relative to sensor
-raw output rate).<wbr/></p>
-<p>Advanced white balance adjustments above and beyond
-the specified white balance pipeline may be applied.<wbr/></p>
-<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
-the camera device uses the last frame's AWB values
-(or defaults if AWB has never been run).<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The mode control selects how the image data is converted from the
-sensor's native color into linear sRGB color.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When auto-white balance (AWB) is enabled with <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> this
-control is overridden by the AWB routine.<wbr/> When AWB is disabled,<wbr/> the
-application controls how the color mapping is performed.<wbr/></p>
-<p>We define the expected processing pipeline below.<wbr/> For consistency
-across devices,<wbr/> this is always the case with TRANSFORM_<wbr/>MATRIX.<wbr/></p>
-<p>When either FAST or HIGH_<wbr/>QUALITY is used,<wbr/> the camera device may
-do additional processing but <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
-<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> will still be provided by the
-camera device (in the results) and be roughly correct.<wbr/></p>
-<p>Switching to TRANSFORM_<wbr/>MATRIX and using the data provided from
-FAST or HIGH_<wbr/>QUALITY will yield a picture with the same white point
-as what was produced by the camera device in the earlier frame.<wbr/></p>
-<p>The expected processing pipeline is as follows:</p>
-<p><img alt="White balance processing pipeline" src="images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png"/></p>
-<p>The white balance is encoded by two values,<wbr/> a 4-channel white-balance
-gain vector (applied in the Bayer domain),<wbr/> and a 3x3 color transform
-matrix (applied after demosaic).<wbr/></p>
-<p>The 4-channel white-balance gains are defined as:</p>
-<pre><code><a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> = [ R G_<wbr/>even G_<wbr/>odd B ]
-</code></pre>
-<p>where <code>G_<wbr/>even</code> is the gain for green pixels on even rows of the
-output,<wbr/> and <code>G_<wbr/>odd</code> is the gain for green pixels on the odd rows.<wbr/>
-These may be identical for a given camera device implementation; if
-the camera device does not support a separate gain for even/<wbr/>odd green
-channels,<wbr/> it will use the <code>G_<wbr/>even</code> value,<wbr/> and write <code>G_<wbr/>odd</code> equal to
-<code>G_<wbr/>even</code> in the output result metadata.<wbr/></p>
-<p>The matrices for color transforms are defined as a 9-entry vector:</p>
-<pre><code><a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
-</code></pre>
-<p>which define a transform from input sensor colors,<wbr/> <code>P_<wbr/>in = [ r g b ]</code>,<wbr/>
-to output linear sRGB,<wbr/> <code>P_<wbr/>out = [ r' g' b' ]</code>,<wbr/></p>
-<p>with colors as follows:</p>
-<pre><code>r' = I0r + I1g + I2b
-g' = I3r + I4g + I5b
-b' = I6r + I7g + I8b
-</code></pre>
-<p>Both the input and output value ranges must match.<wbr/> Overflow/<wbr/>underflow
-values are clipped to fit within the range.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if color correction control is available
-on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
-That is,<wbr/> if the highest quality implementation on the camera device does not slow down
-capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY should generate the same output.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.colorCorrection.transform">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>color<wbr/>Correction.<wbr/>transform
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x 3
- </span>
- <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">3x3 rational matrix in row-major order</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A color transform matrix to use to transform
-from sensor RGB color space to output linear sRGB color space.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Unitless scale factors
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This matrix is either set by the camera device when the request
-<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not TRANSFORM_<wbr/>MATRIX,<wbr/> or
-directly by the application in the request when the
-<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is TRANSFORM_<wbr/>MATRIX.<wbr/></p>
-<p>In the latter case,<wbr/> the camera device may round the matrix to account
-for precision issues; the final rounded matrix should be reported back
-in this matrix result metadata.<wbr/> The transform should keep the magnitude
-of the output color values within <code>[0,<wbr/> 1.<wbr/>0]</code> (assuming input color
-values are within the normalized range <code>[0,<wbr/> 1.<wbr/>0]</code>),<wbr/> or clipping may occur.<wbr/></p>
-<p>The valid range of each matrix element varies on different devices,<wbr/> but
-values within [-1.<wbr/>5,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.colorCorrection.gains">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>color<wbr/>Correction.<wbr/>gains
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public as rggbChannelVector]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">A 1D array of floats for 4 color channel gains</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Gains applying to Bayer raw color channels for
-white-balance.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Unitless gain factors
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>These per-channel gains are either set by the camera device
-when the request <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not
-TRANSFORM_<wbr/>MATRIX,<wbr/> or directly by the application in the
-request when the <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is
-TRANSFORM_<wbr/>MATRIX.<wbr/></p>
-<p>The gains in the result metadata are the gains actually
-applied by the camera device to the current frame.<wbr/></p>
-<p>The valid range of gains varies on different devices,<wbr/> but gains
-between [1.<wbr/>0,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wbr/> Even if a given
-device allows gains below 1.<wbr/>0,<wbr/> this is usually not recommended because
-this can create color artifacts.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The 4-channel white-balance gains are defined in
-the order of <code>[R G_<wbr/>even G_<wbr/>odd B]</code>,<wbr/> where <code>G_<wbr/>even</code> is the gain
-for green pixels on even rows of the output,<wbr/> and <code>G_<wbr/>odd</code>
-is the gain for green pixels on the odd rows.<wbr/></p>
-<p>If a HAL does not support a separate gain for even/<wbr/>odd green
-channels,<wbr/> it must use the <code>G_<wbr/>even</code> value,<wbr/> and write
-<code>G_<wbr/>odd</code> equal to <code>G_<wbr/>even</code> in the output result metadata.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.colorCorrection.aberrationMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No aberration correction is applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Aberration correction will not slow down capture rate
-relative to sensor raw output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>Aberration correction operates at improved quality but the capture rate might be
-reduced (relative to sensor raw output rate).<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Mode of operation for the chromatic aberration correction algorithm.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.colorCorrection.availableAberrationModes">android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-<p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
-cannot focus on the same point after exiting the lens.<wbr/> This metadata defines
-the high-level control of the chromatic aberration correction algorithm,<wbr/> which aims to
-minimize the chromatic artifacts that may occur along the object boundaries in an
-image.<wbr/></p>
-<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean that camera device determined aberration
-correction will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device will
-use the highest-quality aberration correction algorithms,<wbr/> even if it slows down
-capture rate.<wbr/> FAST means the camera device will not slow down capture rate when
-applying aberration correction.<wbr/></p>
-<p>LEGACY devices will always be in FAST mode.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.colorCorrection.availableAberrationModes">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of aberration correction modes for <a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a> that are
-supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This key lists the valid modes for <a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a>.<wbr/> If no
-aberration correction modes are available for a device,<wbr/> this list will solely include
-OFF mode.<wbr/> All camera devices will support either OFF or FAST mode.<wbr/></p>
-<p>Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always list
-OFF mode.<wbr/> This includes all FULL level devices.<wbr/></p>
-<p>LEGACY devices will always only support FAST mode.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if chromatic aberration control is available
-on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
-That is,<wbr/> if the highest quality implementation on the camera device does not slow down
-capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_control" class="section">control</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.control.aeAntibandingMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>The camera device will not adjust exposure duration to
-avoid banding problems.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">50HZ</span>
- <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
-avoid banding problems with 50Hz illumination sources.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">60HZ</span>
- <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
-avoid banding problems with 60Hz illumination
-sources.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AUTO</span>
- <span class="entry_type_enum_notes"><p>The camera device will automatically adapt its
-antibanding routine to the current illumination
-condition.<wbr/> This is the default mode if AUTO is
-available on a given camera device.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired setting for the camera device's auto-exposure
-algorithm's antibanding compensation.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Some kinds of lighting fixtures,<wbr/> such as some fluorescent
-lights,<wbr/> flicker at the rate of the power supply frequency
-(60Hz or 50Hz,<wbr/> depending on country).<wbr/> While this is
-typically not noticeable to a person,<wbr/> it can be visible to
-a camera device.<wbr/> If a camera sets its exposure time to the
-wrong value,<wbr/> the flicker may become visible in the
-viewfinder as flicker or in a final captured image,<wbr/> as a
-set of variable-brightness bands across the image.<wbr/></p>
-<p>Therefore,<wbr/> the auto-exposure routines of camera devices
-include antibanding routines that ensure that the chosen
-exposure value will not cause such banding.<wbr/> The choice of
-exposure time depends on the rate of flicker,<wbr/> which the
-camera device can detect automatically,<wbr/> or the expected
-rate can be selected by the application using this
-control.<wbr/></p>
-<p>A given camera device may not support all of the possible
-options for the antibanding mode.<wbr/> The
-<a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a> key contains
-the available modes for a given camera device.<wbr/></p>
-<p>AUTO mode is the default if it is available on a given
-camera device.<wbr/> When AUTO mode is not available,<wbr/> the
-default will be either 50HZ or 60HZ,<wbr/> and both 50HZ
-and 60HZ will be available.<wbr/></p>
-<p>If manual exposure control is enabled (by setting
-<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> to OFF),<wbr/>
-then this setting has no effect,<wbr/> and the application must
-ensure it selects exposure times that do not cause banding
-issues.<wbr/> The <a href="#dynamic_android.statistics.sceneFlicker">android.<wbr/>statistics.<wbr/>scene<wbr/>Flicker</a> key can assist
-the application in this.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For all capture request templates,<wbr/> this field must be set
-to AUTO if AUTO mode is available.<wbr/> If AUTO is not available,<wbr/>
-the default must be either 50HZ or 60HZ,<wbr/> and both 50HZ and
-60HZ must be available.<wbr/></p>
-<p>If manual exposure control is enabled (by setting
-<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> to OFF),<wbr/>
-then the exposure values provided by the application must not be
-adjusted for antibanding.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
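-
-          <p>One way to picture the antibanding constraint: mains-powered lights flicker at twice the
-supply frequency,<wbr/> so exposures that are whole multiples of the flicker period (10ms for 50Hz,<wbr/>
-about 8.<wbr/>33ms for 60Hz) integrate the flicker away.<wbr/> The C++ sketch below is illustrative only
-and is not the HAL's actual algorithm:</p>
-<pre><code>// Quantize an exposure time (nanoseconds) to a whole number of flicker
-// periods so that 50Hz/60Hz banding integrates out. Exposures shorter than
-// one period are returned unchanged; they cannot be quantized this way.
-long long antibandExposureNs(long long exposureNs, int mainsHz) {
-    const long long flickerPeriodNs = 1000000000LL / (2LL * mainsHz);
-    if (exposureNs < flickerPeriodNs) {
-        return exposureNs;
-    }
-    return (exposureNs / flickerPeriodNs) * flickerPeriodNs;
-}
-</code></pre>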
-
-
- <tr class="entry" id="controls_android.control.aeExposureCompensation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Adjustment to auto-exposure (AE) target image
-brightness.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Compensation steps
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The adjustment is measured as a count of steps,<wbr/> with the
-step size defined by <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> and the
-allowed range by <a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a>.<wbr/></p>
-<p>For example,<wbr/> if the exposure value (EV) step is 0.<wbr/>333,<wbr/> '6'
-will mean an exposure compensation of +2 EV; -3 will mean an
-exposure compensation of -1 EV.<wbr/> One EV represents a doubling
-of image brightness.<wbr/> Note that this control will only be
-effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF.<wbr/> This control
-will take effect even when <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> <code>== true</code>.<wbr/></p>
-<p>In the event of the exposure compensation value being changed,<wbr/> the camera device
-may take several frames to reach the newly requested exposure target.<wbr/>
-During that time,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> field will be in the SEARCHING
-state.<wbr/> Once the new exposure target is reached,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> will
-change from SEARCHING to either CONVERGED,<wbr/> LOCKED (if AE lock is enabled),<wbr/> or
-FLASH_<wbr/>REQUIRED (if the scene is too dark for still capture).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
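-
-          <p>The step arithmetic is easy to get wrong,<wbr/> so here is a minimal C++ sketch (illustrative
-only,<wbr/> hypothetical function name): the applied shift is <code>steps * stepSize</code> in EV,<wbr/> and each
-EV doubles the target brightness,<wbr/> so 6 steps of 1/3 EV give a 4x brighter target and -3 steps give half.<wbr/></p>
-<pre><code>#include &lt;cmath&gt;
-
-// Convert an aeExposureCompensation step count into a target-brightness
-// scale factor, given aeCompensationStep (the EV size of one step).
-double exposureCompensationScale(int steps, double stepSizeEv) {
-    double ev = steps * stepSizeEv;   // e.g. 6 * (1/3) = +2 EV, -3 * (1/3) = -1 EV
-    return std::pow(2.0, ev);         // one EV is a doubling of brightness
-}
-</code></pre>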
-
-
- <tr class="entry" id="controls_android.control.aeLock">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Lock
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Auto-exposure lock is disabled; the AE algorithm
-is free to update its parameters.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Auto-exposure lock is enabled; the AE algorithm
-must not update the exposure and sensitivity parameters
-while the lock is active.<wbr/></p>
-<p><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> setting changes
-will still take effect while auto-exposure is locked.<wbr/></p>
-<p>Some rare LEGACY devices may not support
-this,<wbr/> in which case the value will always be overridden to OFF.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether auto-exposure (AE) is currently locked to its latest
-calculated values.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to <code>true</code> (ON),<wbr/> the AE algorithm is locked to its latest parameters,<wbr/>
-and will not change exposure settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
-<p>Note that even when AE is locked,<wbr/> the flash may be fired if
-the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is ON_<wbr/>AUTO_<wbr/>FLASH /<wbr/>
-ON_<wbr/>ALWAYS_<wbr/>FLASH /<wbr/> ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE.<wbr/></p>
-<p>When <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> is changed,<wbr/> even if the AE lock
-is ON,<wbr/> the camera device will still adjust its exposure value.<wbr/></p>
-<p>If AE precapture is triggered (see <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>)
-when AE is already locked,<wbr/> the camera device will not change the exposure time
-(<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>) and sensitivity (<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>)
-parameters.<wbr/> The flash may be fired if the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>
-is ON_<wbr/>AUTO_<wbr/>FLASH/<wbr/>ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE and the scene is too dark.<wbr/> If the
-<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is ON_<wbr/>ALWAYS_<wbr/>FLASH,<wbr/> the scene may become overexposed.<wbr/>
-Similarly,<wbr/> AE precapture trigger CANCEL has no effect when AE is already locked.<wbr/></p>
-<p>When an AE precapture sequence is triggered,<wbr/> AE unlock will not be able to unlock
-the AE if AE is locked by the camera device internally during the precapture metering
-sequence.<wbr/> In other words,<wbr/> submitting requests with AE unlock has no effect for an
-ongoing precapture metering sequence.<wbr/> Otherwise,<wbr/> the precapture metering sequence
-will never succeed in a sequence of preview requests where AE lock is always set
-to <code>false</code>.<wbr/></p>
-<p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
-get locked do not necessarily correspond to the settings that were present in the
-latest capture result received from the camera device,<wbr/> since additional captures
-and AE updates may have occurred even before the result was sent out.<wbr/> If an
-application is switching between automatic and manual control and wishes to eliminate
-any flicker during the switch,<wbr/> the following procedure is recommended:</p>
-<ol>
-<li>Starting in auto-AE mode:</li>
-<li>Lock AE</li>
-<li>Wait for the first result to be output that has the AE locked</li>
-<li>Copy exposure settings from that result into a request,<wbr/> set the request to manual AE</li>
-<li>Submit the capture request,<wbr/> proceed to run manual AE as desired.<wbr/></li>
-</ol>
-<p>See <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> for AE lock related state transition details.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
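-
-          <p>A minimal C++ sketch of the copy step in the procedure above (illustrative only,<wbr/> with
-hypothetical stand-in types rather than the real camera2 or HAL interfaces): wait for the first
-result that reports AE as locked,<wbr/> then reuse its exposure settings verbatim in a manual-AE request.<wbr/></p>
-<pre><code>// Hypothetical, simplified views of a capture result and a manual request.
-struct AeResult {
-    bool aeLocked;              // android.control.aeState == LOCKED
-    long long exposureTimeNs;   // android.sensor.exposureTime
-    int sensitivityIso;         // android.sensor.sensitivity
-    long long frameDurationNs;  // android.sensor.frameDuration
-};
-struct ManualAeRequest {
-    long long exposureTimeNs;
-    int sensitivityIso;
-    long long frameDurationNs;
-};
-
-// Fills 'out' from the first locked result and returns true; returns false
-// if the lock has not yet shown up in any result (keep waiting in that case).
-bool buildManualAeRequest(const AeResult* results, int count, ManualAeRequest* out) {
-    for (int i = 0; i < count; ++i) {
-        if (results[i].aeLocked) {
-            out->exposureTimeNs  = results[i].exposureTimeNs;
-            out->sensitivityIso  = results[i].sensitivityIso;
-            out->frameDurationNs = results[i].frameDurationNs;
-            return true;
-        }
-    }
-    return false;
-}
-</code></pre>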
-
-
- <tr class="entry" id="controls_android.control.aeMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is disabled.<wbr/></p>
-<p>The application-selected <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
-<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> and
-<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> are used by the camera
-device,<wbr/> along with android.<wbr/>flash.<wbr/>* fields,<wbr/> if there's
-a flash unit for this camera device.<wbr/></p>
-<p>Note that auto-white balance (AWB) and auto-focus (AF)
-behavior is device dependent when AE is in OFF mode.<wbr/>
-To have consistent behavior across different devices,<wbr/>
-it is recommended to either set AWB and AF to OFF mode
-or lock AWB and AF before setting AE to OFF.<wbr/>
-See <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a>,<wbr/> and <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>
-for more details.<wbr/></p>
-<p>LEGACY devices do not support the OFF mode and will
-override attempts to use this value to ON.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is active,<wbr/>
-with no flash control.<wbr/></p>
-<p>The application's values for
-<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
-<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and
-<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> are ignored.<wbr/> The
-application has control over the various
-android.<wbr/>flash.<wbr/>* fields.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON_AUTO_FLASH</span>
- <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
-the camera's flash unit,<wbr/> firing it in low-light
-conditions.<wbr/></p>
-<p>The flash may be fired during a precapture sequence
-(triggered by <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>) and
-may be fired for captures for which the
-<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> field is set to
-STILL_<wbr/>CAPTURE.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON_ALWAYS_FLASH</span>
- <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
-the camera's flash unit,<wbr/> always firing it for still
-captures.<wbr/></p>
-<p>The flash may be fired during a precapture sequence
-(triggered by <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>) and
-will always be fired for captures for which the
-<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> field is set to
-STILL_<wbr/>CAPTURE.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON_AUTO_FLASH_REDEYE</span>
- <span class="entry_type_enum_notes"><p>Like ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> but with automatic red eye
-reduction.<wbr/></p>
-<p>If deemed necessary by the camera device,<wbr/> a red eye
-reduction flash will fire during the precapture
-sequence.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired mode for the camera device's
-auto-exposure routine.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.aeAvailableModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is
-AUTO.<wbr/></p>
-<p>When set to any of the ON modes,<wbr/> the camera device's
-auto-exposure routine is enabled,<wbr/> overriding the
-application's selected exposure time,<wbr/> sensor sensitivity,<wbr/>
-and frame duration (<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
-<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and
-<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>).<wbr/> If one of the FLASH modes
-is selected,<wbr/> the camera device's flash unit controls are
-also overridden.<wbr/></p>
-<p>The FLASH modes are only available if the camera device
-has a flash unit (<a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> is <code>true</code>).<wbr/></p>
-<p>If flash TORCH mode is desired,<wbr/> this field must be set to
-ON or OFF,<wbr/> and <a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> set to TORCH.<wbr/></p>
-<p>When set to any of the ON modes,<wbr/> the values chosen by the
-camera device auto-exposure routine for the overridden
-fields for a given capture will be available in its
-CaptureResult.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.aeRegions">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>ae<wbr/>Regions
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 5 x area_count
- </span>
- <span class="entry_type_visibility"> [public as meteringRectangle]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of metering areas to use for auto-exposure adjustment.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
- </td>
-
- <td class="entry_range">
- <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not available if <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a> is 0.<wbr/>
-Otherwise will always be present.<wbr/></p>
-<p>The maximum number of regions supported by the device is determined by the value
-of <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a>.<wbr/></p>
-<p>The coordinate system is based on the active pixel array,<wbr/>
-with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
-(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
-bottom-right pixel in the active pixel array.<wbr/></p>
-<p>The weight must be within <code>[0,<wbr/> 1000]</code>,<wbr/> and represents a weight
-for every pixel in the area.<wbr/> This means that a large metering area
-with the same weight as a smaller area will have more effect in
-the metering result.<wbr/> Metering areas can partially overlap and the
-camera device will add the weights in the overlap region.<wbr/></p>
-<p>The weights are relative to weights of other exposure metering regions,<wbr/> so if only one
-region is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0
-weight is ignored.<wbr/></p>
-<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
-camera device.<wbr/></p>
-<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
-capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
-region and output only the intersection rectangle as the metering region in the result
-metadata.<wbr/> If the region is entirely outside the crop region,<wbr/> it will be ignored and
-not reported in the result metadata.<wbr/></p>
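-<p>As an illustration only (not part of the metadata contract),<wbr/> a minimal camera2
-sketch that meters auto-exposure on the center quarter of the active array might look like
-the following; the class,<wbr/> method and parameter names are assumptions,<wbr/> and the
-builder is assumed to come from an existing capture session.<wbr/></p>
-<pre><code>
-import android.graphics.Rect;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.params.MeteringRectangle;
-
-final class AeRegionExample {
-    // Sketch: request a single AE metering region covering the center
-    // quarter of the active array, with the maximum weight.
-    // 'activeArray' is assumed to hold SENSOR_INFO_ACTIVE_ARRAY_SIZE.
-    static void setCenterAeRegion(CaptureRequest.Builder builder, Rect activeArray) {
-        MeteringRectangle region = new MeteringRectangle(
-                activeArray.width() / 4,   // x (left)
-                activeArray.height() / 4,  // y (top)
-                activeArray.width() / 2,   // width
-                activeArray.height() / 2,  // height
-                MeteringRectangle.METERING_WEIGHT_MAX);  // weight in [0, 1000]
-        builder.set(CaptureRequest.CONTROL_AE_REGIONS,
-                new MeteringRectangle[] { region });
-    }
-}
-</code></pre>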
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The HAL level representation of MeteringRectangle[] is an
-int[5 * area_<wbr/>count].<wbr/>
-Every five elements represent a metering region of
-(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
-The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
-exclusive on xmax and ymax.<wbr/></p>
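-<p>Purely to illustrate this layout (the helper below is hypothetical,<wbr/> neither
-framework nor HAL code),<wbr/> the five-int encoding could be produced from the public
-MeteringRectangle type as follows:</p>
-<pre><code>
-import android.hardware.camera2.params.MeteringRectangle;
-
-final class MeteringRegionPacker {
-    // Hypothetical helper: flatten MeteringRectangle[] into the
-    // int[5 * area_count] layout described above.
-    static int[] pack(MeteringRectangle[] regions) {
-        int[] out = new int[5 * regions.length];
-        int i = 0;
-        for (MeteringRectangle r : regions) {
-            out[i++] = r.getX();                  // xmin, inclusive
-            out[i++] = r.getY();                  // ymin, inclusive
-            out[i++] = r.getX() + r.getWidth();   // xmax, exclusive
-            out[i++] = r.getY() + r.getHeight();  // ymax, exclusive
-            out[i++] = r.getMeteringWeight();     // weight in [0, 1000]
-        }
-        return out;
-    }
-}
-</code></pre>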
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.aeTargetFpsRange">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as rangeInt]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Range over which the auto-exposure routine can
-adjust the capture frame rate to maintain good
-exposure.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Frames per second (FPS)
- </td>
-
- <td class="entry_range">
- <p>Any of the entries in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Only constrains auto-exposure (AE) algorithm,<wbr/> not
-manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a> and
-<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.aePrecaptureTrigger">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">IDLE</span>
- <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">START</span>
- <span class="entry_type_enum_notes"><p>The precapture metering sequence will be started
-by the camera device.<wbr/></p>
-<p>The exact effect of the precapture trigger depends on
-the current AE mode and state.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CANCEL</span>
- <span class="entry_type_enum_notes"><p>The camera device will cancel any currently active or completed
-precapture metering sequence,<wbr/> and the auto-exposure routine will return to its
-initial state.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether the camera device will trigger a precapture
-metering sequence when it processes this request.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry is normally set to IDLE,<wbr/> or is not
-included at all in the request settings.<wbr/> When included and
-set to START,<wbr/> the camera device will trigger the auto-exposure (AE)
-precapture metering sequence.<wbr/></p>
-<p>When set to CANCEL,<wbr/> the camera device will cancel any active
-precapture metering trigger,<wbr/> and return to its initial AE state.<wbr/>
-If a precapture metering sequence is already completed,<wbr/> and the camera
-device has implicitly locked the AE for subsequent still capture,<wbr/> the
-CANCEL trigger will unlock the AE and return to its initial AE state.<wbr/></p>
-<p>The precapture sequence should be triggered before starting a
-high-quality still capture for final metering decisions to
-be made,<wbr/> and for firing pre-capture flash pulses to estimate
-scene brightness and required final capture flash power,<wbr/> when
-the flash is enabled.<wbr/></p>
-<p>Normally,<wbr/> this entry should be set to START for only a
-single request,<wbr/> and the application should wait until the
-sequence completes before starting a new one.<wbr/></p>
-<p>When a precapture metering sequence is finished,<wbr/> the camera device
-may lock the auto-exposure routine internally to be able to accurately expose the
-subsequent still capture image (<code><a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> == STILL_<wbr/>CAPTURE</code>).<wbr/>
-For this case,<wbr/> the AE may not resume normal scan if no subsequent still capture is
-submitted.<wbr/> To ensure that the AE routine restarts normal scan,<wbr/> the application should
-submit a request with <code><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> == true</code>,<wbr/> followed by a request
-with <code><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> == false</code>,<wbr/> if the application decides not to submit a
-still capture request after the precapture sequence completes.<wbr/> Alternatively,<wbr/> for
-API level 23 or newer devices,<wbr/> the CANCEL can be used to unlock the camera device's
-internally locked AE if the application doesn't submit a still capture request after
-the AE precapture trigger.<wbr/> Note that the CANCEL was added in API level 23,<wbr/> and must not
-be used in devices that have earlier API levels.<wbr/></p>
-<p>The exact effect of auto-exposure (AE) precapture trigger
-depends on the current AE mode and state; see
-<a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> for AE precapture state transition
-details.<wbr/></p>
-<p>On LEGACY-level devices,<wbr/> the precapture trigger is not supported;
-capturing a high-resolution JPEG image will automatically trigger a
-precapture sequence before the high-resolution capture,<wbr/> including
-potentially firing a pre-capture flash.<wbr/></p>
-<p>Using the precapture trigger and the auto-focus trigger <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>
-simultaneously is allowed.<wbr/> However,<wbr/> since these triggers often require cooperation between
-the auto-focus and auto-exposure routines (for example,<wbr/> the flash may need to be enabled for a
-focus sweep),<wbr/> the camera device may delay acting on a later trigger until the previous
-trigger has been fully handled.<wbr/> This may lead to longer intervals between the trigger and
-changes to <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> indicating the start of the precapture sequence,<wbr/> for
-example.<wbr/></p>
-<p>If both the precapture and the auto-focus trigger are activated on the same request,<wbr/> then
-the camera device will complete them in the optimal order for that device.<wbr/></p>
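-<p>As a rough application-level sketch only (the class,<wbr/> method and parameter names
-below are assumptions,<wbr/> not part of this entry),<wbr/> the single-request START pattern
-described above could look like:</p>
-<pre><code>
-import android.hardware.camera2.CameraAccessException;
-import android.hardware.camera2.CameraCaptureSession;
-import android.hardware.camera2.CaptureRequest;
-import android.os.Handler;
-
-final class PrecaptureExample {
-    // Sketch: fire the AE precapture trigger in exactly one request,
-    // then stop sending START so the sequence is not restarted every frame.
-    static void startPrecapture(CameraCaptureSession session,
-                                CaptureRequest.Builder previewBuilder,
-                                CameraCaptureSession.CaptureCallback callback,
-                                Handler handler) throws CameraAccessException {
-        previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
-                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
-        session.capture(previewBuilder.build(), callback, handler);
-        // Return the trigger to IDLE for subsequent requests (or omit the key).
-        previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
-                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
-        // The application then watches android.control.aeState for PRECAPTURE
-        // followed by CONVERGED or FLASH_REQUIRED before issuing the still capture.
-    }
-}
-</code></pre>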
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The HAL must support triggering the AE precapture trigger while an AF trigger is active
-(and vice versa),<wbr/> or at the same time as the AF trigger.<wbr/> It is acceptable for the HAL to
-treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
-AE trigger.<wbr/> Or the HAL may choose to optimize the case with both triggers fired at once,<wbr/>
-to minimize the latency for converging both focus and exposure/<wbr/>flash usage.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.afMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>af<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>The auto-focus routine does not control the lens;
-<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> is controlled by the
-application.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AUTO</span>
- <span class="entry_type_enum_notes"><p>Basic automatic focus mode.<wbr/></p>
-<p>In this mode,<wbr/> the lens does not move unless
-the autofocus trigger action is called.<wbr/> When that trigger
-is activated,<wbr/> AF will transition to ACTIVE_<wbr/>SCAN,<wbr/> then to
-the outcome of the scan (FOCUSED or NOT_<wbr/>FOCUSED).<wbr/></p>
-<p>Always supported if lens is not fixed focus.<wbr/></p>
-<p>Use <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> to determine if the lens
-is fixed-focus.<wbr/></p>
-<p>Triggering AF_<wbr/>CANCEL resets the lens position to default,<wbr/>
-and sets the AF state to INACTIVE.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MACRO</span>
- <span class="entry_type_enum_notes"><p>Close-up focusing mode.<wbr/></p>
-<p>In this mode,<wbr/> the lens does not move unless the
-autofocus trigger action is called.<wbr/> When that trigger is
-activated,<wbr/> AF will transition to ACTIVE_<wbr/>SCAN,<wbr/> then to
-the outcome of the scan (FOCUSED or NOT_<wbr/>FOCUSED).<wbr/> This
-mode is optimized for focusing on objects very close to
-the camera.<wbr/></p>
-<p>Triggering cancel AF resets the lens position to default,<wbr/>
-and sets the AF state to INACTIVE.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CONTINUOUS_VIDEO</span>
- <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
-position continually to attempt to provide a
-constantly-in-focus image stream.<wbr/></p>
-<p>The focusing behavior should be suitable for good quality
-video recording; typically this means slower focus
-movement and no overshoots.<wbr/> When the AF trigger is not
-involved,<wbr/> the AF algorithm should start in INACTIVE state,<wbr/>
-and then transition into PASSIVE_<wbr/>SCAN and PASSIVE_<wbr/>FOCUSED
-states as appropriate.<wbr/> When the AF trigger is activated,<wbr/>
-the algorithm should immediately transition into
-AF_<wbr/>FOCUSED or AF_<wbr/>NOT_<wbr/>FOCUSED as appropriate,<wbr/> and lock the
-lens position until a cancel AF trigger is received.<wbr/></p>
-<p>Once cancel is received,<wbr/> the algorithm should transition
-back to INACTIVE and resume passive scan.<wbr/> Note that this
-behavior is not identical to CONTINUOUS_<wbr/>PICTURE,<wbr/> since an
-ongoing PASSIVE_<wbr/>SCAN must immediately be
-canceled.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CONTINUOUS_PICTURE</span>
- <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
-position continually to attempt to provide a
-constantly-in-focus image stream.<wbr/></p>
-<p>The focusing behavior should be suitable for still image
-capture; typically this means focusing as fast as
-possible.<wbr/> When the AF trigger is not involved,<wbr/> the AF
-algorithm should start in INACTIVE state,<wbr/> and then
-transition into PASSIVE_<wbr/>SCAN and PASSIVE_<wbr/>FOCUSED states as
-appropriate as it attempts to maintain focus.<wbr/> When the AF
-trigger is activated,<wbr/> the algorithm should finish its
-PASSIVE_<wbr/>SCAN if active,<wbr/> and then transition into
-AF_<wbr/>FOCUSED or AF_<wbr/>NOT_<wbr/>FOCUSED as appropriate,<wbr/> and lock the
-lens position until a cancel AF trigger is received.<wbr/></p>
-<p>When the AF cancel trigger is activated,<wbr/> the algorithm
-should transition back to INACTIVE and then act as if it
-has just been started.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">EDOF</span>
- <span class="entry_type_enum_notes"><p>Extended depth of field (digital focus) mode.<wbr/></p>
-<p>The camera device will produce images with an extended
-depth of field automatically; no special focusing
-operations need to be done before taking a picture.<wbr/></p>
-<p>AF triggers are ignored,<wbr/> and the AF state will always be
-INACTIVE.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether auto-focus (AF) is currently enabled,<wbr/> and what
-mode it is set to.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.afAvailableModes">android.<wbr/>control.<wbr/>af<wbr/>Available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> = AUTO and the lens is not fixed focus
-(i.<wbr/>e.<wbr/> <code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> > 0</code>).<wbr/> Also note that
-when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/> the behavior of AF is device
-dependent.<wbr/> It is recommended to lock AF by using <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> before
-setting <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> to OFF,<wbr/> or set AF mode to OFF when AE is OFF.<wbr/></p>
-<p>If the lens is controlled by the camera device auto-focus algorithm,<wbr/>
-the camera device will report the current AF status in <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a>
-in result metadata.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When afMode is AUTO or MACRO,<wbr/> the lens must not move until an AF trigger is sent in a
-request (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> <code>==</code> START).<wbr/> After an AF trigger,<wbr/> the afState will end
-up with either FOCUSED_<wbr/>LOCKED or NOT_<wbr/>FOCUSED_<wbr/>LOCKED state (see
-<a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for detailed state transitions),<wbr/> which indicates that the lens is
-locked and will not move.<wbr/> If camera movement (e.<wbr/>g.<wbr/> tilting camera) causes the lens to move
-after the lens is locked,<wbr/> the HAL must compensate for this movement appropriately such that
-the same focal plane remains in focus.<wbr/></p>
-<p>When afMode is one of the continuous auto focus modes,<wbr/> the HAL is free to start an AF
-scan whenever it's not locked.<wbr/> When the lens is locked after an AF trigger
-(see <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for detailed state transitions),<wbr/> the HAL should maintain the
-same lock behavior as above.<wbr/></p>
-<p>When afMode is OFF,<wbr/> the application controls focus manually.<wbr/> The accuracy of the
-focus distance control depends on the <a href="#static_android.lens.info.focusDistanceCalibration">android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration</a>.<wbr/>
-However,<wbr/> the lens must not move regardless of the camera movement for any focus distance
-manual control.<wbr/></p>
-<p>To put this in concrete terms,<wbr/> if the camera has lens elements which may move based on
-camera orientation or motion (e.<wbr/>g.<wbr/> due to gravity),<wbr/> then the HAL must drive the lens to
-remain in a fixed position invariant to the camera's orientation or motion,<wbr/> for example,<wbr/>
-by using accelerometer measurements in the lens control logic.<wbr/> This is a typical issue
-that will arise on camera modules with open-loop VCMs.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.afRegions">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>af<wbr/>Regions
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 5 x area_count
- </span>
- <span class="entry_type_visibility"> [public as meteringRectangle]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of metering areas to use for auto-focus.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
- </td>
-
- <td class="entry_range">
- <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not available if <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a> is 0.<wbr/>
-Otherwise will always be present.<wbr/></p>
-<p>The maximum number of focus areas supported by the device is determined by the value
-of <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a>.<wbr/></p>
-<p>The coordinate system is based on the active pixel array,<wbr/>
-with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
-(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
-bottom-right pixel in the active pixel array.<wbr/></p>
-<p>The weight must be within <code>[0,<wbr/> 1000]</code>,<wbr/> and represents a weight
-for every pixel in the area.<wbr/> This means that a large metering area
-with the same weight as a smaller area will have more effect in
-the metering result.<wbr/> Metering areas can partially overlap and the
-camera device will add the weights in the overlap region.<wbr/></p>
-<p>The weights are relative to weights of other metering regions,<wbr/> so if only one region
-is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0 weight is
-ignored.<wbr/></p>
-<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
-camera device.<wbr/></p>
-<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
-capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
-region and output only the intersection rectangle as the metering region in the result
-metadata.<wbr/> If the region is entirely outside the crop region,<wbr/> it will be ignored and
-not reported in the result metadata.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The HAL level representation of MeteringRectangle[] is an
-int[5 * area_<wbr/>count].<wbr/>
-Every five elements represent a metering region of
-(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
-The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
-exclusive on xmax and ymax.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.afTrigger">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>af<wbr/>Trigger
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">IDLE</span>
- <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">START</span>
- <span class="entry_type_enum_notes"><p>Autofocus will trigger now.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CANCEL</span>
- <span class="entry_type_enum_notes"><p>Autofocus will return to its initial
-state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether the camera device will trigger autofocus for this request.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry is normally set to IDLE,<wbr/> or is not
-included at all in the request settings.<wbr/></p>
-<p>When included and set to START,<wbr/> the camera device will trigger the
-autofocus algorithm.<wbr/> If autofocus is disabled,<wbr/> this trigger has no effect.<wbr/></p>
-<p>When set to CANCEL,<wbr/> the camera device will cancel any active trigger,<wbr/>
-and return to its initial AF state.<wbr/></p>
-<p>Generally,<wbr/> applications should set this entry to START or CANCEL for only a
-single capture,<wbr/> and then return it to IDLE (or not set at all).<wbr/> Specifying
-START for multiple captures in a row means restarting the AF operation over
-and over again.<wbr/></p>
-<p>See <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for what the trigger means for each AF mode.<wbr/></p>
-<p>Using the autofocus trigger and the precapture trigger <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>
-simultaneously is allowed.<wbr/> However,<wbr/> since these triggers often require cooperation between
-the auto-focus and auto-exposure routines (for example,<wbr/> the flash may need to be enabled for a
-focus sweep),<wbr/> the camera device may delay acting on a later trigger until the previous
-trigger has been fully handled.<wbr/> This may lead to longer intervals between the trigger and
-changes to <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a>,<wbr/> for example.<wbr/></p>
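-<p>As a hedged illustration (the class name below is an assumption),<wbr/> an application
-might observe the outcome of a single START trigger through a capture callback that
-checks <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a>:</p>
-<pre><code>
-import android.hardware.camera2.CameraCaptureSession;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.CaptureResult;
-import android.hardware.camera2.TotalCaptureResult;
-
-// Sketch: watch android.control.afState after a single START trigger;
-// the exact state sequence depends on the active AF mode.
-final class AfTriggerCallback extends CameraCaptureSession.CaptureCallback {
-    @Override
-    public void onCaptureCompleted(CameraCaptureSession session,
-                                   CaptureRequest request,
-                                   TotalCaptureResult result) {
-        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
-        if (afState == null) {
-            return;
-        }
-        if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
-                || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
-            // The lens is locked; proceed with the still capture, then send
-            // CANCEL to return AF to its initial state.
-        }
-    }
-}
-</code></pre>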
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The HAL must support triggering the AF trigger while an AE precapture trigger is active
-(and vice versa),<wbr/> or at the same time as the AE trigger.<wbr/> It is acceptable for the HAL to
-treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
-AE trigger.<wbr/> Or the HAL may choose to optimize the case with both triggers fired at once,<wbr/>
-to minimize the latency for converging both focus and exposure/<wbr/>flash usage.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.awbLock">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>awb<wbr/>Lock
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Auto-white balance lock is disabled; the AWB
-algorithm is free to update its parameters if in AUTO
-mode.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Auto-white balance lock is enabled; the AWB
-algorithm will not update its parameters while the lock
-is active.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether auto-white balance (AWB) is currently locked to its
-latest calculated values.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to <code>true</code> (ON),<wbr/> the AWB algorithm is locked to its latest parameters,<wbr/>
-and will not change color balance settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
-<p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
-get locked do not necessarily correspond to the settings that were present in the
-latest capture result received from the camera device,<wbr/> since additional captures
-and AWB updates may have occurred even before the result was sent out.<wbr/> If an
-application is switching between automatic and manual control and wishes to eliminate
-any flicker during the switch,<wbr/> the following procedure is recommended:</p>
-<ol>
-<li>Starting in auto-AWB mode:</li>
-<li>Lock AWB</li>
-<li>Wait for the first result to be output that has the AWB locked</li>
-<li>Copy AWB settings from that result into a request,<wbr/> set the request to manual AWB</li>
-<li>Submit the capture request,<wbr/> proceed to run manual AWB as desired.<wbr/></li>
-</ol>
-<p>Note that AWB lock is only meaningful when
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> is in the AUTO mode; in other modes,<wbr/>
-AWB is already fixed to a specific setting.<wbr/></p>
-<p>Some LEGACY devices may not support ON; the value is then overridden to OFF.<wbr/></p>
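-<p>For devices that support MANUAL_<wbr/>POST_<wbr/>PROCESSING,<wbr/> steps 4-5 of the
-procedure above might be sketched as follows (the helper name is an assumption,<wbr/> not
-framework code):</p>
-<pre><code>
-import android.hardware.camera2.CameraMetadata;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.CaptureResult;
-
-final class AwbLockExample {
-    // Sketch: once a result arrives with AWB locked, copy its color
-    // correction values into a request that uses manual white balance.
-    static void copyAwbToManual(CaptureResult lockedResult,
-                                CaptureRequest.Builder manualBuilder) {
-        manualBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
-                CameraMetadata.CONTROL_AWB_MODE_OFF);
-        manualBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
-                CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
-        manualBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
-                lockedResult.get(CaptureResult.COLOR_CORRECTION_GAINS));
-        manualBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
-                lockedResult.get(CaptureResult.COLOR_CORRECTION_TRANSFORM));
-    }
-}
-</code></pre>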
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.awbMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>awb<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled.<wbr/></p>
-<p>The application-selected color transform matrix
-(<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>) and gains
-(<a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>) are used by the camera
-device for manual white balance control.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AUTO</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is active.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">INCANDESCENT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses incandescent light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>While the exact white balance transforms are up to the
-camera device,<wbr/> they will approximately match the CIE
-standard illuminant A.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FLUORESCENT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses fluorescent light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>While the exact white balance transforms are up to the
-camera device,<wbr/> they will approximately match the CIE
-standard illuminant F2.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">WARM_FLUORESCENT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses warm fluorescent light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>While the exact white balance transforms are up to the
-camera device,<wbr/> they will approximately match the CIE
-standard illuminant F4.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">DAYLIGHT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses daylight light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>While the exact white balance transforms are up to the
-camera device,<wbr/> they will approximately match the CIE
-standard illuminant D65.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CLOUDY_DAYLIGHT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses cloudy daylight light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">TWILIGHT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses twilight light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SHADE</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses shade light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether auto-white balance (AWB) is currently setting the color
-transform fields,<wbr/> and what its illumination target
-is.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.awbAvailableModes">android.<wbr/>control.<wbr/>awb<wbr/>Available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is AUTO.<wbr/></p>
-<p>When set to the AUTO mode,<wbr/> the camera device's auto-white balance
-routine is enabled,<wbr/> overriding the application's selected
-<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/> <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
-<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a>.<wbr/> Note that when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>
-is OFF,<wbr/> the behavior of AWB is device dependent.<wbr/> It is recommended to
-also set AWB mode to OFF or lock AWB by using <a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> before
-setting AE mode to OFF.<wbr/></p>
-<p>When set to the OFF mode,<wbr/> the camera device's auto-white balance
-routine is disabled.<wbr/> The application manually controls the white
-balance by <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/> <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>
-and <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a>.<wbr/></p>
-<p>When set to any other modes,<wbr/> the camera device's auto-white
-balance routine is disabled.<wbr/> The camera device uses each
-particular illumination target for white balance
-adjustment.<wbr/> The application's values for
-<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/>
-<a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
-<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> are ignored.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.awbRegions">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>awb<wbr/>Regions
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 5 x area_count
- </span>
- <span class="entry_type_visibility"> [public as meteringRectangle]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of metering areas to use for auto-white-balance illuminant
-estimation.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
- </td>
-
- <td class="entry_range">
- <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not available if <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a> is 0.<wbr/>
-Otherwise will always be present.<wbr/></p>
-<p>The maximum number of regions supported by the device is determined by the value
-of <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a>.<wbr/></p>
-<p>The coordinate system is based on the active pixel array,<wbr/>
-with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
-(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
-bottom-right pixel in the active pixel array.<wbr/></p>
-<p>The weight must range from 0 to 1000,<wbr/> and represents a weight
-for every pixel in the area.<wbr/> This means that a large metering area
-with the same weight as a smaller area will have more effect in
-the metering result.<wbr/> Metering areas can partially overlap and the
-camera device will add the weights in the overlap region.<wbr/></p>
-<p>The weights are relative to weights of other white balance metering regions,<wbr/> so if
-only one region is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with
-0 weight is ignored.<wbr/></p>
-<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
-camera device.<wbr/></p>
-<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
-capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
-region and output only the intersection rectangle as the metering region in the result
-metadata.<wbr/> If the region is entirely outside the crop region,<wbr/> it will be ignored and
-not reported in the result metadata.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The HAL level representation of MeteringRectangle[] is an
-int[5 * area_<wbr/>count].<wbr/>
-Every five elements represent a metering region of
-(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
-The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
-exclusive on xmax and ymax.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.captureIntent">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>capture<wbr/>Intent
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">CUSTOM</span>
- <span class="entry_type_enum_notes"><p>The goal of this request doesn't fall into the other
-categories.<wbr/> The camera device will default to preview-like
-behavior.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PREVIEW</span>
- <span class="entry_type_enum_notes"><p>This request is for a preview-like use case.<wbr/></p>
-<p>The precapture trigger may be used to start off a metering
-w/<wbr/>flash sequence.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">STILL_CAPTURE</span>
- <span class="entry_type_enum_notes"><p>This request is for a still capture-type
-use case.<wbr/></p>
-<p>If the flash unit is under automatic control,<wbr/> it may fire as needed.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">VIDEO_RECORD</span>
- <span class="entry_type_enum_notes"><p>This request is for a video recording
-use case.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">VIDEO_SNAPSHOT</span>
- <span class="entry_type_enum_notes"><p>This request is for a video snapshot (still
-image while recording video) use case.<wbr/></p>
-<p>The camera device should take the highest-quality image
-possible (given the other settings) without disrupting the
-frame rate of video recording.<wbr/> </p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
- <span class="entry_type_enum_notes"><p>This request is for a ZSL use case; the
-application will stream full-resolution images and
-reprocess one or several later for a final
-capture.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MANUAL</span>
- <span class="entry_type_enum_notes"><p>This request is for a manual capture use case where
-the application wants to directly control the capture parameters.<wbr/></p>
-<p>For example,<wbr/> the application may wish to manually control
-<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> etc.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Information to the camera device 3A (auto-exposure,<wbr/>
-auto-focus,<wbr/> auto-white balance) routines about the purpose
-of this capture,<wbr/> to help the camera device to decide optimal 3A
-strategy.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control (except for MANUAL) is only effective if
-<code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> != OFF</code> and any 3A routine is active.<wbr/></p>
-<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>
-contains PRIVATE_<wbr/>REPROCESSING or YUV_<wbr/>REPROCESSING.<wbr/> MANUAL will be supported if
-<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains MANUAL_<wbr/>SENSOR.<wbr/> Other intent values are
-always supported.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.effectMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>effect<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No color effect will be applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MONO</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "monocolor" effect where the image is mapped into
-a single color.<wbr/></p>
-<p>This will typically be grayscale.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">NEGATIVE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "photo-negative" effect where the image's colors
-are inverted.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SOLARIZE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "solarisation" effect (Sabattier effect) where the
-image is wholly or partially reversed in
-tone.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SEPIA</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "sepia" effect where the image is mapped into warm
-gray,<wbr/> red,<wbr/> and brown tones.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">POSTERIZE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "posterization" effect where the image uses
-discrete regions of tone rather than a continuous
-gradient of tones.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">WHITEBOARD</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "whiteboard" effect where the image is typically displayed
-as regions of white,<wbr/> with black or grey details.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">BLACKBOARD</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "blackboard" effect where the image is typically displayed
-as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AQUA</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>An "aqua" effect where a blue hue is added to the image.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A special color effect to apply.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.availableEffects">android.<wbr/>control.<wbr/>available<wbr/>Effects</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When this mode is set,<wbr/> a color effect will be applied
-to images produced by the camera device.<wbr/> The interpretation
-and implementation of these color effects are left to the
-implementor of the camera device,<wbr/> and should not be
-depended on to be consistent (or present) across all
-devices.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Full application control of pipeline.<wbr/></p>
-<p>All control by the device's metering and focusing (3A)
-routines is disabled,<wbr/> and no other settings in
-android.<wbr/>control.<wbr/>* have any effect,<wbr/> except that
-<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> may be used by the camera
-device to select post-processing values for processing
-blocks that do not allow for manual control,<wbr/> or are not
-exposed by the camera API.<wbr/></p>
-<p>However,<wbr/> the camera device's 3A routines may continue to
-collect statistics and update their internal state so that
-when control is switched to AUTO mode,<wbr/> good control values
-can be immediately applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AUTO</span>
- <span class="entry_type_enum_notes"><p>Use settings for each individual 3A routine.<wbr/></p>
-<p>Manual control of capture parameters is disabled.<wbr/> All
-controls in android.<wbr/>control.<wbr/>* besides sceneMode take
-effect.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">USE_SCENE_MODE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Use a specific scene mode.<wbr/></p>
-<p>Enabling this disables control.<wbr/>aeMode,<wbr/> control.<wbr/>awbMode and
-control.<wbr/>afMode controls; the camera device will ignore
-those settings while USE_<wbr/>SCENE_<wbr/>MODE is active (except for
-FACE_<wbr/>PRIORITY scene mode).<wbr/> Other control entries are still active.<wbr/>
-This setting can only be used if scene mode is supported (i.<wbr/>e.<wbr/>
-<a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>
-contains some modes other than DISABLED).<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">OFF_KEEP_STATE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Same as OFF mode,<wbr/> except that this capture will not be
-used by camera device background auto-exposure,<wbr/> auto-white balance and
-auto-focus algorithms (3A) to update their statistics.<wbr/></p>
-<p>Specifically,<wbr/> the 3A routines are locked to the last
-values set from a request with AUTO,<wbr/> OFF,<wbr/> or
-USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> and any statistics or state updates
-collected from manual captures with OFF_<wbr/>KEEP_<wbr/>STATE will be
-discarded by the camera device.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Overall mode of 3A (auto-exposure,<wbr/> auto-white-balance,<wbr/> auto-focus) control
-routines.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.availableModes">android.<wbr/>control.<wbr/>available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is a top-level 3A control switch.<wbr/> When set to OFF,<wbr/> all 3A control
-by the camera device is disabled.<wbr/> The application must set the fields for
-capture parameters itself.<wbr/></p>
-<p>When set to AUTO,<wbr/> the individual algorithm controls in
-android.<wbr/>control.<wbr/>* are in effect,<wbr/> such as <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>.<wbr/></p>
-<p>When set to USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> the individual controls in
-android.<wbr/>control.<wbr/>* are mostly disabled,<wbr/> and the camera device implements
-one of the scene mode settings (such as ACTION,<wbr/> SUNSET,<wbr/> or PARTY)
-as it wishes.<wbr/> The camera device scene mode 3A settings are provided by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">capture results</a>.<wbr/></p>
-<p>When set to OFF_<wbr/>KEEP_<wbr/>STATE,<wbr/> it is similar to OFF mode; the only difference
-is that this frame will not be used by the camera device's background 3A statistics
-update,<wbr/> as if this frame were never captured.<wbr/> This mode can be used in the scenario
-where the application doesn't want a 3A manual control capture to affect
-the subsequent auto 3A capture results.<wbr/></p>
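-<p>A minimal sketch of the USE_<wbr/>SCENE_<wbr/>MODE case (the class and method names are
-assumptions) might look like:</p>
-<pre><code>
-import android.hardware.camera2.CameraMetadata;
-import android.hardware.camera2.CaptureRequest;
-
-final class SceneModeExample {
-    // Sketch: hand 3A over to a device scene mode, assuming NIGHT is listed
-    // in android.control.availableSceneModes.
-    static void useNightScene(CaptureRequest.Builder builder) {
-        builder.set(CaptureRequest.CONTROL_MODE,
-                CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
-        builder.set(CaptureRequest.CONTROL_SCENE_MODE,
-                CameraMetadata.CONTROL_SCENE_MODE_NIGHT);
-        // While USE_SCENE_MODE is active, aeMode/awbMode/afMode set by the
-        // application are ignored (except for the FACE_PRIORITY scene mode).
-    }
-}
-</code></pre>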
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.control.sceneMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>scene<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">DISABLED</span>
- <span class="entry_type_enum_value">0</span>
- <span class="entry_type_enum_notes"><p>Indicates that no scene modes are set for a given capture request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FACE_PRIORITY</span>
- <span class="entry_type_enum_notes"><p>If face detection support exists,<wbr/> use face
-detection data for auto-focus,<wbr/> auto-white balance,<wbr/> and
-auto-exposure routines.<wbr/></p>
-<p>If face detection statistics are disabled
-(i.<wbr/>e.<wbr/> <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> is set to OFF),<wbr/>
-this should still operate correctly (but will not return
-face detection statistics to the framework).<wbr/></p>
-<p>Unlike the other scene modes,<wbr/> <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
-remain active when FACE_<wbr/>PRIORITY is set.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ACTION</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving objects.<wbr/></p>
-<p>Similar to SPORTS.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PORTRAIT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for still photos of people.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">LANDSCAPE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for photos of distant macroscopic objects.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">NIGHT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for low-light settings.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">NIGHT_PORTRAIT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for still photos of people in low-light
-settings.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">THEATRE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings where flash must
-remain off.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">BEACH</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor beach settings.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SNOW</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor settings containing snow.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SUNSET</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for scenes of the setting sun.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">STEADYPHOTO</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized to avoid blurry photos due to small amounts of
-device motion (for example: due to hand shake).<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FIREWORKS</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for nighttime photos of fireworks.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SPORTS</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving people.<wbr/></p>
-<p>Similar to ACTION.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PARTY</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings with multiple moving
-people.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CANDLELIGHT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for dim settings where the main light source
-is a flame.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">BARCODE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for accurately capturing a photo of barcode
-for use by camera applications that wish to read the
-barcode value.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_SPEED_VIDEO</span>
- <span class="entry_type_enum_deprecated">[deprecated]</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>This is deprecated,<wbr/> please use <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>
-and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>
-for high speed video recording.<wbr/></p>
-<p>Optimized for high speed video recording (frame rate >=60fps) use case.<wbr/></p>
-<p>The supported high speed video sizes and fps ranges are specified in
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> To get desired
-output frame rates,<wbr/> the application is only allowed to select video size
-and fps range combinations listed in this static metadata.<wbr/> The fps range
-can be controlled via <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a>.<wbr/></p>
-<p>In this mode,<wbr/> the camera device will override aeMode,<wbr/> awbMode,<wbr/> and afMode to
-ON,<wbr/> ON,<wbr/> and CONTINUOUS_<wbr/>VIDEO,<wbr/> respectively.<wbr/> All post-processing block mode
-controls will be overridden to be FAST.<wbr/> Therefore,<wbr/> no manual control of capture
-and post-processing parameters is possible.<wbr/> All other controls operate the
-same as when <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == AUTO.<wbr/> This means that all other
-android.<wbr/>control.<wbr/>* fields continue to work,<wbr/> such as</p>
-<ul>
-<li><a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a></li>
-<li><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a></li>
-<li><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></li>
-<li><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></li>
-<li><a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a></li>
-<li><a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a></li>
-<li><a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a></li>
-<li><a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a></li>
-<li><a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a></li>
-<li><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a></li>
-</ul>
-<p>Outside of android.<wbr/>control.<wbr/>*,<wbr/> the following controls will work:</p>
-<ul>
-<li><a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> (automatic flash for still capture will not work since aeMode is ON)</li>
-<li><a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a> (if it is supported)</li>
-<li><a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a></li>
-<li><a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a></li>
-</ul>
-<p>For the high speed recording use case,<wbr/> the actual maximum supported frame rate may
-be lower than what the camera can output,<wbr/> depending on the destination Surfaces for
-the image data.<wbr/> For example,<wbr/> if the destination surface is from a video encoder,<wbr/>
-the application needs to check whether the video encoder is capable of supporting the
-high frame rate for a given video size; otherwise it will end up with a lower recording
-frame rate.<wbr/> If the destination surface is from a preview window,<wbr/> the preview frame
-rate will be bounded by the screen refresh rate.<wbr/></p>
-<p>The camera device will only support up to 2 output high speed streams
-(processed non-stalling format defined in <a href="#static_android.request.maxNumOutputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams</a>)
-in this mode.<wbr/> This control will be effective only if all of below conditions are true:</p>
-<ul>
-<li>The application created no more than maxNumHighSpeedStreams processed non-stalling
-format output streams,<wbr/> where maxNumHighSpeedStreams is calculated as
-min(2,<wbr/> <a href="#static_android.request.maxNumOutputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams</a>[Processed (but not-stalling)]).<wbr/></li>
-<li>The stream sizes are selected from the sizes reported by
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/></li>
-<li>No processed non-stalling or raw streams are configured.<wbr/></li>
-</ul>
-<p>When the above conditions are NOT satisfied,<wbr/> the controls of this mode and
-<a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> will be ignored by the camera device,<wbr/>
-the camera device will fall back to <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> <code>==</code> AUTO,<wbr/>
-and the returned capture result metadata will give the fps range chosen
-by the camera device.<wbr/></p>
-<p>Switching into or out of this mode may trigger some camera ISP/<wbr/>sensor
-reconfigurations,<wbr/> which may introduce extra latency.<wbr/> It is recommended that
-the application avoid unnecessary scene mode switches as much as possible.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HDR</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Turn on a device-specific high dynamic range (HDR) mode.<wbr/></p>
-<p>In this scene mode,<wbr/> the camera device captures images
-that keep a larger range of scene illumination levels
-visible in the final image.<wbr/> For example,<wbr/> when taking a
-picture of an object in front of a bright window,<wbr/> both
-the object and the scene through the window may be
-visible when using HDR mode,<wbr/> while in normal AUTO mode,<wbr/>
-one or the other may be poorly exposed.<wbr/> As a tradeoff,<wbr/>
-HDR mode generally takes much longer to capture a single
-image,<wbr/> has no user control,<wbr/> and may have other artifacts
-depending on the HDR method used.<wbr/></p>
-<p>Therefore,<wbr/> HDR captures operate at a much slower rate
-than regular captures.<wbr/></p>
-<p>In this mode,<wbr/> on LIMITED or FULL devices,<wbr/> when a request
-is made with a <a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> of
-STILL_<wbr/>CAPTURE,<wbr/> the camera device will capture an image
-using a high dynamic range capture technique.<wbr/> On LEGACY
-devices,<wbr/> captures that target a JPEG-format output will
-be captured with HDR,<wbr/> and the capture intent is not
-relevant.<wbr/></p>
-<p>The HDR capture may involve the device capturing a burst
-of images internally and combining them into one,<wbr/> or it
-may involve the device using specialized high dynamic
-range capture hardware.<wbr/> In all cases,<wbr/> a single image is
-produced in response to a capture request submitted
-while in HDR mode.<wbr/></p>
-<p>Since substantial post-processing is generally needed to
-produce an HDR image,<wbr/> only YUV,<wbr/> PRIVATE,<wbr/> and JPEG
-outputs are supported for LIMITED/<wbr/>FULL device HDR
-captures,<wbr/> and only JPEG outputs are supported for LEGACY
-HDR captures.<wbr/> Using a RAW output for HDR capture is not
-supported.<wbr/></p>
-<p>Some devices may also support always-on HDR,<wbr/> which
-applies HDR processing at full frame rate.<wbr/> For these
-devices,<wbr/> intents other than STILL_<wbr/>CAPTURE will also
-produce an HDR output with no frame rate impact compared
-to normal operation,<wbr/> though the quality may be lower
-than for STILL_<wbr/>CAPTURE intents.<wbr/></p>
-<p>If SCENE_<wbr/>MODE_<wbr/>HDR is used with unsupported output types
-or capture intents,<wbr/> the images captured will be as if
-the SCENE_<wbr/>MODE was not enabled at all.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FACE_PRIORITY_LOW_LIGHT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_hidden">[hidden]</span>
- <span class="entry_type_enum_notes"><p>Same as FACE_<wbr/>PRIORITY scene mode,<wbr/> except that the camera
-device will choose higher sensitivity values (<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>)
-under low light conditions.<wbr/></p>
-<p>The camera device may be tuned to expose the images in a reduced
-sensitivity range to produce the best quality images.<wbr/> For example,<wbr/>
-if the <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a> gives a range of [100,<wbr/> 1600],<wbr/>
-the camera device auto-exposure routine tuning process may limit the actual
-exposure sensitivity range to [100,<wbr/> 1200] to ensure that the noise level isn't
-excessive in order to preserve the image quality.<wbr/> Under this situation,<wbr/> the image under
-low light may be under-exposed when the sensor max exposure time (bounded by the
-<a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is one of the
-ON_<wbr/>* modes) and effective max sensitivity are reached.<wbr/> This scene mode allows the
-camera device auto-exposure routine to increase the sensitivity up to the max
-sensitivity specified by <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a> when the scene is too
-dark and the max exposure time is reached.<wbr/> The captured images may be noisier
-compared with the images captured in normal FACE_<wbr/>PRIORITY mode; therefore,<wbr/> it is
-recommended that the application only use this scene mode when it is capable of
-reducing the noise level of the captured images.<wbr/></p>
-<p>Unlike the other scene modes,<wbr/> <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
-remain active when FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT is set.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">DEVICE_CUSTOM_START</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_hidden">[hidden]</span>
- <span class="entry_type_enum_value">100</span>
- <span class="entry_type_enum_notes"><p>Scene mode values within the range of
-<code>[DEVICE_<wbr/>CUSTOM_<wbr/>START,<wbr/> DEVICE_<wbr/>CUSTOM_<wbr/>END]</code> are reserved for device specific
-customized scene modes.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">DEVICE_CUSTOM_END</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_hidden">[hidden]</span>
- <span class="entry_type_enum_value">127</span>
- <span class="entry_type_enum_notes"><p>Scene mode values within the range of
-<code>[DEVICE_<wbr/>CUSTOM_<wbr/>START,<wbr/> DEVICE_<wbr/>CUSTOM_<wbr/>END]</code> are reserved for device specific
-customized scene modes.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Control for which scene mode is currently active.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Scene modes are custom camera modes optimized for a certain set of conditions and
-capture settings.<wbr/></p>
-<p>This is the mode that is active when
-<code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code>.<wbr/> Aside from FACE_<wbr/>PRIORITY,<wbr/> these modes will
-disable <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
-while in use.<wbr/></p>
-<p>The interpretation and implementation of these scene modes are left
-to the implementor of the camera device.<wbr/> Their behavior will not be
-consistent across all devices,<wbr/> and any given device may only implement
-a subset of these modes.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL implementations that include scene modes are expected to provide
-the per-scene settings to use for <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> in
-<a href="#static_android.control.sceneModeOverrides">android.<wbr/>control.<wbr/>scene<wbr/>Mode<wbr/>Overrides</a>.<wbr/></p>
-<p>For HIGH_<wbr/>SPEED_<wbr/>VIDEO mode,<wbr/> if it is included in <a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>,<wbr/>
-the HAL must list supported video size and fps range in
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> For a given size,<wbr/> e.<wbr/>g.<wbr/>
-1280x720,<wbr/> if the HAL has two different sensor configurations for normal streaming
-mode and high speed streaming,<wbr/> when this scene mode is set/<wbr/>reset in a sequence of capture
-requests,<wbr/> the HAL may have to switch between different sensor modes.<wbr/>
-This mode is deprecated in HAL3.<wbr/>3; to support high speed video recording,<wbr/> please implement
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a> and the CONSTRAINED_<wbr/>HIGH_<wbr/>SPEED_<wbr/>VIDEO
-capability defined in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
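Illustrative sketch (not part of the generated metadata documentation) of how the scene mode entry above is exercised from the public camera2 API: the application confirms the mode is listed in android.control.availableSceneModes, then sets both CONTROL_MODE and CONTROL_SCENE_MODE. NIGHT is used here only as an example mode.

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;

/** Sketch: enable NIGHT scene mode when the device lists it as available. */
static void applyNightSceneMode(CameraCharacteristics chars, CaptureRequest.Builder b) {
    int[] modes = chars.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
    if (modes == null) return;
    for (int mode : modes) {
        if (mode == CameraMetadata.CONTROL_SCENE_MODE_NIGHT) {
            // Scene modes only take effect when the top-level 3A switch is USE_SCENE_MODE.
            b.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
            b.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_NIGHT);
            return;
        }
    }
}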
-
-
- <tr class="entry" id="controls_android.control.videoStabilizationMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Video stabilization is disabled.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Video stabilization is enabled.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether video stabilization is
-active.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Video stabilization automatically warps images from
-the camera in order to stabilize motion between consecutive frames.<wbr/></p>
-<p>If enabled,<wbr/> video stabilization can modify the
-<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> to keep the video stream stabilized.<wbr/></p>
-<p>Switching between different video stabilization modes may take several
-frames to initialize; the camera device will report the current mode
-in capture result metadata.<wbr/> For example,<wbr/> when "ON" mode is requested,<wbr/>
-the video stabilization modes in the first several capture results may
-still be "OFF",<wbr/> and it will become "ON" when the initialization is
-done.<wbr/></p>
-<p>In addition,<wbr/> not all recording sizes or frame rates may be supported for
-stabilization by a device that reports stabilization support.<wbr/> It is guaranteed
-that an output targeting a MediaRecorder or MediaCodec will be stabilized if
-the recording resolution is less than or equal to 1920 x 1080 (width less than
-or equal to 1920,<wbr/> height less than or equal to 1080),<wbr/> and the recording
-frame rate is less than or equal to 30fps.<wbr/> At other sizes,<wbr/> the CaptureResult
-<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a> field will return
-OFF if the recording output is not stabilized,<wbr/> or if there are no output
-Surface types that can be stabilized.<wbr/></p>
-<p>If a camera device supports both this mode and OIS
-(<a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> turning both modes on may
-produce undesirable interaction,<wbr/> so it is recommended not to enable
-both at the same time.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
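A minimal Java sketch (not part of the generated metadata documentation) of the video stabilization control described above: the request enables EIS, and the capture result is checked per frame because the mode may remain OFF for the first few frames while stabilization initializes.

import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;

/** Sketch: request digital video stabilization on a recording request. */
static void enableStabilization(CaptureRequest.Builder b) {
    b.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
          CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
}

/** Sketch: check whether stabilization is actually active for a given frame. */
static boolean stabilizationActive(TotalCaptureResult result) {
    Integer mode = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
    // The first few results after switching may still report OFF.
    return mode != null && mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON;
}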
-
-
- <tr class="entry" id="controls_android.control.postRawSensitivityBoost">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The amount of additional sensitivity boost applied to output images
-after RAW sensor data is captured.<wbr/></p>
- </td>
-
- <td class="entry_units">
- ISO arithmetic units,<wbr/> the same as android.<wbr/>sensor.<wbr/>sensitivity
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.postRawSensitivityBoostRange">android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost<wbr/>Range</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Some camera devices support additional digital sensitivity boosting in the
-camera processing pipeline after the sensor RAW image is captured.<wbr/>
-Such a boost will be applied to YUV/<wbr/>JPEG format output images but will have
-no effect on RAW output formats like RAW_<wbr/>SENSOR,<wbr/> RAW10,<wbr/> RAW12 or RAW_<wbr/>OPAQUE.<wbr/></p>
-<p>This key will be <code>null</code> for devices that do not support any RAW format
-outputs.<wbr/> For devices that do support RAW format outputs,<wbr/> this key will always
-be present,<wbr/> and if a device does not support post RAW sensitivity boost,<wbr/> it will
-list <code>100</code> in this key.<wbr/></p>
-<p>If the camera device cannot apply the exact boost requested,<wbr/> it will reduce the
-boost to the nearest supported value.<wbr/>
-The final boost value used will be available in the output capture result.<wbr/></p>
-<p>For devices that support post RAW sensitivity boost,<wbr/> the YUV/<wbr/>JPEG output images
-of such a device will have a total sensitivity of
-<code><a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> * <a href="#controls_android.control.postRawSensitivityBoost">android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost</a> /<wbr/> 100</code>.<wbr/>
-The sensitivity of RAW format images will always be <code><a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a></code>.<wbr/></p>
-<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
-OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
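To make the formula above concrete, here is a hedged Java sketch (not part of the generated metadata documentation, and it assumes API level 24 or later): it reads the advertised boost range and requests roughly a 2x boost. With android.sensor.sensitivity at 100 and a boost of 200, the effective YUV/JPEG sensitivity is 100 * 200 / 100 = 200.

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.util.Range;

/** Sketch: request about a 2x post-RAW boost, clamped to the advertised range. */
static void applyPostRawBoost(CameraCharacteristics chars, CaptureRequest.Builder b) {
    Range<Integer> range =
            chars.get(CameraCharacteristics.CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE);
    if (range == null) return; // Device has no RAW output support.
    int boost = range.clamp(200); // 200 == 2x; effective ISO = sensitivity * boost / 100
    b.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST, boost);
}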
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.control.aeAvailableAntibandingModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of auto-exposure antibanding modes for <a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a> that are
-supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not all of the auto-exposure anti-banding modes may be
-supported by a given camera device.<wbr/> This field lists the
-valid anti-banding modes that the application may request
-for this camera device with the
-<a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a> control.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.aeAvailableModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of auto-exposure modes for <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> that are supported by this camera
-device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not all the auto-exposure modes may be supported by a
-given camera device,<wbr/> especially if no flash unit is
-available.<wbr/> This entry lists the valid modes for
-<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> for this camera device.<wbr/></p>
-<p>All camera devices support ON,<wbr/> and all camera devices with flash
-units support ON_<wbr/>AUTO_<wbr/>FLASH and ON_<wbr/>ALWAYS_<wbr/>FLASH.<wbr/></p>
-<p>FULL mode camera devices always support OFF mode,<wbr/>
-which enables application control of camera exposure time,<wbr/>
-sensitivity,<wbr/> and frame duration.<wbr/></p>
-<p>LEGACY mode camera devices never support OFF mode.<wbr/>
-LIMITED mode devices support OFF if they support the MANUAL_<wbr/>SENSOR
-capability.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.aeAvailableTargetFpsRanges">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2 x n
- </span>
- <span class="entry_type_visibility"> [public as rangeInt]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">list of pairs of frame rates</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of frame rate ranges for <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> supported by
-this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Frames per second (FPS)
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For devices at the LEGACY level or above:</p>
-<ul>
-<li>
-<p>For constant-framerate recording,<wbr/> for each normal
-<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>,<wbr/> that is,<wbr/> a
-<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a> that has
-<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#quality">quality</a> in
-the range [<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_LOW">QUALITY_<wbr/>LOW</a>,<wbr/>
-<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_2160P">QUALITY_<wbr/>2160P</a>],<wbr/> if the profile is
-supported by the device and has
-<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a> <code>x</code>,<wbr/> this list will
-always include (<code>x</code>,<wbr/><code>x</code>).<wbr/></p>
-</li>
-<li>
-<p>Also,<wbr/> a camera device must either not support any
-<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>,<wbr/>
-or support at least one
-normal <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a> that has
-<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a> <code>x</code> >= 24.<wbr/></p>
-</li>
-</ul>
-<p>For devices at the LIMITED level or above:</p>
-<ul>
-<li>For YUV_<wbr/>420_<wbr/>888 burst capture use case,<wbr/> this list will always include (<code>min</code>,<wbr/> <code>max</code>)
-and (<code>max</code>,<wbr/> <code>max</code>) where <code>min</code> <= 15 and <code>max</code> = the maximum output frame rate of the
-maximum YUV_<wbr/>420_<wbr/>888 output size.<wbr/></li>
-</ul>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
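As an illustration of how the list above is consumed (a sketch only, not part of the generated metadata documentation), an application that wants constant-framerate recording can scan the advertised ranges for a fixed (30, 30) pair and apply it to the request.

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.util.Range;

/** Sketch: lock AE to a constant 30 fps range when the device lists one. */
static void requestConstant30Fps(CameraCharacteristics chars, CaptureRequest.Builder b) {
    Range<Integer>[] ranges =
            chars.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    if (ranges == null) return;
    for (Range<Integer> r : ranges) {
        if (r.getLower() == 30 && r.getUpper() == 30) {
            b.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, r);
            return;
        }
    }
}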
-
-
- <tr class="entry" id="static_android.control.aeCompensationRange">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as rangeInt]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Maximum and minimum exposure compensation values for
-<a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a>,<wbr/> in counts of <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a>,<wbr/>
-that are supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Range [0,<wbr/>0] indicates that exposure compensation is not supported.<wbr/></p>
-<p>For LIMITED and FULL devices,<wbr/> the range must meet the following requirements if exposure
-compensation is supported (<code>range != [0,<wbr/> 0]</code>):</p>
-<p><code>Min.<wbr/>exposure compensation * <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> <= -2 EV</code></p>
-<p><code>Max.<wbr/>exposure compensation * <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> >= 2 EV</code></p>
-<p>LEGACY devices may support a smaller range than this.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.aeCompensationStep">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Smallest step by which the exposure compensation
-can be changed.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Exposure Value (EV)
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is the unit for <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a>.<wbr/> For example,<wbr/> if this key has
-a value of <code>1/<wbr/>2</code>,<wbr/> then a setting of <code>-2</code> for <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> means
-that the target EV offset for the auto-exposure routine is -1 EV.<wbr/></p>
-<p>One unit of EV compensation changes the brightness of the captured image by a factor
-of two.<wbr/> +1 EV doubles the image brightness,<wbr/> while -1 EV halves the image brightness.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This must be less than or equal to 1/<wbr/>2.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
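A small sketch (not part of the generated metadata documentation) of the arithmetic described above: with a step of 1/2 EV, a request for +1 EV needs a compensation setting of 2. The snippet derives the step count from the advertised keys and clamps it to the supported range.

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.util.Range;
import android.util.Rational;

/** Sketch: apply approximately +1 EV of auto-exposure compensation. */
static void applyPlusOneEv(CameraCharacteristics chars, CaptureRequest.Builder b) {
    Rational step = chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
    Range<Integer> range = chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
    if (step == null || range == null) return;
    if (range.getLower() == 0 && range.getUpper() == 0) return; // compensation unsupported
    // +1 EV divided by the step size gives the compensation setting,
    // e.g. a step of 1/2 EV requires a setting of 2.
    int steps = Math.round(1.0f / step.floatValue());
    b.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, range.clamp(steps));
}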
-
-
- <tr class="entry" id="static_android.control.afAvailableModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>af<wbr/>Available<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">List of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of auto-focus (AF) modes for <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> that are
-supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not all the auto-focus modes may be supported by a
-given camera device.<wbr/> This entry lists the valid modes for
-<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> for this camera device.<wbr/></p>
-<p>All LIMITED and FULL mode camera devices will support OFF mode,<wbr/> and all
-camera devices with adjustable focuser units
-(<code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> > 0</code>) will support AUTO mode.<wbr/></p>
-<p>LEGACY devices will support OFF mode only if they support
-focusing to infinity (by also setting <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> to
-<code>0.<wbr/>0f</code>).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.availableEffects">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>available<wbr/>Effects
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">List of enums (android.<wbr/>control.<wbr/>effect<wbr/>Mode).<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of color effects for <a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a> that are supported by this camera
-device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This list contains the color effect modes that can be applied to
-images produced by the camera device.<wbr/>
-Implementations are not expected to be consistent across all devices.<wbr/>
-If no color effect modes are available for a device,<wbr/> this will only list
-OFF.<wbr/></p>
-<p>A color effect will only be applied if
-<a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> != OFF.<wbr/> OFF is always included in this list.<wbr/></p>
-<p>This control has no effect on the operation of other control routines such
-as auto-exposure,<wbr/> white balance,<wbr/> or focus.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.availableSceneModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">List of enums (android.<wbr/>control.<wbr/>scene<wbr/>Mode).<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of scene modes for <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> that are supported by this camera
-device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This list contains scene modes that can be set for the camera device.<wbr/>
-Only scene modes that have been fully implemented for the
-camera device may be included here.<wbr/> Implementations are not expected
-to be consistent across all devices.<wbr/></p>
-<p>If no scene modes are supported by the camera device,<wbr/> this
-will be set to DISABLED.<wbr/> Otherwise DISABLED will not be listed.<wbr/></p>
-<p>FACE_<wbr/>PRIORITY is always listed if face detection is
-supported (i.<wbr/>e.<wbr/><code><a href="#static_android.statistics.info.maxFaceCount">android.<wbr/>statistics.<wbr/>info.<wbr/>max<wbr/>Face<wbr/>Count</a> >
-0</code>).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.availableVideoStabilizationModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>available<wbr/>Video<wbr/>Stabilization<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">List of enums.<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of video stabilization modes for <a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>
-that are supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>OFF will always be listed.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.awbAvailableModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>awb<wbr/>Available<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">List of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of auto-white-balance modes for <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> that are supported by this
-camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not all the auto-white-balance modes may be supported by a
-given camera device.<wbr/> This entry lists the valid modes for
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> for this camera device.<wbr/></p>
-<p>All camera devices will support ON mode.<wbr/></p>
-<p>Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always support OFF
-mode,<wbr/> which enables application control of white balance,<wbr/> by using
-<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>(<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> must be set to TRANSFORM_<wbr/>MATRIX).<wbr/> This includes all FULL
-mode camera devices.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
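The MANUAL_POST_PROCESSING path mentioned above can be exercised as in the following hedged Java sketch (not part of the generated metadata documentation): AWB is switched off and per-channel gains are applied directly. The gains value is supplied by the caller, and a complete manual pipeline would also set COLOR_CORRECTION_TRANSFORM.

import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.RggbChannelVector;

/** Sketch: manual white balance on a device supporting MANUAL_POST_PROCESSING. */
static void applyManualWhiteBalance(CaptureRequest.Builder b, RggbChannelVector gains) {
    b.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
    b.set(CaptureRequest.COLOR_CORRECTION_MODE,
          CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
    b.set(CaptureRequest.COLOR_CORRECTION_GAINS, gains);
    // A full manual pipeline would also set COLOR_CORRECTION_TRANSFORM here.
}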
-
-
- <tr class="entry" id="static_android.control.maxRegions">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>control.<wbr/>max<wbr/>Regions
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of the maximum number of regions that can be used for metering in
-auto-exposure (AE),<wbr/> auto-white balance (AWB),<wbr/> and auto-focus (AF);
-this corresponds to the maximum number of elements in
-<a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a>,<wbr/> <a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a>,<wbr/>
-and <a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a>.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Value must be >= 0 for each element.<wbr/> For full-capability devices
-this value must be >= 1 for AE and AF.<wbr/> The order of the elements is:
-<code>(AE,<wbr/> AWB,<wbr/> AF)</code>.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.maxRegionsAe">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [java_public]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum number of metering regions that can be used by the auto-exposure (AE)
-routine.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Value will be >= 0.<wbr/> For FULL-capability devices,<wbr/> this
-value will be >= 1.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-<p>This corresponds to the maximum allowed number of elements in
-<a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry is private to the framework.<wbr/> Fill in
-maxRegions to have this entry be automatically populated.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.maxRegionsAwb">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [java_public]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum number of metering regions that can be used by the auto-white balance (AWB)
-routine.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Value will be >= 0.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-<p>This corresponds to the maximum allowed number of elements in
-<a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry is private to the framework.<wbr/> Fill in
-maxRegions to have this entry be automatically populated.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.maxRegionsAf">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [java_public]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum number of metering regions that can be used by the auto-focus (AF) routine.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Value will be >= 0.<wbr/> For FULL-capability devices,<wbr/> this
-value will be >= 1.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-<p>This corresponds to the maximum allowed number of elements in
-<a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry is private to the framework.<wbr/> Fill in
-maxRegions to have this entry be automatically populated.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
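As a sketch of how the per-routine maximums above are honored from the camera2 API (illustrative only, not part of the generated metadata documentation): the application checks android.control.maxRegionsAf before attaching an AF metering region to the request.

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.MeteringRectangle;

/** Sketch: set a single AF metering region if the device supports any. */
static void setAfRegion(CameraCharacteristics chars, CaptureRequest.Builder b,
                        MeteringRectangle region) {
    Integer maxAf = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
    if (maxAf == null || maxAf < 1) return; // AF metering regions not supported.
    b.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] { region });
}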
-
-
- <tr class="entry" id="static_android.control.sceneModeOverrides">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>scene<wbr/>Mode<wbr/>Overrides
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x length(availableSceneModes)
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Ordered list of auto-exposure,<wbr/> auto-white balance,<wbr/> and auto-focus
-settings to use with each available scene mode.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>For each available scene mode,<wbr/> the list must contain three
-entries containing the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> values used
-by the camera device.<wbr/> The entry order is <code>(aeMode,<wbr/> awbMode,<wbr/> afMode)</code>
-where aeMode has the lowest index position.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When a scene mode is enabled,<wbr/> the camera device is expected
-to override <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/>
-and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> with its preferred settings for
-that scene mode.<wbr/></p>
-<p>The order of this list matches that of availableSceneModes,<wbr/>
-with 3 entries for each mode.<wbr/> The overrides listed
-for FACE_<wbr/>PRIORITY and FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT (if supported) are ignored,<wbr/>
-since for those modes the application-set <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> values are
-used instead,<wbr/> matching the behavior when <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>
-is set to AUTO.<wbr/> It is recommended that the FACE_<wbr/>PRIORITY and
-FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT (if supported) overrides be set to 0.<wbr/></p>
-<p>For example,<wbr/> if availableSceneModes contains
-<code>(FACE_<wbr/>PRIORITY,<wbr/> ACTION,<wbr/> NIGHT)</code>,<wbr/> then the camera framework
-expects sceneModeOverrides to have 9 entries formatted like:
-<code>(0,<wbr/> 0,<wbr/> 0,<wbr/> ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> AUTO,<wbr/> CONTINUOUS_<wbr/>PICTURE,<wbr/>
-ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> INCANDESCENT,<wbr/> AUTO)</code>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>To maintain backward compatibility,<wbr/> this list will be made available
-in the static metadata of the camera service.<wbr/> The camera service will
-use these values to set <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> when using a scene
-mode other than FACE_<wbr/>PRIORITY and FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT (if supported).<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.availableHighSpeedVideoConfigurations">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 5 x n
- </span>
- <span class="entry_type_visibility"> [hidden as highSpeedVideoConfiguration]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of available high speed video size,<wbr/> fps range and max batch size configurations
-supported by the camera device,<wbr/> in the format of (width,<wbr/> height,<wbr/> fps_<wbr/>min,<wbr/> fps_<wbr/>max,<wbr/> batch_<wbr/>size_<wbr/>max).<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>For each configuration,<wbr/> the fps_<wbr/>max >= 120fps.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When CONSTRAINED_<wbr/>HIGH_<wbr/>SPEED_<wbr/>VIDEO is supported in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>,<wbr/>
-this metadata will list the supported high speed video size,<wbr/> fps range and max batch size
-configurations.<wbr/> All the sizes listed in this configuration will be a subset of the sizes
-reported by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">StreamConfigurationMap#getOutputSizes</a>
-for processed non-stalling formats.<wbr/></p>
-<p>For the high speed video use case,<wbr/> the application must
-select the video size and fps range from this metadata to configure the recording and
-preview streams and setup the recording requests.<wbr/> For example,<wbr/> if the application intends
-to do high speed recording,<wbr/> it can select the maximum size reported by this metadata to
-configure output streams.<wbr/> Once the size is selected,<wbr/> application can filter this metadata
-by selected size and get the supported fps ranges,<wbr/> and use these fps ranges to setup the
-recording requests.<wbr/> Note that for the use case of multiple output streams,<wbr/> application
-must select one unique size from this metadata to use (e.<wbr/>g.,<wbr/> preview and recording streams
-must have the same size).<wbr/> Otherwise,<wbr/> the high speed capture session creation will fail.<wbr/> A client-side sketch of this selection flow follows this entry.<wbr/></p>
-<p>The min and max fps will be multiples of 30fps.<wbr/></p>
-<p>High speed video streaming puts significant performance pressure on the camera hardware;<wbr/>
-to achieve efficient high speed streaming,<wbr/> the camera device may have to aggregate
-multiple frames together and process them as a batch,<wbr/> where the request
-controls are the same for all the frames in this batch.<wbr/> Max batch size indicates
-the max possible number of frames the camera device will group together for this high
-speed stream configuration.<wbr/> This max batch size will be used to generate a high speed
-recording request list by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>.<wbr/>
-The max batch size for each configuration will satisfy below conditions:</p>
-<ul>
-<li>Each max batch size will be a divisor of its corresponding fps_<wbr/>max /<wbr/> 30.<wbr/> For example,<wbr/>
-if max_<wbr/>fps is 300,<wbr/> max batch size will only be 1,<wbr/> 2,<wbr/> 5,<wbr/> or 10.<wbr/></li>
-<li>The camera device may choose smaller internal batch size for each configuration,<wbr/> but
-the actual batch size will be a divisor of max batch size.<wbr/> For example,<wbr/> if the max batch
-size is 8,<wbr/> the actual batch size used by camera device will only be 1,<wbr/> 2,<wbr/> 4,<wbr/> or 8.<wbr/></li>
-<li>The max batch size in each configuration entry must be no larger than 32.<wbr/></li>
-</ul>
-<p>The camera device doesn't have to support batch mode to achieve high speed video recording,<wbr/>
-in such case,<wbr/> batch_<wbr/>size_<wbr/>max will be reported as 1 in each configuration entry.<wbr/></p>
-<p>The fps ranges in this configuration list can only be used to create requests
-that are submitted to a high speed camera capture session created by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>.<wbr/>
-The fps ranges reported in this metadata must not be used to set up capture requests for
-a normal capture session,<wbr/> or they will cause a request error.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>All the sizes listed in this configuration will be a subset of the sizes reported by
-<a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> for processed non-stalling output formats.<wbr/>
-Note that for all high speed video configurations,<wbr/> HAL must be able to support a minimum
-of two streams,<wbr/> though the application might choose to configure just one stream.<wbr/></p>
-<p>The HAL may support multiple sensor modes for high speed outputs,<wbr/> for example,<wbr/> a 120fps
-sensor mode for 120fps recording and a 240fps sensor mode for 240fps recording.<wbr/> The application
-usually starts preview first,<wbr/> then starts recording.<wbr/> To minimize the stutter caused by a
-sensor mode switch when starting recording,<wbr/> the application may want to ensure
-the same sensor mode is used for preview and recording.<wbr/> Therefore,<wbr/> the HAL must advertise
-the variable fps range [30,<wbr/> fps_<wbr/>max] for each fixed fps range in this configuration list.<wbr/>
-For example,<wbr/> if the HAL advertises [120,<wbr/> 120] and [240,<wbr/> 240],<wbr/> the HAL must also advertise
-[30,<wbr/> 120] and [30,<wbr/> 240] for each configuration.<wbr/> In doing so,<wbr/> if the application intends to
-do 120fps recording,<wbr/> it can select [30,<wbr/> 120] to start preview,<wbr/> and [120,<wbr/> 120] to start
-recording.<wbr/> For these variable fps ranges,<wbr/> it's up to the HAL to decide the actual fps
-values that are suitable for smooth preview streaming.<wbr/> If the HAL sees different max_<wbr/>fps
-values that fall into different sensor modes in a sequence of requests,<wbr/> the HAL must
-switch the sensor mode as quickly as possible to minimize the stutter caused by the mode switch.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
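The selection flow described in the android.control.availableHighSpeedVideoConfigurations details above (pick one output size, filter its fps ranges, then let the constrained high speed session expand a single request into a batch) can be illustrated from the application side. This is only a hedged sketch: `chars`, `camera`, `session`, `previewSurface`, and `recordingSurface` are hypothetical objects the application would already hold, and error/null handling is omitted.

    // Assumes imports from android.hardware.camera2.*, android.hardware.camera2.params.*,
    // android.util.*, android.view.Surface, and java.util.List.
    static void startHighSpeedRecording(CameraCharacteristics chars, CameraDevice camera,
            CameraConstrainedHighSpeedCaptureSession session,
            Surface previewSurface, Surface recordingSurface) throws CameraAccessException {
        StreamConfigurationMap map =
                chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        // 1. Pick one high speed size; preview and recording streams must share it.
        Size size = map.getHighSpeedVideoSizes()[0];
        // 2. Filter the fps ranges for that size; a fixed range such as [120, 120] is used
        //    for recording, while a variable range such as [30, 120] suits preview.
        Range<Integer> recordingFps = null;
        for (Range<Integer> r : map.getHighSpeedVideoFpsRangesFor(size)) {
            if (r.getLower().equals(r.getUpper())) { recordingFps = r; break; }
        }
        // 3. Build one request; the session expands it into up to batch_size_max requests.
        CaptureRequest.Builder builder =
                camera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, recordingFps);
        builder.addTarget(previewSurface);
        builder.addTarget(recordingSurface);
        List<CaptureRequest> burst = session.createHighSpeedRequestList(builder.build());
        session.setRepeatingBurst(burst, /* listener= */ null, /* handler= */ null);
    }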
-
-
- <tr class="entry" id="static_android.control.aeLockAvailable">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Lock<wbr/>Available
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">FALSE</span>
- </li>
- <li>
- <span class="entry_type_enum_name">TRUE</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether the camera device supports <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Devices with MANUAL_<wbr/>SENSOR capability or BURST_<wbr/>CAPTURE capability will always
-list <code>true</code>.<wbr/> This includes FULL devices.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.awbLockAvailable">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>awb<wbr/>Lock<wbr/>Available
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">FALSE</span>
- </li>
- <li>
- <span class="entry_type_enum_name">TRUE</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether the camera device supports <a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Devices with MANUAL_<wbr/>POST_<wbr/>PROCESSING capability or BURST_<wbr/>CAPTURE capability will
-always list <code>true</code>.<wbr/> This includes FULL devices.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.availableModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>available<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">List of enums (android.<wbr/>control.<wbr/>mode).<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of control modes for <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> that are supported by this camera
-device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This list contains control modes that can be set for the camera device.<wbr/>
-LEGACY mode devices will always support AUTO mode.<wbr/> LIMITED and FULL
-devices will always support OFF and AUTO modes.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.control.postRawSensitivityBoostRange">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost<wbr/>Range
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as rangeInt]</span>
-
-
-
-
-            <div class="entry_type_notes">Range of supported post RAW sensitivity boosts</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Range of boosts for <a href="#controls_android.control.postRawSensitivityBoost">android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost</a> supported
-by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- ISO arithmetic units,<wbr/> the same as android.<wbr/>sensor.<wbr/>sensitivity
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-<p>Devices that support post RAW sensitivity boost will advertise the
-<a href="#controls_android.control.postRawSensitivityBoost">android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost</a> key for controlling
-post RAW sensitivity boost.<wbr/></p>
-<p>This key will be <code>null</code> for devices that do not support any RAW format
-outputs.<wbr/> For devices that do support RAW format outputs,<wbr/> this key will always
-be present,<wbr/> and if a device does not support post RAW sensitivity boost,<wbr/> it will
-list <code>(100,<wbr/> 100)</code> in this key.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-<p>This key was added in HAL3.<wbr/>4.<wbr/> For HAL3.<wbr/>3 or earlier devices,<wbr/> the camera framework will
-generate this key as <code>(100,<wbr/> 100)</code> if the device supports any RAW output formats.<wbr/>
-All HAL3.<wbr/>4 and above devices should list this key if the device supports any RAW
-output formats.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
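As a rough illustration of how an application might honor the android.control.postRawSensitivityBoostRange entry above, the sketch below reads the advertised range and clamps a requested boost before applying it. `chars` and `builder` are hypothetical objects assumed to already exist.

    // Assumes imports from android.hardware.camera2.* and android.util.Range.
    static void applyPostRawBoost(CameraCharacteristics chars, CaptureRequest.Builder builder) {
        Range<Integer> range =
                chars.get(CameraCharacteristics.CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE);
        // A (100, 100) range means the device does not actually support boosting.
        if (range == null || range.getLower().equals(range.getUpper())) return;
        int desiredBoost = 200;  // 2x boost, in the same ISO arithmetic units as sensitivity
        builder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST, range.clamp(desiredBoost));
    }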
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.control.aePrecaptureId">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Id
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The ID sent with the latest
-CAMERA2_<wbr/>TRIGGER_<wbr/>PRECAPTURE_<wbr/>METERING call</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-<p>Must be 0 if no
-CAMERA2_<wbr/>TRIGGER_<wbr/>PRECAPTURE_<wbr/>METERING trigger has been received yet
-by the HAL.<wbr/> Always updated even if the AE algorithm ignores the
-trigger.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.control.aeAntibandingMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>The camera device will not adjust exposure duration to
-avoid banding problems.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">50HZ</span>
- <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
-avoid banding problems with 50Hz illumination sources.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">60HZ</span>
- <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
-avoid banding problems with 60Hz illumination
-sources.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AUTO</span>
- <span class="entry_type_enum_notes"><p>The camera device will automatically adapt its
-antibanding routine to the current illumination
-condition.<wbr/> This is the default mode if AUTO is
-available on the given camera device.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired setting for the camera device's auto-exposure
-algorithm's antibanding compensation.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Some kinds of lighting fixtures,<wbr/> such as some fluorescent
-lights,<wbr/> flicker at the rate of the power supply frequency
-(60Hz or 50Hz,<wbr/> depending on country).<wbr/> While this is
-typically not noticeable to a person,<wbr/> it can be visible to
-a camera device.<wbr/> If a camera sets its exposure time to the
-wrong value,<wbr/> the flicker may become visible in the
-viewfinder as flicker or in a final captured image,<wbr/> as a
-set of variable-brightness bands across the image.<wbr/></p>
-<p>Therefore,<wbr/> the auto-exposure routines of camera devices
-include antibanding routines that ensure that the chosen
-exposure value will not cause such banding.<wbr/> The choice of
-exposure time depends on the rate of flicker,<wbr/> which the
-camera device can detect automatically,<wbr/> or the expected
-rate can be selected by the application using this
-control.<wbr/></p>
-<p>A given camera device may not support all of the possible
-options for the antibanding mode.<wbr/> The
-<a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a> key contains
-the available modes for a given camera device.<wbr/></p>
-<p>AUTO mode is the default if it is available on the given
-camera device.<wbr/> When AUTO mode is not available,<wbr/> the
-default will be either 50HZ or 60HZ,<wbr/> and both 50HZ
-and 60HZ will be available.<wbr/></p>
-<p>If manual exposure control is enabled (by setting
-<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> to OFF),<wbr/>
-then this setting has no effect,<wbr/> and the application must
-ensure it selects exposure times that do not cause banding
-issues.<wbr/> The <a href="#dynamic_android.statistics.sceneFlicker">android.<wbr/>statistics.<wbr/>scene<wbr/>Flicker</a> key can assist
-the application in this.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For all capture request templates,<wbr/> this field must be set
-to AUTO if AUTO mode is available.<wbr/> If AUTO is not available,<wbr/>
-the default must be either 50HZ or 60HZ,<wbr/> and both 50HZ and
-60HZ must be available.<wbr/></p>
-<p>If manual exposure control is enabled (by setting
-<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> to OFF),<wbr/>
-then the exposure values provided by the application must not be
-adjusted for antibanding.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
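When manual exposure control is used, the android.control.aeAntibandingMode entry above leaves banding avoidance to the application. A minimal sketch of that arithmetic, assuming a hypothetical latest `CaptureResult result` and request `builder`, rounds the exposure time down to a whole number of flicker periods reported by android.statistics.sceneFlicker (the light intensity flickers at twice the mains frequency, so the period is 1/100 s or roughly 1/120 s):

    // Assumes imports from android.hardware.camera2.*.
    static void setBandingSafeExposure(CaptureResult result, CaptureRequest.Builder builder,
            long desiredExposureNs) {
        Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
        long periodNs = 0;
        if (flicker != null && flicker == CaptureResult.STATISTICS_SCENE_FLICKER_50HZ) {
            periodNs = 10_000_000L;   // 1/100 s: intensity flickers at twice 50Hz
        } else if (flicker != null && flicker == CaptureResult.STATISTICS_SCENE_FLICKER_60HZ) {
            periodNs = 8_333_333L;    // ~1/120 s: intensity flickers at twice 60Hz
        }
        long exposureNs = desiredExposureNs;
        if (periodNs > 0 && exposureNs > periodNs) {
            exposureNs = (exposureNs / periodNs) * periodNs;  // whole number of flicker periods
        }
        builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposureNs);
    }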
-
-
- <tr class="entry" id="dynamic_android.control.aeExposureCompensation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Adjustment to auto-exposure (AE) target image
-brightness.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Compensation steps
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The adjustment is measured as a count of steps,<wbr/> with the
-step size defined by <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> and the
-allowed range by <a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a>.<wbr/></p>
-<p>For example,<wbr/> if the exposure value (EV) step is 0.<wbr/>333,<wbr/> '6'
-will mean an exposure compensation of +2 EV; -3 will mean an
-exposure compensation of -1 EV.<wbr/> One EV represents a doubling
-of image brightness.<wbr/> Note that this control will only be
-effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF.<wbr/> This control
-will take effect even when <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> <code>== true</code>.<wbr/></p>
-<p>When the exposure compensation value is changed,<wbr/> the camera device
-may take several frames to reach the newly requested exposure target.<wbr/>
-During that time,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> field will be in the SEARCHING
-state.<wbr/> Once the new exposure target is reached,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> will
-change from SEARCHING to either CONVERGED,<wbr/> LOCKED (if AE lock is enabled),<wbr/> or
-FLASH_<wbr/>REQUIRED (if the scene is too dark for still capture).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
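The step arithmetic described in the android.control.aeExposureCompensation details above (desired EV divided by the compensation step, clamped to the compensation range) can be sketched as follows. `chars` and `builder` are hypothetical objects; a caller would pass, for example, +2.0f as desiredEv.

    // Assumes imports from android.hardware.camera2.*, android.util.Range, and android.util.Rational.
    static void requestExposureCompensation(CameraCharacteristics chars,
            CaptureRequest.Builder builder, float desiredEv) {
        Rational step = chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
        Range<Integer> range = chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
        if (step == null || range == null) return;
        // e.g. desiredEv = +2 with a 1/3 EV step gives 6 compensation steps.
        int steps = Math.round(desiredEv / step.floatValue());
        builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, range.clamp(steps));
    }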
-
-
- <tr class="entry" id="dynamic_android.control.aeLock">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Lock
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Auto-exposure lock is disabled; the AE algorithm
-is free to update its parameters.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Auto-exposure lock is enabled; the AE algorithm
-must not update the exposure and sensitivity parameters
-while the lock is active.<wbr/></p>
-<p><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> setting changes
-will still take effect while auto-exposure is locked.<wbr/></p>
-<p>Some rare LEGACY devices may not support
-this,<wbr/> in which case the value will always be overridden to OFF.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether auto-exposure (AE) is currently locked to its latest
-calculated values.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to <code>true</code> (ON),<wbr/> the AE algorithm is locked to its latest parameters,<wbr/>
-and will not change exposure settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
-<p>Note that even when AE is locked,<wbr/> the flash may be fired if
-the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is ON_<wbr/>AUTO_<wbr/>FLASH /<wbr/>
-ON_<wbr/>ALWAYS_<wbr/>FLASH /<wbr/> ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE.<wbr/></p>
-<p>When <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> is changed,<wbr/> even if the AE lock
-is ON,<wbr/> the camera device will still adjust its exposure value.<wbr/></p>
-<p>If AE precapture is triggered (see <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>)
-when AE is already locked,<wbr/> the camera device will not change the exposure time
-(<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>) and sensitivity (<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>)
-parameters.<wbr/> The flash may be fired if the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>
-is ON_<wbr/>AUTO_<wbr/>FLASH/<wbr/>ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE and the scene is too dark.<wbr/> If the
-<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is ON_<wbr/>ALWAYS_<wbr/>FLASH,<wbr/> the scene may become overexposed.<wbr/>
-Similarly,<wbr/> AE precapture trigger CANCEL has no effect when AE is already locked.<wbr/></p>
-<p>When an AE precapture sequence is triggered,<wbr/> AE unlock will not be able to unlock
-the AE if AE is locked by the camera device internally during precapture metering
-sequence.<wbr/> In other words,<wbr/> submitting requests with AE unlock has no effect for an
-ongoing precapture metering sequence.<wbr/> Otherwise,<wbr/> the precapture metering sequence
-will never succeed in a sequence of preview requests where AE lock is always set
-to <code>false</code>.<wbr/></p>
-<p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
-get locked do not necessarily correspond to the settings that were present in the
-latest capture result received from the camera device,<wbr/> since additional captures
-and AE updates may have occurred even before the result was sent out.<wbr/> If an
-application is switching between automatic and manual control and wishes to eliminate
-any flicker during the switch,<wbr/> the following procedure is recommended (a sketch follows this entry):</p>
-<ol>
-<li>Starting in auto-AE mode:</li>
-<li>Lock AE</li>
-<li>Wait for the first result to be output that has the AE locked</li>
-<li>Copy exposure settings from that result into a request,<wbr/> set the request to manual AE</li>
-<li>Submit the capture request,<wbr/> proceed to run manual AE as desired.<wbr/></li>
-</ol>
-<p>See <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> for AE lock related state transition details.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
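A rough illustration of the five-step auto-to-manual hand-off recommended in the android.control.aeLock details above. This is a sketch only; `camera`, `session`, and `previewSurface` are hypothetical objects the application would already hold, and real code would also stop reacting once the switch has happened.

    // Assumes imports from android.hardware.camera2.* and android.view.Surface.
    static void switchToManualAe(final CameraDevice camera, CameraCaptureSession session,
            final Surface previewSurface) throws CameraAccessException {
        // Steps 1-2: starting in auto-AE mode, lock AE.
        CaptureRequest.Builder locked = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        locked.addTarget(previewSurface);
        locked.set(CaptureRequest.CONTROL_AE_LOCK, true);
        session.setRepeatingRequest(locked.build(), new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession s, CaptureRequest request,
                    TotalCaptureResult result) {
                // Step 3: wait for the first result that reports AE as locked.
                Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_LOCKED) return;
                try {
                    // Step 4: copy the locked exposure settings into a manual-AE request.
                    CaptureRequest.Builder manual =
                            camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                    manual.addTarget(previewSurface);
                    manual.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
                    manual.set(CaptureRequest.SENSOR_EXPOSURE_TIME,
                            result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
                    manual.set(CaptureRequest.SENSOR_SENSITIVITY,
                            result.get(CaptureResult.SENSOR_SENSITIVITY));
                    // Step 5: submit and continue with manual AE from here on.
                    s.setRepeatingRequest(manual.build(), null, null);
                } catch (CameraAccessException e) {
                    // Error handling is application specific.
                }
            }
        }, null);
    }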
-
-
- <tr class="entry" id="dynamic_android.control.aeMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is disabled.<wbr/></p>
-<p>The application-selected <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
-<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> and
-<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> are used by the camera
-device,<wbr/> along with android.<wbr/>flash.<wbr/>* fields,<wbr/> if there's
-a flash unit for this camera device.<wbr/></p>
-<p>Note that auto-white balance (AWB) and auto-focus (AF)
-behavior is device dependent when AE is in OFF mode.<wbr/>
-To have consistent behavior across different devices,<wbr/>
-it is recommended to either set AWB and AF to OFF mode
-or lock AWB and AF before setting AE to OFF.<wbr/>
-See <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a>,<wbr/> and <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>
-for more details.<wbr/></p>
-<p>LEGACY devices do not support the OFF mode and will
-override attempts to use this value to ON.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is active,<wbr/>
-with no flash control.<wbr/></p>
-<p>The application's values for
-<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
-<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and
-<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> are ignored.<wbr/> The
-application has control over the various
-android.<wbr/>flash.<wbr/>* fields.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON_AUTO_FLASH</span>
- <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
-the camera's flash unit,<wbr/> firing it in low-light
-conditions.<wbr/></p>
-<p>The flash may be fired during a precapture sequence
-(triggered by <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>) and
-may be fired for captures for which the
-<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> field is set to
-STILL_<wbr/>CAPTURE</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON_ALWAYS_FLASH</span>
- <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
-the camera's flash unit,<wbr/> always firing it for still
-captures.<wbr/></p>
-<p>The flash may be fired during a precapture sequence
-(triggered by <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>) and
-will always be fired for captures for which the
-<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> field is set to
-STILL_<wbr/>CAPTURE</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON_AUTO_FLASH_REDEYE</span>
- <span class="entry_type_enum_notes"><p>Like ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> but with automatic red eye
-reduction.<wbr/></p>
-<p>If deemed necessary by the camera device,<wbr/> a red eye
-reduction flash will fire during the precapture
-sequence.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired mode for the camera device's
-auto-exposure routine.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.aeAvailableModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is
-AUTO.<wbr/></p>
-<p>When set to any of the ON modes,<wbr/> the camera device's
-auto-exposure routine is enabled,<wbr/> overriding the
-application's selected exposure time,<wbr/> sensor sensitivity,<wbr/>
-and frame duration (<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
-<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and
-<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>).<wbr/> If one of the FLASH modes
-is selected,<wbr/> the camera device's flash unit controls are
-also overridden.<wbr/></p>
-<p>The FLASH modes are only available if the camera device
-has a flash unit (<a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> is <code>true</code>).<wbr/></p>
-<p>If flash TORCH mode is desired,<wbr/> this field must be set to
-ON or OFF,<wbr/> and <a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> set to TORCH.<wbr/></p>
-<p>When set to any of the ON modes,<wbr/> the values chosen by the
-camera device auto-exposure routine for the overridden
-fields for a given capture will be available in its
-CaptureResult.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.control.aeRegions">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>ae<wbr/>Regions
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 5 x area_count
- </span>
- <span class="entry_type_visibility"> [public as meteringRectangle]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of metering areas to use for auto-exposure adjustment.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
- </td>
-
- <td class="entry_range">
- <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not available if <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a> is 0.<wbr/>
-Otherwise,<wbr/> it will always be present.<wbr/></p>
-<p>The maximum number of regions supported by the device is determined by the value
-of <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a>.<wbr/></p>
-<p>The coordinate system is based on the active pixel array,<wbr/>
-with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
-(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
-bottom-right pixel in the active pixel array.<wbr/></p>
-<p>The weight must be within <code>[0,<wbr/> 1000]</code>,<wbr/> and represents a weight
-for every pixel in the area.<wbr/> This means that a large metering area
-with the same weight as a smaller area will have more effect in
-the metering result.<wbr/> Metering areas can partially overlap and the
-camera device will add the weights in the overlap region.<wbr/></p>
-<p>The weights are relative to weights of other exposure metering regions,<wbr/> so if only one
-region is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0
-weight is ignored.<wbr/></p>
-<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
-camera device.<wbr/></p>
-<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
-capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
-region and output only the intersection rectangle as the metering region in the result
-metadata.<wbr/> If the region is entirely outside the crop region,<wbr/> it will be ignored and
-not reported in the result metadata.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-<p>The HAL level representation of MeteringRectangle[] is an
-int[5 * area_<wbr/>count].<wbr/>
-Every five elements represent a metering region of
-(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
-The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
-exclusive on xmax and ymax.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
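As an illustrative sketch of the coordinate and weight conventions described in the android.control.aeRegions details above, the snippet below meters on the center quarter of the active pixel array with maximum weight. `chars` and `builder` are hypothetical objects assumed to exist.

    // Assumes imports from android.hardware.camera2.*, android.hardware.camera2.params.MeteringRectangle,
    // and android.graphics.Rect.
    static void meterOnCenter(CameraCharacteristics chars, CaptureRequest.Builder builder) {
        Rect active = chars.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        MeteringRectangle center = new MeteringRectangle(
                active.width() / 4, active.height() / 4,   // top-left corner in active-array coordinates
                active.width() / 2, active.height() / 2,   // width and height of the region
                MeteringRectangle.METERING_WEIGHT_MAX);    // weight within [0, 1000]
        builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] { center });
    }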
-
-
- <tr class="entry" id="dynamic_android.control.aeTargetFpsRange">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as rangeInt]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Range over which the auto-exposure routine can
-adjust the capture frame rate to maintain good
-exposure.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Frames per second (FPS)
- </td>
-
- <td class="entry_range">
- <p>Any of the entries in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Only constrains auto-exposure (AE) algorithm,<wbr/> not
-manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a> and
-<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.control.aePrecaptureTrigger">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">IDLE</span>
- <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">START</span>
- <span class="entry_type_enum_notes"><p>The precapture metering sequence will be started
-by the camera device.<wbr/></p>
-<p>The exact effect of the precapture trigger depends on
-the current AE mode and state.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CANCEL</span>
- <span class="entry_type_enum_notes"><p>The camera device will cancel any currently active or completed
-precapture metering sequence,<wbr/> the auto-exposure routine will return to its
-initial state.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether the camera device will trigger a precapture
-metering sequence when it processes this request.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry is normally set to IDLE,<wbr/> or is not
-included at all in the request settings.<wbr/> When included and
-set to START,<wbr/> the camera device will trigger the auto-exposure (AE)
-precapture metering sequence.<wbr/></p>
-<p>When set to CANCEL,<wbr/> the camera device will cancel any active
-precapture metering trigger,<wbr/> and return to its initial AE state.<wbr/>
-If a precapture metering sequence is already completed,<wbr/> and the camera
-device has implicitly locked the AE for subsequent still capture,<wbr/> the
-CANCEL trigger will unlock the AE and return to its initial AE state.<wbr/></p>
-<p>The precapture sequence should be triggered before starting a
-high-quality still capture for final metering decisions to
-be made,<wbr/> and for firing pre-capture flash pulses to estimate
-scene brightness and required final capture flash power,<wbr/> when
-the flash is enabled.<wbr/></p>
-<p>Normally,<wbr/> this entry should be set to START for only a
-single request,<wbr/> and the application should wait until the
-sequence completes before starting a new one.<wbr/></p>
-<p>When a precapture metering sequence is finished,<wbr/> the camera device
-may lock the auto-exposure routine internally to be able to accurately expose the
-subsequent still capture image (<code><a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> == STILL_<wbr/>CAPTURE</code>).<wbr/>
-For this case,<wbr/> the AE may not resume normal scan if no subsequent still capture is
-submitted.<wbr/> To ensure that the AE routine restarts normal scan,<wbr/> the application should
-submit a request with <code><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> == true</code>,<wbr/> followed by a request
-with <code><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> == false</code>,<wbr/> if the application decides not to submit a
-still capture request after the precapture sequence completes.<wbr/> Alternatively,<wbr/> for
-API level 23 or newer devices,<wbr/> the CANCEL can be used to unlock the camera device
-internally locked AE if the application doesn't submit a still capture request after
-the AE precapture trigger.<wbr/> Note that the CANCEL trigger was added in API level 23,<wbr/> and must not
-be used on devices with earlier API levels.<wbr/></p>
-<p>The exact effect of auto-exposure (AE) precapture trigger
-depends on the current AE mode and state; see
-<a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> for AE precapture state transition
-details.<wbr/></p>
-<p>On LEGACY-level devices,<wbr/> the precapture trigger is not supported;
-capturing a high-resolution JPEG image will automatically trigger a
-precapture sequence before the high-resolution capture,<wbr/> including
-potentially firing a pre-capture flash.<wbr/></p>
-<p>Using the precapture trigger and the auto-focus trigger <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>
-simultaneously is allowed.<wbr/> However,<wbr/> since these triggers often require cooperation between
-the auto-focus and auto-exposure routines (for example,<wbr/> the flash may need to be enabled for a
-focus sweep),<wbr/> the camera device may delay acting on a later trigger until the previous
-trigger has been fully handled.<wbr/> This may lead to longer intervals between the trigger and
-changes to <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> indicating the start of the precapture sequence,<wbr/> for
-example.<wbr/></p>
-<p>If both the precapture and the auto-focus trigger are activated on the same request,<wbr/> then
-the camera device will complete them in the optimal order for that device.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The HAL must support triggering the AE precapture trigger while an AF trigger is active
-(and vice versa),<wbr/> or at the same time as the AF trigger.<wbr/> It is acceptable for the HAL to
-treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
-AE trigger.<wbr/> Or the HAL may choose to optimize the case with both triggers fired at once,<wbr/>
-to minimize the latency for converging both focus and exposure/<wbr/>flash usage.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
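The trigger usage described in the android.control.aePrecaptureTrigger details above (set START on a single request, then watch android.control.aeState before taking the still capture) can be sketched as follows. `camera`, `session`, and `previewSurface` are hypothetical objects assumed to exist.

    // Assumes imports from android.hardware.camera2.* and android.view.Surface.
    static void triggerAePrecapture(CameraDevice camera, CameraCaptureSession session,
            Surface previewSurface) throws CameraAccessException {
        CaptureRequest.Builder trigger = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        trigger.addTarget(previewSurface);
        trigger.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
        // START is set on a single request only; the repeating preview request stays unchanged.
        session.capture(trigger.build(), /* listener= */ null, /* handler= */ null);
        // The application then watches CaptureResult.CONTROL_AE_STATE on its repeating preview
        // results, and submits the still capture once the state reaches CONVERGED or
        // FLASH_REQUIRED (i.e. once the PRECAPTURE sequence has finished).
    }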
-
-
- <tr class="entry" id="dynamic_android.control.aeState">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>ae<wbr/>State
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">INACTIVE</span>
- <span class="entry_type_enum_notes"><p>AE is off or recently reset.<wbr/></p>
-<p>When a camera device is opened,<wbr/> it starts in
-this state.<wbr/> This is a transient state,<wbr/> the camera device may skip reporting
-this state in capture result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SEARCHING</span>
- <span class="entry_type_enum_notes"><p>AE doesn't yet have a good set of control values
-for the current scene.<wbr/></p>
-<p>This is a transient state,<wbr/> the camera device may skip
-reporting this state in capture result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CONVERGED</span>
- <span class="entry_type_enum_notes"><p>AE has a good set of control values for the
-current scene.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">LOCKED</span>
- <span class="entry_type_enum_notes"><p>AE has been locked.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FLASH_REQUIRED</span>
- <span class="entry_type_enum_notes"><p>AE has a good set of control values,<wbr/> but flash
-needs to be fired for good quality still
-capture.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PRECAPTURE</span>
- <span class="entry_type_enum_notes"><p>AE has been asked to do a precapture sequence
-and is currently executing it.<wbr/></p>
-<p>Precapture can be triggered through setting
-<a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> to START.<wbr/> Currently
-active and completed (if it causes camera device internal AE lock) precapture
-metering sequence can be canceled through setting
-<a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> to CANCEL.<wbr/></p>
-<p>Once PRECAPTURE completes,<wbr/> AE will transition to CONVERGED
-or FLASH_<wbr/>REQUIRED as appropriate.<wbr/> This is a transient
-state,<wbr/> the camera device may skip reporting this state in
-capture result.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Current state of the auto-exposure (AE) algorithm.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Switching between or enabling AE modes (<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>) always
-resets the AE state to INACTIVE.<wbr/> Similarly,<wbr/> switching between <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>,<wbr/>
-or <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> if <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code> resets all
-the algorithm states to INACTIVE.<wbr/></p>
-<p>The camera device can do several state transitions between two results,<wbr/> if it is
-allowed by the state transition table.<wbr/> For example: INACTIVE may never actually be
-seen in a result.<wbr/></p>
-<p>The state in the result is the state for this image (in sync with this image): if
-AE state becomes CONVERGED,<wbr/> then the image data associated with this result should
-be good to use.<wbr/></p>
-<p>Below are state transition tables for different AE modes.<wbr/></p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center"></td>
-<td align="center">INACTIVE</td>
-<td align="center">Camera device auto exposure algorithm is disabled</td>
-</tr>
-</tbody>
-</table>
-<p>When <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is AE_<wbr/>MODE_<wbr/>ON_<wbr/>*:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">Camera device initiates AE scan</td>
-<td align="center">SEARCHING</td>
-<td align="center">Values changing</td>
-</tr>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
-<td align="center">LOCKED</td>
-<td align="center">Values locked</td>
-</tr>
-<tr>
-<td align="center">SEARCHING</td>
-<td align="center">Camera device finishes AE scan</td>
-<td align="center">CONVERGED</td>
-<td align="center">Good values,<wbr/> not changing</td>
-</tr>
-<tr>
-<td align="center">SEARCHING</td>
-<td align="center">Camera device finishes AE scan</td>
-<td align="center">FLASH_<wbr/>REQUIRED</td>
-<td align="center">Converged but too dark w/<wbr/>o flash</td>
-</tr>
-<tr>
-<td align="center">SEARCHING</td>
-<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
-<td align="center">LOCKED</td>
-<td align="center">Values locked</td>
-</tr>
-<tr>
-<td align="center">CONVERGED</td>
-<td align="center">Camera device initiates AE scan</td>
-<td align="center">SEARCHING</td>
-<td align="center">Values changing</td>
-</tr>
-<tr>
-<td align="center">CONVERGED</td>
-<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
-<td align="center">LOCKED</td>
-<td align="center">Values locked</td>
-</tr>
-<tr>
-<td align="center">FLASH_<wbr/>REQUIRED</td>
-<td align="center">Camera device initiates AE scan</td>
-<td align="center">SEARCHING</td>
-<td align="center">Values changing</td>
-</tr>
-<tr>
-<td align="center">FLASH_<wbr/>REQUIRED</td>
-<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
-<td align="center">LOCKED</td>
-<td align="center">Values locked</td>
-</tr>
-<tr>
-<td align="center">LOCKED</td>
-<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is OFF</td>
-<td align="center">SEARCHING</td>
-<td align="center">Values not good after unlock</td>
-</tr>
-<tr>
-<td align="center">LOCKED</td>
-<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is OFF</td>
-<td align="center">CONVERGED</td>
-<td align="center">Values good after unlock</td>
-</tr>
-<tr>
-<td align="center">LOCKED</td>
-<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is OFF</td>
-<td align="center">FLASH_<wbr/>REQUIRED</td>
-<td align="center">Exposure good,<wbr/> but too dark</td>
-</tr>
-<tr>
-<td align="center">PRECAPTURE</td>
-<td align="center">Sequence done.<wbr/> <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is OFF</td>
-<td align="center">CONVERGED</td>
-<td align="center">Ready for high-quality capture</td>
-</tr>
-<tr>
-<td align="center">PRECAPTURE</td>
-<td align="center">Sequence done.<wbr/> <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
-<td align="center">LOCKED</td>
-<td align="center">Ready for high-quality capture</td>
-</tr>
-<tr>
-<td align="center">LOCKED</td>
-<td align="center">aeLock is ON and aePrecaptureTrigger is START</td>
-<td align="center">LOCKED</td>
-<td align="center">Precapture trigger is ignored when AE is already locked</td>
-</tr>
-<tr>
-<td align="center">LOCKED</td>
-<td align="center">aeLock is ON and aePrecaptureTrigger is CANCEL</td>
-<td align="center">LOCKED</td>
-<td align="center">Precapture trigger is ignored when AE is already locked</td>
-</tr>
-<tr>
-<td align="center">Any state (excluding LOCKED)</td>
-<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is START</td>
-<td align="center">PRECAPTURE</td>
-<td align="center">Start AE precapture metering sequence</td>
-</tr>
-<tr>
-<td align="center">Any state (excluding LOCKED)</td>
-<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Currently active precapture metering sequence is canceled</td>
-</tr>
-</tbody>
-</table>
-<p>For the above table,<wbr/> the camera device may skip reporting any state changes that happen
-without application intervention (i.<wbr/>e.<wbr/> mode switch,<wbr/> trigger,<wbr/> locking).<wbr/> Any state that
-can be skipped in that manner is called a transient state.<wbr/></p>
-<p>For example,<wbr/> for the above AE modes (AE_<wbr/>MODE_<wbr/>ON_<wbr/>*),<wbr/> in addition to the state transitions
-listed in the above table,<wbr/> it is also legal for the camera device to skip one or more
-transient states between two results.<wbr/> See the table below for examples:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">Camera device finished AE scan</td>
-<td align="center">CONVERGED</td>
-<td align="center">Values are already good,<wbr/> transient states are skipped by camera device.<wbr/></td>
-</tr>
-<tr>
-<td align="center">Any state (excluding LOCKED)</td>
-<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is START,<wbr/> sequence done</td>
-<td align="center">FLASH_<wbr/>REQUIRED</td>
-<td align="center">Converged but too dark w/<wbr/>o flash after a precapture sequence,<wbr/> transient states are skipped by camera device.<wbr/></td>
-</tr>
-<tr>
-<td align="center">Any state (excluding LOCKED)</td>
-<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is START,<wbr/> sequence done</td>
-<td align="center">CONVERGED</td>
-<td align="center">Converged after a precapture sequence,<wbr/> transient states are skipped by camera device.<wbr/></td>
-</tr>
-<tr>
-<td align="center">Any state (excluding LOCKED)</td>
-<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is CANCEL,<wbr/> converged</td>
-<td align="center">FLASH_<wbr/>REQUIRED</td>
-<td align="center">Converged but too dark w/<wbr/>o flash after a precapture sequence is canceled,<wbr/> transient states are skipped by camera device.<wbr/></td>
-</tr>
-<tr>
-<td align="center">Any state (excluding LOCKED)</td>
-<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is CANCEL,<wbr/> converged</td>
-<td align="center">CONVERGED</td>
-<td align="center">Converged after a precapture sequenceis canceled,<wbr/> transient states are skipped by camera device.<wbr/></td>
-</tr>
-<tr>
-<td align="center">CONVERGED</td>
-<td align="center">Camera device finished AE scan</td>
-<td align="center">FLASH_<wbr/>REQUIRED</td>
-<td align="center">Converged but too dark w/<wbr/>o flash after a new scan,<wbr/> transient states are skipped by camera device.<wbr/></td>
-</tr>
-<tr>
-<td align="center">FLASH_<wbr/>REQUIRED</td>
-<td align="center">Camera device finished AE scan</td>
-<td align="center">CONVERGED</td>
-<td align="center">Converged after a new scan,<wbr/> transient states are skipped by camera device.<wbr/></td>
-</tr>
-</tbody>
-</table>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
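<p>The precapture sequence above is normally driven from the application side. The following is a minimal, illustrative Java sketch (not part of this metadata definition) against the public android.hardware.camera2 API, assuming the usual camera2 and android.os.Handler imports; the session,<wbr/> previewBuilder,<wbr/> handler and captureStill() names are hypothetical. It fires the precapture trigger once and then watches the repeating results for a terminal AE state,<wbr/> since SEARCHING and PRECAPTURE are transient and may be skipped entirely:</p>
<pre>
// Illustrative sketch only; session, previewBuilder, handler and captureStill()
// are hypothetical names, not part of the camera metadata definition.
void startPrecapture(final CameraCaptureSession session,
                     final CaptureRequest.Builder previewBuilder,
                     final Handler handler) throws CameraAccessException {
    // One-shot request that starts the AE precapture metering sequence.
    previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
            CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
    session.capture(previewBuilder.build(), null, handler);
    // Return the trigger to IDLE for the repeating stream and wait for a
    // terminal AE state in the results; transient states may never be reported.
    previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
            CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
    session.setRepeatingRequest(previewBuilder.build(),
            new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(CameraCaptureSession s,
                        CaptureRequest request, TotalCaptureResult result) {
                    Integer ae = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (ae != null
                            &amp;&amp; (ae == CaptureResult.CONTROL_AE_STATE_CONVERGED
                                || ae == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED)) {
                        captureStill();  // hypothetical: submit the final capture
                    }
                }
            }, handler);
}
</pre>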
- <tr class="entry" id="dynamic_android.control.afMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>af<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>The auto-focus routine does not control the lens;
-<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> is controlled by the
-application.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AUTO</span>
- <span class="entry_type_enum_notes"><p>Basic automatic focus mode.<wbr/></p>
-<p>In this mode,<wbr/> the lens does not move unless
-the autofocus trigger action is called.<wbr/> When that trigger
-is activated,<wbr/> AF will transition to ACTIVE_<wbr/>SCAN,<wbr/> then to
-the outcome of the scan (FOCUSED or NOT_<wbr/>FOCUSED).<wbr/></p>
-<p>Always supported if lens is not fixed focus.<wbr/></p>
-<p>Use <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> to determine if lens
-is fixed-focus.<wbr/></p>
-<p>Triggering AF_<wbr/>CANCEL resets the lens position to default,<wbr/>
-and sets the AF state to INACTIVE.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MACRO</span>
- <span class="entry_type_enum_notes"><p>Close-up focusing mode.<wbr/></p>
-<p>In this mode,<wbr/> the lens does not move unless the
-autofocus trigger action is called.<wbr/> When that trigger is
-activated,<wbr/> AF will transition to ACTIVE_<wbr/>SCAN,<wbr/> then to
-the outcome of the scan (FOCUSED or NOT_<wbr/>FOCUSED).<wbr/> This
-mode is optimized for focusing on objects very close to
-the camera.<wbr/></p>
-<p>Triggering cancel AF resets the lens position to default,<wbr/>
-and sets the AF state to INACTIVE.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CONTINUOUS_VIDEO</span>
- <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
-position continually to attempt to provide a
-constantly-in-focus image stream.<wbr/></p>
-<p>The focusing behavior should be suitable for good quality
-video recording; typically this means slower focus
-movement and no overshoots.<wbr/> When the AF trigger is not
-involved,<wbr/> the AF algorithm should start in INACTIVE state,<wbr/>
-and then transition into PASSIVE_<wbr/>SCAN and PASSIVE_<wbr/>FOCUSED
-states as appropriate.<wbr/> When the AF trigger is activated,<wbr/>
-the algorithm should immediately transition into
-AF_<wbr/>FOCUSED or AF_<wbr/>NOT_<wbr/>FOCUSED as appropriate,<wbr/> and lock the
-lens position until a cancel AF trigger is received.<wbr/></p>
-<p>Once cancel is received,<wbr/> the algorithm should transition
-back to INACTIVE and resume passive scan.<wbr/> Note that this
-behavior is not identical to CONTINUOUS_<wbr/>PICTURE,<wbr/> since an
-ongoing PASSIVE_<wbr/>SCAN must immediately be
-canceled.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CONTINUOUS_PICTURE</span>
- <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
-position continually to attempt to provide a
-constantly-in-focus image stream.<wbr/></p>
-<p>The focusing behavior should be suitable for still image
-capture; typically this means focusing as fast as
-possible.<wbr/> When the AF trigger is not involved,<wbr/> the AF
-algorithm should start in INACTIVE state,<wbr/> and then
-transition into PASSIVE_<wbr/>SCAN and PASSIVE_<wbr/>FOCUSED states as
-appropriate as it attempts to maintain focus.<wbr/> When the AF
-trigger is activated,<wbr/> the algorithm should finish its
-PASSIVE_<wbr/>SCAN if active,<wbr/> and then transition into
-AF_<wbr/>FOCUSED or AF_<wbr/>NOT_<wbr/>FOCUSED as appropriate,<wbr/> and lock the
-lens position until a cancel AF trigger is received.<wbr/></p>
-<p>When the AF cancel trigger is activated,<wbr/> the algorithm
-should transition back to INACTIVE and then act as if it
-has just been started.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">EDOF</span>
- <span class="entry_type_enum_notes"><p>Extended depth of field (digital focus) mode.<wbr/></p>
-<p>The camera device will produce images with an extended
-depth of field automatically; no special focusing
-operations need to be done before taking a picture.<wbr/></p>
-<p>AF triggers are ignored,<wbr/> and the AF state will always be
-INACTIVE.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether auto-focus (AF) is currently enabled,<wbr/> and what
-mode it is set to.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.afAvailableModes">android.<wbr/>control.<wbr/>af<wbr/>Available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> = AUTO and the lens is not fixed focus
-(i.<wbr/>e.<wbr/> <code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> > 0</code>).<wbr/> Also note that
-when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/> the behavior of AF is device
-dependent.<wbr/> It is recommended to lock AF by using <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> before
-setting <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> to OFF,<wbr/> or set AF mode to OFF when AE is OFF.<wbr/></p>
-<p>If the lens is controlled by the camera device auto-focus algorithm,<wbr/>
-the camera device will report the current AF status in <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a>
-in result metadata.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When afMode is AUTO or MACRO,<wbr/> the lens must not move until an AF trigger is sent in a
-request (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> <code>==</code> START).<wbr/> After an AF trigger,<wbr/> the afState will end
-up with either FOCUSED_<wbr/>LOCKED or NOT_<wbr/>FOCUSED_<wbr/>LOCKED state (see
-<a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for detailed state transitions),<wbr/> which indicates that the lens is
-locked and will not move.<wbr/> If camera movement (e.<wbr/>g.<wbr/> tilting camera) causes the lens to move
-after the lens is locked,<wbr/> the HAL must compensate this movement appropriately such that
-the same focal plane remains in focus.<wbr/></p>
-<p>When afMode is one of the continuous auto focus modes,<wbr/> the HAL is free to start an AF
-scan whenever it's not locked.<wbr/> When the lens is locked after an AF trigger
-(see <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for detailed state transitions),<wbr/> the HAL should maintain the
-same lock behavior as above.<wbr/></p>
-<p>When afMode is OFF,<wbr/> the application controls focus manually.<wbr/> The accuracy of the
-focus distance control depends on the <a href="#static_android.lens.info.focusDistanceCalibration">android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration</a>.<wbr/>
-However,<wbr/> the lens must not move regardless of the camera movement for any focus distance
-manual control.<wbr/></p>
-<p>To put this in concrete terms,<wbr/> if the camera has lens elements which may move based on
-camera orientation or motion (e.<wbr/>g.<wbr/> due to gravity),<wbr/> then the HAL must drive the lens to
-remain in a fixed position invariant to the camera's orientation or motion,<wbr/> for example,<wbr/>
-by using accelerometer measurements in the lens control logic.<wbr/> This is a typical issue
-that will arise on camera modules with open-loop VCMs.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
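<p>The mode selection guidance above (AUTO is always available when the lens is not fixed focus,<wbr/> and OFF is the only meaningful choice when android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance is 0) can be expressed as a short,<wbr/> illustrative Java sketch against the public android.hardware.camera2 API; chooseAfMode is a hypothetical helper:</p>
<pre>
// Illustrative sketch only; chooseAfMode is a hypothetical helper.
static int chooseAfMode(CameraCharacteristics chars) {
    Float minFocus = chars.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
    if (minFocus == null || minFocus == 0f) {
        // Fixed-focus lens: the AF routine cannot move the lens.
        return CameraMetadata.CONTROL_AF_MODE_OFF;
    }
    int[] modes = chars.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
    if (modes != null) {
        for (int mode : modes) {
            if (mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
                return mode;  // prefer continuous AF for still-capture use cases
            }
        }
    }
    // AUTO is always supported when the lens is not fixed focus.
    return CameraMetadata.CONTROL_AF_MODE_AUTO;
}
</pre>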
- <tr class="entry" id="dynamic_android.control.afRegions">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>af<wbr/>Regions
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 5 x area_count
- </span>
- <span class="entry_type_visibility"> [public as meteringRectangle]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of metering areas to use for auto-focus.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
- </td>
-
- <td class="entry_range">
- <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not available if <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a> is 0.<wbr/>
-Otherwise will always be present.<wbr/></p>
-<p>The maximum number of focus areas supported by the device is determined by the value
-of <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a>.<wbr/></p>
-<p>The coordinate system is based on the active pixel array,<wbr/>
-with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
-(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
-bottom-right pixel in the active pixel array.<wbr/></p>
-<p>The weight must be within <code>[0,<wbr/> 1000]</code>,<wbr/> and represents a weight
-for every pixel in the area.<wbr/> This means that a large metering area
-with the same weight as a smaller area will have more effect in
-the metering result.<wbr/> Metering areas can partially overlap and the
-camera device will add the weights in the overlap region.<wbr/></p>
-<p>The weights are relative to weights of other metering regions,<wbr/> so if only one region
-is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0 weight is
-ignored.<wbr/></p>
-<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
-camera device.<wbr/></p>
-<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
-capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
-region and output only the intersection rectangle as the metering region in the result
-metadata.<wbr/> If the region is entirely outside the crop region,<wbr/> it will be ignored and
-not reported in the result metadata.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-        <p>The HAL-level representation of MeteringRectangle[] is an
-int[5 * area_<wbr/>count].<wbr/>
-Every five elements represent a metering region of
-(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
-The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
-exclusive on xmax and ymax.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
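<p>As a rough illustration of the HAL-level layout described above,<wbr/> the following Java sketch (a hypothetical helper,<wbr/> not framework code) flattens framework MeteringRectangle objects into the (xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight) form,<wbr/> taking the exclusive xmax/<wbr/>ymax bounds as x + width and y + height:</p>
<pre>
// Illustrative sketch only; toHalRegions is a hypothetical helper, not framework code.
static int[] toHalRegions(MeteringRectangle[] regions) {
    int[] flat = new int[5 * regions.length];
    for (int i = 0; i &lt; regions.length; i++) {
        MeteringRectangle r = regions[i];
        flat[5 * i]     = r.getX();                 // xmin, inclusive
        flat[5 * i + 1] = r.getY();                 // ymin, inclusive
        flat[5 * i + 2] = r.getX() + r.getWidth();  // xmax, exclusive
        flat[5 * i + 3] = r.getY() + r.getHeight(); // ymax, exclusive
        flat[5 * i + 4] = r.getMeteringWeight();    // weight, 0..1000; 0 means ignored
    }
    return flat;
}
</pre>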
- <tr class="entry" id="dynamic_android.control.afTrigger">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>af<wbr/>Trigger
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">IDLE</span>
- <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">START</span>
- <span class="entry_type_enum_notes"><p>Autofocus will trigger now.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CANCEL</span>
- <span class="entry_type_enum_notes"><p>Autofocus will return to its initial
-state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether the camera device will trigger autofocus for this request.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry is normally set to IDLE,<wbr/> or is not
-included at all in the request settings.<wbr/></p>
-<p>When included and set to START,<wbr/> the camera device will trigger the
-autofocus algorithm.<wbr/> If autofocus is disabled,<wbr/> this trigger has no effect.<wbr/></p>
-<p>When set to CANCEL,<wbr/> the camera device will cancel any active trigger,<wbr/>
-and return to its initial AF state.<wbr/></p>
-<p>Generally,<wbr/> applications should set this entry to START or CANCEL for only a
-single capture,<wbr/> and then return it to IDLE (or not set at all).<wbr/> Specifying
-START for multiple captures in a row means restarting the AF operation over
-and over again.<wbr/></p>
-<p>See <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for what the trigger means for each AF mode.<wbr/></p>
-<p>Using the autofocus trigger and the precapture trigger <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>
-simultaneously is allowed.<wbr/> However,<wbr/> since these triggers often require cooperation between
-the auto-focus and auto-exposure routines (for example,<wbr/> the flash may need to be enabled for a
-focus sweep),<wbr/> the camera device may delay acting on a later trigger until the previous
-trigger has been fully handled.<wbr/> This may lead to longer intervals between the trigger and
-changes to <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a>,<wbr/> for example.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The HAL must support triggering the AF trigger while an AE precapture trigger is active
-(and vice versa),<wbr/> or at the same time as the AE trigger.<wbr/> It is acceptable for the HAL to
-treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
-AE trigger.<wbr/> Or the HAL may choose to optimize the case with both triggers fired at once,<wbr/>
-to minimize the latency for converging both focus and exposure/<wbr/>flash usage.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
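<p>The "set START for only a single capture" guidance above can be illustrated with a small Java sketch (public android.hardware.camera2 API,<wbr/> assuming the usual imports; session,<wbr/> previewBuilder,<wbr/> handler and onAfLocked() are hypothetical names): the trigger is sent in a one-shot request,<wbr/> returned to IDLE for the repeating stream,<wbr/> and the results are watched for a locked AF state:</p>
<pre>
// Illustrative sketch only; session, previewBuilder, handler and onAfLocked()
// are hypothetical names.
void triggerAutofocus(final CameraCaptureSession session,
                      final CaptureRequest.Builder previewBuilder,
                      final Handler handler) throws CameraAccessException {
    // One-shot request carrying the AF trigger.
    previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
            CameraMetadata.CONTROL_AF_TRIGGER_START);
    session.capture(previewBuilder.build(), null, handler);
    // Leave IDLE in the repeating request so the scan is not restarted every frame,
    // and watch the repeating results for one of the locked states.
    previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
            CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
    session.setRepeatingRequest(previewBuilder.build(),
            new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(CameraCaptureSession s,
                        CaptureRequest request, TotalCaptureResult result) {
                    Integer af = result.get(CaptureResult.CONTROL_AF_STATE);
                    if (af != null
                            &amp;&amp; (af == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
                                || af == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)) {
                        onAfLocked();  // hypothetical: lens is locked, proceed
                    }
                }
            }, handler);
}
</pre>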
- <tr class="entry" id="dynamic_android.control.afState">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>af<wbr/>State
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">INACTIVE</span>
- <span class="entry_type_enum_notes"><p>AF is off or has not yet tried to scan/<wbr/>been asked
-to scan.<wbr/></p>
-<p>When a camera device is opened,<wbr/> it starts in this
-state.<wbr/> This is a transient state,<wbr/> the camera device may
-skip reporting this state in capture
-result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PASSIVE_SCAN</span>
-        <span class="entry_type_enum_notes"><p>AF is currently performing an AF scan initiated by the
-camera device in a continuous autofocus mode.<wbr/></p>
-<p>Only used by CONTINUOUS_<wbr/>* AF modes.<wbr/> This is a transient
-state,<wbr/> the camera device may skip reporting this state in
-capture result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PASSIVE_FOCUSED</span>
- <span class="entry_type_enum_notes"><p>AF currently believes it is in focus,<wbr/> but may
-restart scanning at any time.<wbr/></p>
-<p>Only used by CONTINUOUS_<wbr/>* AF modes.<wbr/> This is a transient
-state,<wbr/> the camera device may skip reporting this state in
-capture result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ACTIVE_SCAN</span>
- <span class="entry_type_enum_notes"><p>AF is performing an AF scan because it was
-triggered by AF trigger.<wbr/></p>
-<p>Only used by AUTO or MACRO AF modes.<wbr/> This is a transient
-state,<wbr/> the camera device may skip reporting this state in
-capture result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FOCUSED_LOCKED</span>
- <span class="entry_type_enum_notes"><p>AF believes it is focused correctly and has locked
-focus.<wbr/></p>
-<p>This state is reached only after an explicit START AF trigger has been
-sent (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>),<wbr/> when good focus has been obtained.<wbr/></p>
-<p>The lens will remain stationary until the AF mode (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>) is changed or
-a new AF trigger is sent to the camera device (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>).<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">NOT_FOCUSED_LOCKED</span>
- <span class="entry_type_enum_notes"><p>AF has failed to focus successfully and has locked
-focus.<wbr/></p>
-<p>This state is reached only after an explicit START AF trigger has been
-sent (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>),<wbr/> when good focus cannot be obtained.<wbr/></p>
-<p>The lens will remain stationary until the AF mode (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>) is changed or
-a new AF trigger is sent to the camera device (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>).<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PASSIVE_UNFOCUSED</span>
- <span class="entry_type_enum_notes"><p>AF finished a passive scan without finding focus,<wbr/>
-and may restart scanning at any time.<wbr/></p>
-<p>Only used by CONTINUOUS_<wbr/>* AF modes.<wbr/> This is a transient state,<wbr/> the camera
-device may skip reporting this state in capture result.<wbr/></p>
-<p>LEGACY camera devices do not support this state.<wbr/> When a passive
-scan has finished,<wbr/> it will always go to PASSIVE_<wbr/>FOCUSED.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Current state of auto-focus (AF) algorithm.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Switching between or enabling AF modes (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>) always
-resets the AF state to INACTIVE.<wbr/> Similarly,<wbr/> switching between <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>,<wbr/>
-or <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> if <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code> resets all
-the algorithm states to INACTIVE.<wbr/></p>
-<p>The camera device can do several state transitions between two results,<wbr/> if it is
-allowed by the state transition table.<wbr/> For example: INACTIVE may never actually be
-seen in a result.<wbr/></p>
-<p>The state in the result is the state for this image (in sync with this image): if
-AF state becomes FOCUSED,<wbr/> then the image data associated with this result should
-be sharp.<wbr/></p>
-<p>Below are state transition tables for different AF modes.<wbr/></p>
-<p>When <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> is AF_<wbr/>MODE_<wbr/>OFF or AF_<wbr/>MODE_<wbr/>EDOF:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center"></td>
-<td align="center">INACTIVE</td>
-<td align="center">Never changes</td>
-</tr>
-</tbody>
-</table>
-<p>When <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> is AF_<wbr/>MODE_<wbr/>AUTO or AF_<wbr/>MODE_<wbr/>MACRO:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">ACTIVE_<wbr/>SCAN</td>
-<td align="center">Start AF sweep,<wbr/> Lens now moving</td>
-</tr>
-<tr>
-<td align="center">ACTIVE_<wbr/>SCAN</td>
-<td align="center">AF sweep done</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Focused,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">ACTIVE_<wbr/>SCAN</td>
-<td align="center">AF sweep done</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Not focused,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">ACTIVE_<wbr/>SCAN</td>
-<td align="center">AF_<wbr/>CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Cancel/<wbr/>reset AF,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Cancel/<wbr/>reset AF</td>
-</tr>
-<tr>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">ACTIVE_<wbr/>SCAN</td>
-<td align="center">Start new sweep,<wbr/> Lens now moving</td>
-</tr>
-<tr>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Cancel/<wbr/>reset AF</td>
-</tr>
-<tr>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">ACTIVE_<wbr/>SCAN</td>
-<td align="center">Start new sweep,<wbr/> Lens now moving</td>
-</tr>
-<tr>
-<td align="center">Any state</td>
-<td align="center">Mode change</td>
-<td align="center">INACTIVE</td>
-<td align="center"></td>
-</tr>
-</tbody>
-</table>
-<p>For the above table,<wbr/> the camera device may skip reporting any state changes that happen
-without application intervention (i.<wbr/>e.<wbr/> mode switch,<wbr/> trigger,<wbr/> locking).<wbr/> Any state that
-can be skipped in that manner is called a transient state.<wbr/></p>
-<p>For example,<wbr/> for these AF modes (AF_<wbr/>MODE_<wbr/>AUTO and AF_<wbr/>MODE_<wbr/>MACRO),<wbr/> in addition to the
-state transitions listed in the above table,<wbr/> it is also legal for the camera device to skip
-one or more transient states between two results.<wbr/> See the table below for examples:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Focus is already good or good after a scan,<wbr/> lens is now locked.<wbr/></td>
-</tr>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Focus failed after a scan,<wbr/> lens is now locked.<wbr/></td>
-</tr>
-<tr>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Focus is already good or good after a scan,<wbr/> lens is now locked.<wbr/></td>
-</tr>
-<tr>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Focus is good after a scan,<wbr/> lens is not locked.<wbr/></td>
-</tr>
-</tbody>
-</table>
-<p>When <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> is AF_<wbr/>MODE_<wbr/>CONTINUOUS_<wbr/>VIDEO:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">Camera device initiates new scan</td>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Start AF scan,<wbr/> Lens now moving</td>
-</tr>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF state query,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Camera device completes current scan</td>
-<td align="center">PASSIVE_<wbr/>FOCUSED</td>
-<td align="center">End AF scan,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Camera device fails current scan</td>
-<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
-<td align="center">End AF scan,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Immediate transition,<wbr/> if focus is good.<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Immediate transition,<wbr/> if focus is bad.<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">AF_<wbr/>CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Reset lens position,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>FOCUSED</td>
-<td align="center">Camera device initiates new scan</td>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Start AF scan,<wbr/> Lens now moving</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
-<td align="center">Camera device initiates new scan</td>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Start AF scan,<wbr/> Lens now moving</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>FOCUSED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Immediate transition,<wbr/> lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Immediate transition,<wbr/> lens now locked</td>
-</tr>
-<tr>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">No effect</td>
-</tr>
-<tr>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Restart AF scan</td>
-</tr>
-<tr>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">No effect</td>
-</tr>
-<tr>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Restart AF scan</td>
-</tr>
-</tbody>
-</table>
-<p>When <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> is AF_<wbr/>MODE_<wbr/>CONTINUOUS_<wbr/>PICTURE:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">Camera device initiates new scan</td>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Start AF scan,<wbr/> Lens now moving</td>
-</tr>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF state query,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Camera device completes current scan</td>
-<td align="center">PASSIVE_<wbr/>FOCUSED</td>
-<td align="center">End AF scan,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Camera device fails current scan</td>
-<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
-<td align="center">End AF scan,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Eventual transition once the focus is good.<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Eventual transition if cannot find focus.<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">AF_<wbr/>CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Reset lens position,<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>FOCUSED</td>
-<td align="center">Camera device initiates new scan</td>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Start AF scan,<wbr/> Lens now moving</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
-<td align="center">Camera device initiates new scan</td>
-<td align="center">PASSIVE_<wbr/>SCAN</td>
-<td align="center">Start AF scan,<wbr/> Lens now moving</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>FOCUSED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Immediate trans.<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">Immediate trans.<wbr/> Lens now locked</td>
-</tr>
-<tr>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">No effect</td>
-</tr>
-<tr>
-<td align="center">FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Restart AF scan</td>
-</tr>
-<tr>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>TRIGGER</td>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">No effect</td>
-</tr>
-<tr>
-<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
-<td align="center">AF_<wbr/>CANCEL</td>
-<td align="center">INACTIVE</td>
-<td align="center">Restart AF scan</td>
-</tr>
-</tbody>
-</table>
-<p>When switching between AF_<wbr/>MODE_<wbr/>CONTINUOUS_<wbr/>* (CAF modes) and AF_<wbr/>MODE_<wbr/>AUTO/<wbr/>AF_<wbr/>MODE_<wbr/>MACRO
-(AUTO modes),<wbr/> the initial INACTIVE or PASSIVE_<wbr/>SCAN states may be skipped by the
-camera device.<wbr/> When a trigger is included in a mode switch request,<wbr/> the trigger
-will be evaluated in the context of the new mode in the request.<wbr/>
-See the table below for examples:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">any state</td>
-<td align="center">CAF-->AUTO mode switch</td>
-<td align="center">INACTIVE</td>
-<td align="center">Mode switch without trigger,<wbr/> initial state must be INACTIVE</td>
-</tr>
-<tr>
-<td align="center">any state</td>
-<td align="center">CAF-->AUTO mode switch with AF_<wbr/>TRIGGER</td>
-<td align="center">trigger-reachable states from INACTIVE</td>
-<td align="center">Mode switch with trigger,<wbr/> INACTIVE is skipped</td>
-</tr>
-<tr>
-<td align="center">any state</td>
-<td align="center">AUTO-->CAF mode switch</td>
-<td align="center">passively reachable states from INACTIVE</td>
-<td align="center">Mode switch without trigger,<wbr/> passive transient state is skipped</td>
-</tr>
-</tbody>
-</table>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.control.afTriggerId">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>control.<wbr/>af<wbr/>Trigger<wbr/>Id
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The ID sent with the latest
-CAMERA2_<wbr/>TRIGGER_<wbr/>AUTOFOCUS call</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Must be 0 if no CAMERA2_<wbr/>TRIGGER_<wbr/>AUTOFOCUS trigger
-received yet by HAL.<wbr/> Always updated even if AF algorithm
-ignores the trigger</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.control.awbLock">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>awb<wbr/>Lock
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Auto-white balance lock is disabled; the AWB
-algorithm is free to update its parameters if in AUTO
-mode.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Auto-white balance lock is enabled; the AWB
-algorithm will not update its parameters while the lock
-is active.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether auto-white balance (AWB) is currently locked to its
-latest calculated values.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to <code>true</code> (ON),<wbr/> the AWB algorithm is locked to its latest parameters,<wbr/>
-and will not change color balance settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
-<p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
-get locked do not necessarily correspond to the settings that were present in the
-latest capture result received from the camera device,<wbr/> since additional captures
-and AWB updates may have occurred even before the result was sent out.<wbr/> If an
-application is switching between automatic and manual control and wishes to eliminate
-any flicker during the switch,<wbr/> the following procedure is recommended:</p>
-<ol>
-<li>Starting in auto-AWB mode:</li>
-<li>Lock AWB</li>
-<li>Wait for the first result to be output that has the AWB locked</li>
-<li>Copy AWB settings from that result into a request,<wbr/> set the request to manual AWB</li>
-<li>Submit the capture request,<wbr/> proceed to run manual AWB as desired.<wbr/></li>
-</ol>
-<p>Note that AWB lock is only meaningful when
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> is in the AUTO mode; in other modes,<wbr/>
-AWB is already fixed to a specific setting.<wbr/></p>
-<p>Some LEGACY devices may not support ON; the value is then overridden to OFF.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
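<p>A minimal,<wbr/> illustrative Java sketch of the lock-then-copy procedure listed above (public android.hardware.camera2 API,<wbr/> assuming the usual imports; session,<wbr/> previewBuilder and handler are hypothetical names,<wbr/> and devices that do not report android.<wbr/>control.<wbr/>awb<wbr/>State would need a different readiness check):</p>
<pre>
// Illustrative sketch only; names are hypothetical. Follows the numbered
// procedure above: lock AWB, wait for a locked result, copy its transform and
// gains into a manual-AWB request, then continue with manual control.
void lockAwbThenGoManual(final CameraCaptureSession session,
                         final CaptureRequest.Builder previewBuilder,
                         final Handler handler) throws CameraAccessException {
    previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
    session.setRepeatingRequest(previewBuilder.build(),
            new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(CameraCaptureSession s,
                        CaptureRequest request, TotalCaptureResult result) {
                    Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
                    if (awbState == null
                            || awbState != CaptureResult.CONTROL_AWB_STATE_LOCKED) {
                        return;  // wait for the first result with AWB locked
                    }
                    try {
                        previewBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
                                CameraMetadata.CONTROL_AWB_MODE_OFF);
                        previewBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                                CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
                        previewBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                                result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM));
                        previewBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS));
                        // Continue with manual AWB; no callback needed any more.
                        s.setRepeatingRequest(previewBuilder.build(), null, handler);
                    } catch (CameraAccessException ignored) {
                        // hypothetical error handling omitted
                    }
                }
            }, handler);
}
</pre>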
- <tr class="entry" id="dynamic_android.control.awbMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>awb<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled.<wbr/></p>
-<p>The application-selected color transform matrix
-(<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>) and gains
-(<a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>) are used by the camera
-device for manual white balance control.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AUTO</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is active.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">INCANDESCENT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses incandescent light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>While the exact white balance transforms are up to the
-camera device,<wbr/> they will approximately match the CIE
-standard illuminant A.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FLUORESCENT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses fluorescent light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>While the exact white balance transforms are up to the
-camera device,<wbr/> they will approximately match the CIE
-standard illuminant F2.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">WARM_FLUORESCENT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses warm fluorescent light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>While the exact white balance transforms are up to the
-camera device,<wbr/> they will approximately match the CIE
-standard illuminant F4.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">DAYLIGHT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses daylight light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>While the exact white balance transforms are up to the
-camera device,<wbr/> they will approximately match the CIE
-standard illuminant D65.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CLOUDY_DAYLIGHT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses cloudy daylight light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">TWILIGHT</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses twilight light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SHADE</span>
- <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
-the camera device uses shade light as the assumed scene
-illumination for white balance.<wbr/></p>
-<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
-and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
-For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
-values used by the camera device for the transform and gains
-will be available in the capture result for this request.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether auto-white balance (AWB) is currently setting the color
-transform fields,<wbr/> and what its illumination target
-is.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.awbAvailableModes">android.<wbr/>control.<wbr/>awb<wbr/>Available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is AUTO.<wbr/></p>
-<p>When set to the AUTO mode,<wbr/> the camera device's auto-white balance
-routine is enabled,<wbr/> overriding the application's selected
-<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/> <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
-<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a>.<wbr/> Note that when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>
-is OFF,<wbr/> the behavior of AWB is device dependent.<wbr/> It is recommended to
-also set AWB mode to OFF or lock AWB by using <a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> before
-setting AE mode to OFF.<wbr/></p>
-<p>When set to the OFF mode,<wbr/> the camera device's auto-white balance
-routine is disabled.<wbr/> The application manually controls the white
-balance by <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/> <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>
-and <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a>.<wbr/></p>
-<p>When set to any other modes,<wbr/> the camera device's auto-white
-balance routine is disabled.<wbr/> The camera device uses each
-particular illumination target for white balance
-adjustment.<wbr/> The application's values for
-<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/>
-<a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
-<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> are ignored.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.control.awbRegions">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>awb<wbr/>Regions
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 5 x area_count
- </span>
- <span class="entry_type_visibility"> [public as meteringRectangle]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of metering areas to use for auto-white-balance illuminant
-estimation.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
- </td>
-
- <td class="entry_range">
- <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Not available if <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a> is 0.<wbr/>
-Otherwise will always be present.<wbr/></p>
-<p>The maximum number of regions supported by the device is determined by the value
-of <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a>.<wbr/></p>
-<p>The coordinate system is based on the active pixel array,<wbr/>
-with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
-(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
-bottom-right pixel in the active pixel array.<wbr/></p>
-<p>The weight must range from 0 to 1000,<wbr/> and represents a weight
-for every pixel in the area.<wbr/> This means that a large metering area
-with the same weight as a smaller area will have more effect in
-the metering result.<wbr/> Metering areas can partially overlap and the
-camera device will add the weights in the overlap region.<wbr/></p>
-<p>The weights are relative to weights of other white balance metering regions,<wbr/> so if
-only one region is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with
-0 weight is ignored.<wbr/></p>
-<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
-camera device.<wbr/></p>
-<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
-capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
-region and output only the intersection rectangle as the metering region in the result
-metadata.<wbr/> If the region is entirely outside the crop region,<wbr/> it will be ignored and
-not reported in the result metadata.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-        <p>The HAL-level representation of MeteringRectangle[] is an
-int[5 * area_<wbr/>count].<wbr/>
-Every five elements represent a metering region of
-(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
-The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
-exclusive on xmax and ymax.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.control.captureIntent">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>capture<wbr/>Intent
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">CUSTOM</span>
- <span class="entry_type_enum_notes"><p>The goal of this request doesn't fall into the other
-categories.<wbr/> The camera device will default to preview-like
-behavior.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PREVIEW</span>
- <span class="entry_type_enum_notes"><p>This request is for a preview-like use case.<wbr/></p>
-<p>The precapture trigger may be used to start off a metering
-w/<wbr/>flash sequence.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">STILL_CAPTURE</span>
- <span class="entry_type_enum_notes"><p>This request is for a still capture-type
-use case.<wbr/></p>
-<p>If the flash unit is under automatic control,<wbr/> it may fire as needed.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">VIDEO_RECORD</span>
- <span class="entry_type_enum_notes"><p>This request is for a video recording
-use case.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">VIDEO_SNAPSHOT</span>
- <span class="entry_type_enum_notes"><p>This request is for a video snapshot (still
-image while recording video) use case.<wbr/></p>
-<p>The camera device should take the highest-quality image
-possible (given the other settings) without disrupting the
-frame rate of video recording.<wbr/> </p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
-                    <span class="entry_type_enum_notes"><p>This request is for a ZSL use case; the
-application will stream full-resolution images and
-reprocess one or several later for a final
-capture.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MANUAL</span>
-                    <span class="entry_type_enum_notes"><p>This request is for a manual capture use case where
-the application wants to directly control the capture parameters.<wbr/></p>
-<p>For example,<wbr/> the application may wish to manually control
-<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> etc.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Information to the camera device 3A (auto-exposure,<wbr/>
-auto-focus,<wbr/> auto-white balance) routines about the purpose
-of this capture,<wbr/> to help the camera device to decide optimal 3A
-strategy.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control (except for MANUAL) is only effective if
-<code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> != OFF</code> and any 3A routine is active.<wbr/></p>
-<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>
-contains PRIVATE_<wbr/>REPROCESSING or YUV_<wbr/>REPROCESSING.<wbr/> MANUAL will be supported if
-<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains MANUAL_<wbr/>SENSOR.<wbr/> Other intent values are
-always supported.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
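
A minimal sketch of how an application conveys this 3A hint through the public camera2 Java API, assuming a hypothetical helper class; the still-capture template already carries the matching intent, so the explicit set() below is illustrative rather than required.

    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraDevice;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;

    final class CaptureIntentSketch {
        // Builds a still-capture request whose capture intent matches the use
        // case, making the 3A hint described above explicit in the request.
        static CaptureRequest.Builder newStillRequest(CameraDevice device)
                throws CameraAccessException {
            CaptureRequest.Builder builder =
                    device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                    CameraMetadata.CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
            return builder;
        }
    }
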
-
-
- <tr class="entry" id="dynamic_android.control.awbState">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>awb<wbr/>State
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">INACTIVE</span>
- <span class="entry_type_enum_notes"><p>AWB is not in auto mode,<wbr/> or has not yet started metering.<wbr/></p>
-<p>When a camera device is opened,<wbr/> it starts in this
-state.<wbr/> This is a transient state;<wbr/> the camera device may
-skip reporting this state in the capture
-result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SEARCHING</span>
- <span class="entry_type_enum_notes"><p>AWB doesn't yet have a good set of control
-values for the current scene.<wbr/></p>
-<p>This is a transient state;<wbr/> the camera device
-may skip reporting this state in the capture result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CONVERGED</span>
- <span class="entry_type_enum_notes"><p>AWB has a good set of control values for the
-current scene.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">LOCKED</span>
- <span class="entry_type_enum_notes"><p>AWB has been locked.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Current state of auto-white balance (AWB) algorithm.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Switching between or enabling AWB modes (<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>) always
-resets the AWB state to INACTIVE.<wbr/> Similarly,<wbr/> switching between <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>,<wbr/>
-or <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> if <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code> resets all
-the algorithm states to INACTIVE.<wbr/></p>
-<p>The camera device can do several state transitions between two results,<wbr/> if it is
-allowed by the state transition table.<wbr/> So INACTIVE may never actually be seen in
-a result.<wbr/></p>
-<p>The state in the result is the state for this image (in sync with this image): if
-AWB state becomes CONVERGED,<wbr/> then the image data associated with this result should
-be good to use.<wbr/></p>
-<p>Below are state transition tables for different AWB modes.<wbr/></p>
-<p>When <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != AWB_<wbr/>MODE_<wbr/>AUTO</code>:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center"></td>
-<td align="center">INACTIVE</td>
-<td align="center">Camera device auto white balance algorithm is disabled</td>
-</tr>
-</tbody>
-</table>
-<p>When <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> is AWB_<wbr/>MODE_<wbr/>AUTO:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">Camera device initiates AWB scan</td>
-<td align="center">SEARCHING</td>
-<td align="center">Values changing</td>
-</tr>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is ON</td>
-<td align="center">LOCKED</td>
-<td align="center">Values locked</td>
-</tr>
-<tr>
-<td align="center">SEARCHING</td>
-<td align="center">Camera device finishes AWB scan</td>
-<td align="center">CONVERGED</td>
-<td align="center">Good values,<wbr/> not changing</td>
-</tr>
-<tr>
-<td align="center">SEARCHING</td>
-<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is ON</td>
-<td align="center">LOCKED</td>
-<td align="center">Values locked</td>
-</tr>
-<tr>
-<td align="center">CONVERGED</td>
-<td align="center">Camera device initiates AWB scan</td>
-<td align="center">SEARCHING</td>
-<td align="center">Values changing</td>
-</tr>
-<tr>
-<td align="center">CONVERGED</td>
-<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is ON</td>
-<td align="center">LOCKED</td>
-<td align="center">Values locked</td>
-</tr>
-<tr>
-<td align="center">LOCKED</td>
-<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is OFF</td>
-<td align="center">SEARCHING</td>
-<td align="center">Values not good after unlock</td>
-</tr>
-</tbody>
-</table>
-<p>For the above table,<wbr/> the camera device may skip reporting any state changes that happen
-without application intervention (i.<wbr/>e.<wbr/> mode switch,<wbr/> trigger,<wbr/> locking).<wbr/> Any state that
-can be skipped in that manner is called a transient state.<wbr/></p>
-<p>For example,<wbr/> for this AWB mode (AWB_<wbr/>MODE_<wbr/>AUTO),<wbr/> in addition to the state transitions
-listed in the above table,<wbr/> it is also legal for the camera device to skip one or more
-transient states between two results.<wbr/> See below table for examples:</p>
-<table>
-<thead>
-<tr>
-<th align="center">State</th>
-<th align="center">Transition Cause</th>
-<th align="center">New State</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">INACTIVE</td>
-<td align="center">Camera device finished AWB scan</td>
-<td align="center">CONVERGED</td>
-<td align="center">Values are already good,<wbr/> transient states are skipped by camera device.<wbr/></td>
-</tr>
-<tr>
-<td align="center">LOCKED</td>
-<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is OFF</td>
-<td align="center">CONVERGED</td>
-<td align="center">Values good after unlock,<wbr/> transient states are skipped by camera device.<wbr/></td>
-</tr>
-</tbody>
-</table>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
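
To show how the state machine above is typically consumed, here is a rough capture-callback sketch in the public camera2 Java API; the class name is invented, and the reaction to CONVERGED/LOCKED is left as a comment.

    import android.hardware.camera2.CameraCaptureSession;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.CaptureResult;
    import android.hardware.camera2.TotalCaptureResult;

    final class AwbStateSketch extends CameraCaptureSession.CaptureCallback {
        // Tracks the per-frame AWB state. Because INACTIVE and SEARCHING are
        // transient, only CONVERGED and LOCKED are treated as "frame usable".
        @Override
        public void onCaptureCompleted(CameraCaptureSession session,
                                       CaptureRequest request,
                                       TotalCaptureResult result) {
            Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
            if (awbState == null) {
                return; // The key is [limited]; LEGACY devices may omit it.
            }
            if (awbState == CameraMetadata.CONTROL_AWB_STATE_CONVERGED
                    || awbState == CameraMetadata.CONTROL_AWB_STATE_LOCKED) {
                // White balance for this frame has settled; safe to use the image.
            }
        }
    }
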
-
-
- <tr class="entry" id="dynamic_android.control.effectMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>effect<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No color effect will be applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MONO</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "monocolor" effect where the image is mapped into
-a single color.<wbr/></p>
-<p>This will typically be grayscale.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">NEGATIVE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "photo-negative" effect where the image's colors
-are inverted.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SOLARIZE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "solarisation" effect (Sabattier effect) where the
-image is wholly or partially reversed in
-tone.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SEPIA</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "sepia" effect where the image is mapped into warm
-gray,<wbr/> red,<wbr/> and brown tones.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">POSTERIZE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "posterization" effect where the image uses
-discrete regions of tone rather than a continuous
-gradient of tones.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">WHITEBOARD</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "whiteboard" effect where the image is typically displayed
-as regions of white,<wbr/> with black or grey details.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">BLACKBOARD</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>A "blackboard" effect where the image is typically displayed
-as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AQUA</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>An "aqua" effect where a blue hue is added to the image.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A special color effect to apply.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.availableEffects">android.<wbr/>control.<wbr/>available<wbr/>Effects</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When this mode is set,<wbr/> a color effect will be applied
-to images produced by the camera device.<wbr/> The interpretation
-and implementation of these color effects is left to the
-implementor of the camera device,<wbr/> and should not be
-depended on to be consistent (or present) across all
-devices.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.control.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Full application control of pipeline.<wbr/></p>
-<p>All control by the device's metering and focusing (3A)
-routines is disabled,<wbr/> and no other settings in
-android.<wbr/>control.<wbr/>* have any effect,<wbr/> except that
-<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> may be used by the camera
-device to select post-processing values for processing
-blocks that do not allow for manual control,<wbr/> or are not
-exposed by the camera API.<wbr/></p>
-<p>However,<wbr/> the camera device's 3A routines may continue to
-collect statistics and update their internal state so that
-when control is switched to AUTO mode,<wbr/> good control values
-can be immediately applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">AUTO</span>
- <span class="entry_type_enum_notes"><p>Use settings for each individual 3A routine.<wbr/></p>
-<p>Manual control of capture parameters is disabled.<wbr/> All
-controls in android.<wbr/>control.<wbr/>* besides sceneMode take
-effect.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">USE_SCENE_MODE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Use a specific scene mode.<wbr/></p>
-<p>Enabling this disables control.<wbr/>aeMode,<wbr/> control.<wbr/>awbMode and
-control.<wbr/>afMode controls; the camera device will ignore
-those settings while USE_<wbr/>SCENE_<wbr/>MODE is active (except for
-FACE_<wbr/>PRIORITY scene mode).<wbr/> Other control entries are still active.<wbr/>
-This setting can only be used if scene mode is supported (i.<wbr/>e.<wbr/>
-<a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>
-contains some modes other than DISABLED).<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">OFF_KEEP_STATE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Same as OFF mode,<wbr/> except that this capture will not be
-used by camera device background auto-exposure,<wbr/> auto-white balance and
-auto-focus algorithms (3A) to update their statistics.<wbr/></p>
-<p>Specifically,<wbr/> the 3A routines are locked to the last
-values set from a request with AUTO,<wbr/> OFF,<wbr/> or
-USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> and any statistics or state updates
-collected from manual captures with OFF_<wbr/>KEEP_<wbr/>STATE will be
-discarded by the camera device.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Overall mode of 3A (auto-exposure,<wbr/> auto-white-balance,<wbr/> auto-focus) control
-routines.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.availableModes">android.<wbr/>control.<wbr/>available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is a top-level 3A control switch.<wbr/> When set to OFF,<wbr/> all 3A control
-by the camera device is disabled.<wbr/> The application must set the fields for
-capture parameters itself.<wbr/></p>
-<p>When set to AUTO,<wbr/> the individual algorithm controls in
-android.<wbr/>control.<wbr/>* are in effect,<wbr/> such as <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>.<wbr/></p>
-<p>When set to USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> the individual controls in
-android.<wbr/>control.<wbr/>* are mostly disabled,<wbr/> and the camera device implements
-one of the scene mode settings (such as ACTION,<wbr/> SUNSET,<wbr/> or PARTY)
-as it wishes.<wbr/> The camera device scene mode 3A settings are provided by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">capture results</a>.<wbr/></p>
-<p>When set to OFF_<wbr/>KEEP_<wbr/>STATE,<wbr/> behavior is similar to OFF mode; the only difference
-is that this frame will not be used by the camera device's background 3A statistics
-update,<wbr/> as if this frame were never captured.<wbr/> This mode can be used in the scenario
-where the application doesn't want a 3A manual control capture to affect
-the subsequent auto 3A capture results.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
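
A short sketch of the OFF path described above, using the public camera2 Java API; the exposure time and sensitivity values are placeholders and a real application should clamp them to the ranges advertised in CameraCharacteristics.

    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;

    final class ControlModeSketch {
        // Disables all 3A processing and supplies manual capture parameters,
        // matching the CONTROL_MODE == OFF behavior described above.
        static void applyManualExposure(CaptureRequest.Builder builder) {
            builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_OFF);
            builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 30_000_000L); // 30 ms in ns
            builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);           // ISO 400
        }
    }
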
-
-
- <tr class="entry" id="dynamic_android.control.sceneMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>control.<wbr/>scene<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">DISABLED</span>
- <span class="entry_type_enum_value">0</span>
- <span class="entry_type_enum_notes"><p>Indicates that no scene modes are set for a given capture request.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FACE_PRIORITY</span>
- <span class="entry_type_enum_notes"><p>If face detection support exists,<wbr/> use face
-detection data for auto-focus,<wbr/> auto-white balance,<wbr/> and
-auto-exposure routines.<wbr/></p>
-<p>If face detection statistics are disabled
-(i.<wbr/>e.<wbr/> <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> is set to OFF),<wbr/>
-this should still operate correctly (but will not return
-face detection statistics to the framework).<wbr/></p>
-<p>Unlike the other scene modes,<wbr/> <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
-remain active when FACE_<wbr/>PRIORITY is set.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ACTION</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving objects.<wbr/></p>
-<p>Similar to SPORTS.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PORTRAIT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for still photos of people.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">LANDSCAPE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for photos of distant macroscopic objects.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">NIGHT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for low-light settings.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">NIGHT_PORTRAIT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for still photos of people in low-light
-settings.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">THEATRE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings where flash must
-remain off.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">BEACH</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor beach settings.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SNOW</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor settings containing snow.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SUNSET</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for scenes of the setting sun.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">STEADYPHOTO</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized to avoid blurry photos due to small amounts of
-device motion (for example: due to hand shake).<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FIREWORKS</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for nighttime photos of fireworks.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SPORTS</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving people.<wbr/></p>
-<p>Similar to ACTION.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PARTY</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings with multiple moving
-people.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CANDLELIGHT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optimized for dim settings where the main light source
-is a flame.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">BARCODE</span>
- <span class="entry_type_enum_optional">[optional]</span>
-                    <span class="entry_type_enum_notes"><p>Optimized for accurately capturing a photo of a barcode
-for use by camera applications that wish to read the
-barcode value.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_SPEED_VIDEO</span>
- <span class="entry_type_enum_deprecated">[deprecated]</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>This is deprecated,<wbr/> please use <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>
-and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>
-for high speed video recording.<wbr/></p>
-<p>Optimized for high speed video recording (frame rate >=60fps) use case.<wbr/></p>
-<p>The supported high speed video sizes and fps ranges are specified in
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> To get desired
-output frame rates,<wbr/> the application is only allowed to select video size
-and fps range combinations listed in this static metadata.<wbr/> The fps range
-can be controlled via <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a>.<wbr/></p>
-<p>In this mode,<wbr/> the camera device will override aeMode,<wbr/> awbMode,<wbr/> and afMode to
-ON,<wbr/> ON,<wbr/> and CONTINUOUS_<wbr/>VIDEO,<wbr/> respectively.<wbr/> All post-processing block mode
-controls will be overridden to be FAST.<wbr/> Therefore,<wbr/> no manual control of capture
-and post-processing parameters is possible.<wbr/> All other controls operate the
-same as when <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == AUTO.<wbr/> This means that all other
-android.<wbr/>control.<wbr/>* fields continue to work,<wbr/> such as</p>
-<ul>
-<li><a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a></li>
-<li><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a></li>
-<li><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></li>
-<li><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></li>
-<li><a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a></li>
-<li><a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a></li>
-<li><a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a></li>
-<li><a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a></li>
-<li><a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a></li>
-<li><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a></li>
-</ul>
-<p>Outside of android.<wbr/>control.<wbr/>*,<wbr/> the following controls will work:</p>
-<ul>
-<li><a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> (automatic flash for still capture will not work since aeMode is ON)</li>
-<li><a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a> (if it is supported)</li>
-<li><a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a></li>
-<li><a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a></li>
-</ul>
-<p>For the high speed recording use case,<wbr/> the actual maximum supported frame rate may
-be lower than what the camera can output,<wbr/> depending on the destination Surfaces for
-the image data.<wbr/> For example,<wbr/> if the destination surface is from a video encoder,<wbr/>
-the application needs to check whether the video encoder is capable of supporting the
-high frame rate for a given video size,<wbr/> or it will end up with a lower recording
-frame rate.<wbr/> If the destination surface is from a preview window,<wbr/> the preview frame
-rate will be bounded by the screen refresh rate.<wbr/></p>
-<p>The camera device will only support up to 2 output high speed streams
-(processed non-stalling format defined in <a href="#static_android.request.maxNumOutputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams</a>)
-in this mode.<wbr/> This control will be effective only if all of the below conditions are true:</p>
-<ul>
-<li>The application created no more than maxNumHighSpeedStreams processed non-stalling
-format output streams,<wbr/> where maxNumHighSpeedStreams is calculated as
-min(2,<wbr/> <a href="#static_android.request.maxNumOutputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams</a>[Processed (but not-stalling)]).<wbr/></li>
-<li>The stream sizes are selected from the sizes reported by
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/></li>
-<li>No processed non-stalling or raw streams are configured.<wbr/></li>
-</ul>
-<p>When the above conditions are NOT satisfied,<wbr/> the controls of this mode and
-<a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> will be ignored by the camera device;<wbr/>
-the camera device will fall back to <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> <code>==</code> AUTO,<wbr/>
-and the returned capture result metadata will give the fps range chosen
-by the camera device.<wbr/></p>
-<p>Switching into or out of this mode may trigger some camera ISP/<wbr/>sensor
-reconfigurations,<wbr/> which may introduce extra latency.<wbr/> It is recommended that
-the application avoid unnecessary scene mode switches as much as possible.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HDR</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Turn on a device-specific high dynamic range (HDR) mode.<wbr/></p>
-<p>In this scene mode,<wbr/> the camera device captures images
-that keep a larger range of scene illumination levels
-visible in the final image.<wbr/> For example,<wbr/> when taking a
-picture of an object in front of a bright window,<wbr/> both
-the object and the scene through the window may be
-visible when using HDR mode,<wbr/> while in normal AUTO mode,<wbr/>
-one or the other may be poorly exposed.<wbr/> As a tradeoff,<wbr/>
-HDR mode generally takes much longer to capture a single
-image,<wbr/> has no user control,<wbr/> and may have other artifacts
-depending on the HDR method used.<wbr/></p>
-<p>Therefore,<wbr/> HDR captures operate at a much slower rate
-than regular captures.<wbr/></p>
-<p>In this mode,<wbr/> on LIMITED or FULL devices,<wbr/> when a request
-is made with a <a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> of
-STILL_<wbr/>CAPTURE,<wbr/> the camera device will capture an image
-using a high dynamic range capture technique.<wbr/> On LEGACY
-devices,<wbr/> captures that target a JPEG-format output will
-be captured with HDR,<wbr/> and the capture intent is not
-relevant.<wbr/></p>
-<p>The HDR capture may involve the device capturing a burst
-of images internally and combining them into one,<wbr/> or it
-may involve the device using specialized high dynamic
-range capture hardware.<wbr/> In all cases,<wbr/> a single image is
-produced in response to a capture request submitted
-while in HDR mode.<wbr/></p>
-<p>Since substantial post-processing is generally needed to
-produce an HDR image,<wbr/> only YUV,<wbr/> PRIVATE,<wbr/> and JPEG
-outputs are supported for LIMITED/<wbr/>FULL device HDR
-captures,<wbr/> and only JPEG outputs are supported for LEGACY
-HDR captures.<wbr/> Using a RAW output for HDR capture is not
-supported.<wbr/></p>
-<p>Some devices may also support always-on HDR,<wbr/> which
-applies HDR processing at full frame rate.<wbr/> For these
-devices,<wbr/> intents other than STILL_<wbr/>CAPTURE will also
-produce an HDR output with no frame rate impact compared
-to normal operation,<wbr/> though the quality may be lower
-than for STILL_<wbr/>CAPTURE intents.<wbr/></p>
-<p>If SCENE_<wbr/>MODE_<wbr/>HDR is used with unsupported output types
-or capture intents,<wbr/> the images captured will be as if
-the SCENE_<wbr/>MODE was not enabled at all.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FACE_PRIORITY_LOW_LIGHT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_hidden">[hidden]</span>
- <span class="entry_type_enum_notes"><p>Same as FACE_<wbr/>PRIORITY scene mode,<wbr/> except that the camera
-device will choose higher sensitivity values (<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>)
-under low light conditions.<wbr/></p>
-<p>The camera device may be tuned to expose the images in a reduced
-sensitivity range to produce the best quality images.<wbr/> For example,<wbr/>
-if the <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a> gives a range of [100,<wbr/> 1600],<wbr/>
-the camera device auto-exposure routine tuning process may limit the actual
-exposure sensitivity range to [100,<wbr/> 1200] to ensure that the noise level isn't
-excessive in order to preserve the image quality.<wbr/> In this situation,<wbr/> the image under
-low light may be under-exposed when the sensor max exposure time (bounded by the
-<a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is one of the
-ON_<wbr/>* modes) and effective max sensitivity are reached.<wbr/> This scene mode allows the
-camera device auto-exposure routine to increase the sensitivity up to the max
-sensitivity specified by <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a> when the scene is too
-dark and the max exposure time is reached.<wbr/> The captured images may be noisier
-compared with the images captured in normal FACE_<wbr/>PRIORITY mode; therefore,<wbr/> it is
-recommended that the application only use this scene mode when it is capable of
-reducing the noise level of the captured images.<wbr/></p>
-<p>Unlike the other scene modes,<wbr/> <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
-remain active when FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT is set.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">DEVICE_CUSTOM_START</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_hidden">[hidden]</span>
- <span class="entry_type_enum_value">100</span>
- <span class="entry_type_enum_notes"><p>Scene mode values within the range of
-<code>[DEVICE_<wbr/>CUSTOM_<wbr/>START,<wbr/> DEVICE_<wbr/>CUSTOM_<wbr/>END]</code> are reserved for device specific
-customized scene modes.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">DEVICE_CUSTOM_END</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_hidden">[hidden]</span>
- <span class="entry_type_enum_value">127</span>
- <span class="entry_type_enum_notes"><p>Scene mode values within the range of
-<code>[DEVICE_<wbr/>CUSTOM_<wbr/>START,<wbr/> DEVICE_<wbr/>CUSTOM_<wbr/>END]</code> are reserved for device specific
-customized scene modes.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Control for which scene mode is currently active.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Scene modes are custom camera modes optimized for a certain set of conditions and
-capture settings.<wbr/></p>
-<p>This is the mode that is active when
-<code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code>.<wbr/> Aside from FACE_<wbr/>PRIORITY,<wbr/> these modes will
-disable <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
-while in use.<wbr/></p>
-<p>The interpretation and implementation of these scene modes is left
-to the implementor of the camera device.<wbr/> Their behavior will not be
-consistent across all devices,<wbr/> and any given device may only implement
-a subset of these modes.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL implementations that include scene modes are expected to provide
-the per-scene settings to use for <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> in
-<a href="#static_android.control.sceneModeOverrides">android.<wbr/>control.<wbr/>scene<wbr/>Mode<wbr/>Overrides</a>.<wbr/></p>
-<p>For HIGH_<wbr/>SPEED_<wbr/>VIDEO mode,<wbr/> if it is included in <a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>,<wbr/>
-the HAL must list the supported video sizes and fps ranges in
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> For a given size,<wbr/> e.<wbr/>g.<wbr/>
-1280x720,<wbr/> if the HAL has two different sensor configurations for normal streaming
-mode and high speed streaming,<wbr/> when this scene mode is set/<wbr/>reset in a sequence of capture
-requests,<wbr/> the HAL may have to switch between different sensor modes.<wbr/>
-This mode is deprecated in HAL3.<wbr/>3; to support high speed video recording,<wbr/> please implement
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a> and CONSTRAINED_<wbr/>HIGH_<wbr/>SPEED_<wbr/>VIDEO
-capability defined in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
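
As a rough illustration of selecting a scene mode only when it is advertised in android.control.availableSceneModes, the sketch below uses the public camera2 Java API with HDR as the example mode; the helper class name and the fallback behavior are assumptions.

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;

    final class SceneModeSketch {
        // Enables the HDR scene mode only when the device lists it; otherwise
        // the request is left on the regular AUTO path. Returns true when HDR
        // was actually requested.
        static boolean enableHdrSceneMode(CameraCharacteristics chars,
                                          CaptureRequest.Builder builder) {
            int[] modes = chars.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
            if (modes == null) {
                return false;
            }
            for (int mode : modes) {
                if (mode == CameraMetadata.CONTROL_SCENE_MODE_HDR) {
                    builder.set(CaptureRequest.CONTROL_MODE,
                            CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
                    builder.set(CaptureRequest.CONTROL_SCENE_MODE,
                            CameraMetadata.CONTROL_SCENE_MODE_HDR);
                    return true;
                }
            }
            return false;
        }
    }
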
-
-
- <tr class="entry" id="dynamic_android.control.videoStabilizationMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Video stabilization is disabled.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Video stabilization is enabled.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether video stabilization is
-active.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Video stabilization automatically warps images from
-the camera in order to stabilize motion between consecutive frames.<wbr/></p>
-<p>If enabled,<wbr/> video stabilization can modify the
-<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> to keep the video stream stabilized.<wbr/></p>
-<p>Switching between different video stabilization modes may take several
-frames to initialize;<wbr/> the camera device will report the current mode
-in capture result metadata.<wbr/> For example,<wbr/> when "ON" mode is requested,<wbr/>
-the video stabilization modes in the first several capture results may
-still be "OFF",<wbr/> and it will become "ON" when the initialization is
-done.<wbr/></p>
-<p>In addition,<wbr/> not all recording sizes or frame rates may be supported for
-stabilization by a device that reports stabilization support.<wbr/> It is guaranteed
-that an output targeting a MediaRecorder or MediaCodec will be stabilized if
-the recording resolution is less than or equal to 1920 x 1080 (width less than
-or equal to 1920,<wbr/> height less than or equal to 1080),<wbr/> and the recording
-frame rate is less than or equal to 30fps.<wbr/> At other sizes,<wbr/> the CaptureResult
-<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a> field will return
-OFF if the recording output is not stabilized,<wbr/> or if there are no output
-Surface types that can be stabilized.<wbr/></p>
-<p>If a camera device supports both this mode and OIS
-(<a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> turning both modes on may
-produce undesirable interaction,<wbr/> so it is recommended not to enable
-both at the same time.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
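
Because the applied stabilization mode can lag the requested one by several frames, applications typically confirm it per frame from the capture result. The sketch below (public camera2 Java API, invented class name) shows one way to do that.

    import android.hardware.camera2.CameraCaptureSession;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.CaptureResult;
    import android.hardware.camera2.TotalCaptureResult;

    final class StabilizationSketch extends CameraCaptureSession.CaptureCallback {
        static void requestStabilization(CaptureRequest.Builder builder) {
            builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                    CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
        }

        // The mode reported in each result is the one actually applied to
        // that frame, which may still be OFF during initialization.
        @Override
        public void onCaptureCompleted(CameraCaptureSession session,
                                       CaptureRequest request,
                                       TotalCaptureResult result) {
            Integer applied = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
            boolean stabilized = applied != null
                    && applied == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON;
            // 'stabilized' tells the app whether this particular frame was warped.
        }
    }
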
-
-
- <tr class="entry" id="dynamic_android.control.postRawSensitivityBoost">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The amount of additional sensitivity boost applied to output images
-after RAW sensor data is captured.<wbr/></p>
- </td>
-
- <td class="entry_units">
- ISO arithmetic units,<wbr/> the same as android.<wbr/>sensor.<wbr/>sensitivity
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.control.postRawSensitivityBoostRange">android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost<wbr/>Range</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Some camera devices support additional digital sensitivity boosting in the
-camera processing pipeline after the sensor RAW image is captured.<wbr/>
-Such a boost will be applied to YUV/<wbr/>JPEG format output images but will not
-have an effect on RAW output formats like RAW_<wbr/>SENSOR,<wbr/> RAW10,<wbr/> RAW12 or RAW_<wbr/>OPAQUE.<wbr/></p>
-<p>This key will be <code>null</code> for devices that do not support any RAW format
-outputs.<wbr/> For devices that do support RAW format outputs,<wbr/> this key will always
-be present,<wbr/> and if a device does not support post RAW sensitivity boost,<wbr/> it will
-list <code>100</code> in this key.<wbr/></p>
-<p>If the camera device cannot apply the exact boost requested,<wbr/> it will reduce the
-boost to the nearest supported value.<wbr/>
-The final boost value used will be available in the output capture result.<wbr/></p>
-<p>For devices that support post RAW sensitivity boost,<wbr/> the YUV/<wbr/>JPEG output images
-of such device will have the total sensitivity of
-<code><a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> * <a href="#controls_android.control.postRawSensitivityBoost">android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost</a> /<wbr/> 100</code>.<wbr/>
-The sensitivity of RAW format images will always be <code><a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a></code>.<wbr/></p>
-<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
-OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
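
A small sketch of the arithmetic and clamping described above, using the public camera2 Java API; the numeric example in the comment (sensor sensitivity 100 with a boost of 200 giving a total of 200) simply follows the formula in the entry, and the helper class name is invented.

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CaptureRequest;
    import android.util.Range;

    final class PostRawBoostSketch {
        // Applies a requested boost (100-based units, as above) after clamping
        // it to the advertised range. With SENSOR_SENSITIVITY = 100 and a
        // boost of 200, YUV/JPEG outputs reach a total sensitivity of
        // 100 * 200 / 100 = 200, while RAW outputs stay at 100.
        static void applyBoost(CameraCharacteristics chars,
                               CaptureRequest.Builder builder,
                               int requestedBoost) {
            Range<Integer> range =
                    chars.get(CameraCharacteristics.CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE);
            if (range == null) {
                return; // Device does not expose post-RAW sensitivity boost.
            }
            builder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST,
                    range.clamp(requestedBoost));
        }
    }
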
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_demosaic" class="section">demosaic</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.demosaic.mode">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>demosaic.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Minimal or no slowdown of frame rate compared to
-Bayer RAW output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>Improved processing quality but the frame rate might be slowed down
-relative to raw output.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Controls the quality of the demosaicing
-processing.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_edge" class="section">edge</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.edge.mode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>edge.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No edge enhancement is applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Apply edge enhancement at a quality level that does not slow down frame rate
-relative to sensor output.<wbr/> It may be the same as OFF if edge enhancement will
-slow down frame rate relative to sensor.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>Apply high-quality edge enhancement,<wbr/> at a cost of possibly reduced output frame rate.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Edge enhancement is applied at different levels for different output streams,<wbr/>
-based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have
-edge enhancement applied,<wbr/> while higher-resolution streams have no edge enhancement
-applied.<wbr/> The level of edge enhancement for low-resolution streams is tuned so that
-frame rate is not impacted,<wbr/> and the quality is equal to or better than FAST (since it
-is only applied to lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
-<p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
-with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
-high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
-produced via reprocessing when a user takes a picture.<wbr/> For such a use case,<wbr/> the
-high-resolution buffers must not have edge enhancement applied to maximize efficiency of
-preview and to avoid double-applying enhancement when reprocessed,<wbr/> while low-resolution
-buffers (used for recording or preview,<wbr/> generally) need edge enhancement applied for
-reasonable preview quality.<wbr/></p>
-<p>This mode is guaranteed to be supported by devices that support either the
-YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING capabilities
-(<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> lists either of those capabilities) and it will
-be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operation mode for edge
-enhancement.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Edge enhancement improves sharpness and details in the captured image.<wbr/> OFF means
-no enhancement will be applied by the camera device.<wbr/></p>
-<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined enhancement
-will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the
-camera device will use the highest-quality enhancement algorithms,<wbr/>
-even if it slows down capture rate.<wbr/> FAST means the camera device will
-not slow down capture rate when applying edge enhancement.<wbr/> FAST may be the same as OFF if
-edge enhancement will slow down capture rate.<wbr/> Every output stream will have a similar
-amount of enhancement applied.<wbr/></p>
-<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG is meant to be used by applications that maintain a continuous circular
-buffer of high-resolution images during preview and reprocess image(s) from that buffer
-into a final capture when triggered by the user.<wbr/> In this mode,<wbr/> the camera device applies
-edge enhancement to low-resolution streams (below maximum recording resolution) to
-maximize preview quality,<wbr/> but does not apply edge enhancement to high-resolution streams,<wbr/>
-since those will be reprocessed later if necessary.<wbr/></p>
-<p>For YUV_<wbr/>REPROCESSING,<wbr/> these FAST/<wbr/>HIGH_<wbr/>QUALITY modes both mean that the camera
-device will apply FAST/<wbr/>HIGH_<wbr/>QUALITY YUV-domain edge enhancement,<wbr/> respectively.<wbr/>
-The camera device may adjust its internal edge enhancement parameters for best
-image quality based on the <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a>,<wbr/> if it is set.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-            <p>For YUV_<wbr/>REPROCESSING,<wbr/> the HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
-adjust the internal edge enhancement parameters appropriately to get the best
-quality images.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
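
As an illustrative sketch of the zero-shutter-lag usage described above, the code below (public camera2 Java API) picks ZERO_SHUTTER_LAG for the repeating preview/recording request when the device lists it, falling back to FAST otherwise; the helper class name and the fallback choice are assumptions.

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;

    final class EdgeModeSketch {
        // Chooses an edge enhancement mode for the repeating request of a
        // reprocessing-based app: ZERO_SHUTTER_LAG if advertised, else FAST.
        static void chooseEdgeModeForZslPreview(CameraCharacteristics chars,
                                                CaptureRequest.Builder repeating) {
            int mode = CameraMetadata.EDGE_MODE_FAST;
            int[] available = chars.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES);
            if (available != null) {
                for (int m : available) {
                    if (m == CameraMetadata.EDGE_MODE_ZERO_SHUTTER_LAG) {
                        mode = m;
                        break;
                    }
                }
            }
            repeating.set(CaptureRequest.EDGE_MODE, mode);
        }
    }
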
-
-
- <tr class="entry" id="controls_android.edge.strength">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>edge.<wbr/>strength
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Control the amount of edge enhancement
-applied to the images.<wbr/></p>
- </td>
-
- <td class="entry_units">
- 1-10; 10 is maximum sharpening
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.edge.availableEdgeModes">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of edge enhancement modes for <a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a> that are supported by this camera
-device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Full-capability camera devices must always support OFF; camera devices that support
-YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING will list ZERO_<wbr/>SHUTTER_<wbr/>LAG; all devices will
-list FAST.<wbr/></p>
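-<p>As a non-normative sketch (not part of this specification),<wbr/> an application using the
-public camera2 API could check this list as follows; <code>characteristics</code> is assumed to be a
-previously obtained CameraCharacteristics instance:</p>
-<pre><code>// Sketch only: look for ZERO_SHUTTER_LAG in the advertised edge modes.
-int[] modes = characteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES);
-boolean supportsZsl = false;
-for (int mode : modes) {
-    if (mode == CameraMetadata.EDGE_MODE_ZERO_SHUTTER_LAG) {
-        supportsZsl = true;
-    }
-}
-</code></pre>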
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if edge enhancement control is available
-on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
-That is,<wbr/> if the highest quality implementation on the camera device does not slow down
-capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.edge.mode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>edge.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No edge enhancement is applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Apply edge enhancement at a quality level that does not slow down frame rate
-relative to sensor output.<wbr/> It may be the same as OFF if edge enhancement will
-slow down frame rate relative to sensor.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>Apply high-quality edge enhancement,<wbr/> at a cost of possibly reduced output frame rate.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Edge enhancement is applied at different levels for different output streams,<wbr/>
-based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have
-edge enhancement applied,<wbr/> while higher-resolution streams have no edge enhancement
-applied.<wbr/> The level of edge enhancement for low-resolution streams is tuned so that
-frame rate is not impacted,<wbr/> and the quality is equal to or better than FAST (since it
-is only applied to lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
-<p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
-with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
-high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
-produced via reprocessing when a user takes a picture.<wbr/> For such a use case,<wbr/> the
-high-resolution buffers must not have edge enhancement applied to maximize efficiency of
-preview and to avoid double-applying enhancement when reprocessed,<wbr/> while low-resolution
-buffers (used for recording or preview,<wbr/> generally) need edge enhancement applied for
-reasonable preview quality.<wbr/></p>
-<p>This mode is guaranteed to be supported by devices that support either the
-YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING capabilities
-(<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> lists either of those capabilities) and it will
-be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operation mode for edge
-enhancement.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Edge enhancement improves sharpness and details in the captured image.<wbr/> OFF means
-no enhancement will be applied by the camera device.<wbr/></p>
-<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined enhancement
-will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the
-camera device will use the highest-quality enhancement algorithms,<wbr/>
-even if it slows down capture rate.<wbr/> FAST means the camera device will
-not slow down capture rate when applying edge enhancement.<wbr/> FAST may be the same as OFF if
-edge enhancement will slow down capture rate.<wbr/> Every output stream will have a similar
-amount of enhancement applied.<wbr/></p>
-<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG is meant to be used by applications that maintain a continuous circular
-buffer of high-resolution images during preview and reprocess image(s) from that buffer
-into a final capture when triggered by the user.<wbr/> In this mode,<wbr/> the camera device applies
-edge enhancement to low-resolution streams (below maximum recording resolution) to
-maximize preview quality,<wbr/> but does not apply edge enhancement to high-resolution streams,<wbr/>
-since those will be reprocessed later if necessary.<wbr/></p>
-<p>For YUV_<wbr/>REPROCESSING,<wbr/> these FAST/<wbr/>HIGH_<wbr/>QUALITY modes both mean that the camera
-device will apply FAST/<wbr/>HIGH_<wbr/>QUALITY YUV-domain edge enhancement,<wbr/> respectively.<wbr/>
-The camera device may adjust its internal edge enhancement parameters for best
-image quality based on the <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a>,<wbr/> if it is set.<wbr/></p>
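-<p>As a non-normative sketch,<wbr/> an application targeting a reprocessing-based zero-shutter-lag
-use case might request this mode through the public camera2 API as follows; <code>device</code> is
-assumed to be an already opened CameraDevice:</p>
-<pre><code>// Sketch only: the ZSL template already defaults to ZERO_SHUTTER_LAG where supported,
-// but the mode can also be set explicitly.
-CaptureRequest.Builder builder =
-        device.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
-builder.set(CaptureRequest.EDGE_MODE, CameraMetadata.EDGE_MODE_ZERO_SHUTTER_LAG);
-</code></pre>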
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-                    <p>For YUV_<wbr/>REPROCESSING,<wbr/> the HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
-adjust its internal edge enhancement parameters appropriately to get the best
-quality images.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_flash" class="section">flash</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.flash.firingPower">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>flash.<wbr/>firing<wbr/>Power
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Power for flash firing/<wbr/>torch</p>
- </td>
-
- <td class="entry_units">
- 10 is max power; 0 is no flash.<wbr/> Linear
- </td>
-
- <td class="entry_range">
- <p>0 - 10</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Power for snapshot may use a different scale than
-for torch mode.<wbr/> Only one entry for torch mode will be
-used</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.flash.firingTime">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>flash.<wbr/>firing<wbr/>Time
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Firing time of flash relative to start of
-exposure</p>
- </td>
-
- <td class="entry_units">
- nanoseconds
- </td>
-
- <td class="entry_range">
- <p>0-(exposure time-flash duration)</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Clamped to (0,<wbr/> exposure time - flash
-duration).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.flash.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>flash.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Do not fire the flash for this capture.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SINGLE</span>
- <span class="entry_type_enum_notes"><p>If the flash is available and charged,<wbr/> fire flash
-for this capture.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">TORCH</span>
- <span class="entry_type_enum_notes"><p>Transition flash to continuously on.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
-                    <p>The desired mode for the camera device's flash control.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-                    <p>This control is only effective when the flash unit is available
-(<code><a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> == true</code>).<wbr/></p>
-<p>When this control is used,<wbr/> the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> must be set to ON or OFF.<wbr/>
-Otherwise,<wbr/> the camera device auto-exposure related flash control (ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/>
-ON_<wbr/>ALWAYS_<wbr/>FLASH,<wbr/> or ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE) will override this control.<wbr/></p>
-<p>When set to OFF,<wbr/> the camera device will not fire flash for this capture.<wbr/></p>
-<p>When set to SINGLE,<wbr/> the camera device will fire flash regardless of the camera
-device's auto-exposure routine's result.<wbr/> When used in the still capture case,<wbr/> this
-control should be used along with auto-exposure (AE) precapture metering sequence
-(<a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>),<wbr/> otherwise,<wbr/> the image may be incorrectly exposed.<wbr/></p>
-<p>When set to TORCH,<wbr/> the flash will be on continuously.<wbr/> This mode can be used
-for use cases such as preview,<wbr/> auto-focus assist,<wbr/> still capture,<wbr/> or video recording.<wbr/></p>
-<p>The flash status will be reported by <a href="#dynamic_android.flash.state">android.<wbr/>flash.<wbr/>state</a> in the capture result metadata.<wbr/></p>
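-<p>As a non-normative sketch of the TORCH case through the public camera2 API,<wbr/> assuming
-<code>builder</code> is an existing CaptureRequest.Builder for a repeating preview request:</p>
-<pre><code>// Sketch only: AE must be ON or OFF for FLASH_MODE to take effect.
-builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
-builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
-</code></pre>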
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.flash.info.available">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>flash.<wbr/>info.<wbr/>available
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">FALSE</span>
- </li>
- <li>
- <span class="entry_type_enum_name">TRUE</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether this camera device has a
-flash unit.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Will be <code>false</code> if no flash is available.<wbr/></p>
-<p>If there is no flash unit,<wbr/> none of the flash controls do
-anything.<wbr/></p>
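-<p>A minimal,<wbr/> non-normative sketch of how an application might gate its use of the flash
-controls,<wbr/> assuming <code>characteristics</code> is this device's CameraCharacteristics:</p>
-<pre><code>// Sketch only: skip all android.flash.* controls when no flash unit exists.
-Boolean hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
-if (hasFlash == null || !hasFlash) {
-    // Do not set flash controls; they would have no effect on this device.
-}
-</code></pre>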
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.flash.info.chargeDuration">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>flash.<wbr/>info.<wbr/>charge<wbr/>Duration
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Time taken before flash can fire
-again</p>
- </td>
-
- <td class="entry_units">
- nanoseconds
- </td>
-
- <td class="entry_range">
- <p>0-1e9</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>1 second too long/<wbr/>too short for recharge? Should
-this be power-dependent?</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
-
-
- <tr class="entry" id="static_android.flash.colorTemperature">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>flash.<wbr/>color<wbr/>Temperature
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The x,<wbr/>y whitepoint of the
-flash</p>
- </td>
-
- <td class="entry_units">
- pair of floats
- </td>
-
- <td class="entry_range">
- <p>0-1 for both</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.flash.maxEnergy">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>flash.<wbr/>max<wbr/>Energy
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Max energy output of the flash for a full
-power single flash</p>
- </td>
-
- <td class="entry_units">
- lumen-seconds
- </td>
-
- <td class="entry_range">
- <p>>= 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.flash.firingPower">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>flash.<wbr/>firing<wbr/>Power
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Power for flash firing/<wbr/>torch</p>
- </td>
-
- <td class="entry_units">
- 10 is max power; 0 is no flash.<wbr/> Linear
- </td>
-
- <td class="entry_range">
- <p>0 - 10</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Power for snapshot may use a different scale than
-for torch mode.<wbr/> Only one entry for torch mode will be
-used</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.flash.firingTime">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>flash.<wbr/>firing<wbr/>Time
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Firing time of flash relative to start of
-exposure</p>
- </td>
-
- <td class="entry_units">
- nanoseconds
- </td>
-
- <td class="entry_range">
- <p>0-(exposure time-flash duration)</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Clamped to (0,<wbr/> exposure time - flash
-duration).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.flash.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>flash.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Do not fire the flash for this capture.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SINGLE</span>
- <span class="entry_type_enum_notes"><p>If the flash is available and charged,<wbr/> fire flash
-for this capture.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">TORCH</span>
- <span class="entry_type_enum_notes"><p>Transition flash to continuously on.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
-                    <p>The desired mode for the camera device's flash control.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-                    <p>This control is only effective when the flash unit is available
-(<code><a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> == true</code>).<wbr/></p>
-<p>When this control is used,<wbr/> the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> must be set to ON or OFF.<wbr/>
-Otherwise,<wbr/> the camera device auto-exposure related flash control (ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/>
-ON_<wbr/>ALWAYS_<wbr/>FLASH,<wbr/> or ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE) will override this control.<wbr/></p>
-<p>When set to OFF,<wbr/> the camera device will not fire flash for this capture.<wbr/></p>
-<p>When set to SINGLE,<wbr/> the camera device will fire flash regardless of the camera
-device's auto-exposure routine's result.<wbr/> When used in the still capture case,<wbr/> this
-control should be used along with auto-exposure (AE) precapture metering sequence
-(<a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>),<wbr/> otherwise,<wbr/> the image may be incorrectly exposed.<wbr/></p>
-<p>When set to TORCH,<wbr/> the flash will be on continuously.<wbr/> This mode can be used
-for use cases such as preview,<wbr/> auto-focus assist,<wbr/> still capture,<wbr/> or video recording.<wbr/></p>
-<p>The flash status will be reported by <a href="#dynamic_android.flash.state">android.<wbr/>flash.<wbr/>state</a> in the capture result metadata.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.flash.state">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>flash.<wbr/>state
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">UNAVAILABLE</span>
- <span class="entry_type_enum_notes"><p>No flash on camera.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CHARGING</span>
- <span class="entry_type_enum_notes"><p>Flash is charging and cannot be fired.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">READY</span>
- <span class="entry_type_enum_notes"><p>Flash is ready to fire.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FIRED</span>
- <span class="entry_type_enum_notes"><p>Flash fired for this capture.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PARTIAL</span>
- <span class="entry_type_enum_notes"><p>Flash partially illuminated this frame.<wbr/></p>
-<p>This is usually due to the next or previous frame having
-the flash fire,<wbr/> and the flash spilling into this capture
-due to hardware limitations.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Current state of the flash
-unit.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-                    <p>When the camera device doesn't have a flash unit
-(i.<wbr/>e.<wbr/> <code><a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> == false</code>),<wbr/> this state will always be UNAVAILABLE.<wbr/>
-Other states indicate the current flash status.<wbr/></p>
-<p>In certain conditions,<wbr/> this will be available on LEGACY devices:</p>
-<ul>
-<li>Flash-less cameras always return UNAVAILABLE.<wbr/></li>
-<li>Using <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>==</code> ON_<wbr/>ALWAYS_<wbr/>FLASH
- will always return FIRED.<wbr/></li>
-<li>Using <a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> <code>==</code> TORCH
- will always return FIRED.<wbr/></li>
-</ul>
-<p>In all other conditions the state will not be available on
-LEGACY devices (i.<wbr/>e.<wbr/> it will be <code>null</code>).<wbr/></p>
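-<p>A non-normative sketch of reading this state from a capture result via the public camera2
-API,<wbr/> assuming <code>result</code> is the TotalCaptureResult for the frame of interest:</p>
-<pre><code>// Sketch only: determine whether the flash actually fired for this capture.
-Integer flashState = result.get(CaptureResult.FLASH_STATE);
-boolean fired = false;
-if (flashState != null) {
-    fired = (flashState == CameraMetadata.FLASH_STATE_FIRED);
-}
-</code></pre>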
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_hotPixel" class="section">hotPixel</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.hotPixel.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>hot<wbr/>Pixel.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No hot pixel correction is applied.<wbr/></p>
-<p>The frame rate must not be reduced relative to sensor raw output
-for this option.<wbr/></p>
-<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Hot pixel correction is applied,<wbr/> without reducing frame
-rate relative to sensor raw output.<wbr/></p>
-<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>High-quality hot pixel correction is applied,<wbr/> at a cost
-of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
-<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operational mode for hot pixel correction.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.hotPixel.availableHotPixelModes">android.<wbr/>hot<wbr/>Pixel.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Hotpixel correction interpolates out,<wbr/> or otherwise removes,<wbr/> pixels
-that do not accurately measure the incoming light (i.<wbr/>e.<wbr/> pixels that
-are stuck at an arbitrary value or are oversensitive).<wbr/></p>
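-<p>A non-normative sketch of selecting a mode through the public camera2 API,<wbr/> assuming
-<code>characteristics</code> and a request <code>builder</code> already exist:</p>
-<pre><code>// Sketch only: prefer HIGH_QUALITY hot pixel correction when it is advertised.
-int[] modes = characteristics.get(
-        CameraCharacteristics.HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES);
-for (int mode : modes) {
-    if (mode == CameraMetadata.HOT_PIXEL_MODE_HIGH_QUALITY) {
-        builder.set(CaptureRequest.HOT_PIXEL_MODE, mode);
-    }
-}
-</code></pre>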
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.hotPixel.availableHotPixelModes">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>hot<wbr/>Pixel.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of hot pixel correction modes for <a href="#controls_android.hotPixel.mode">android.<wbr/>hot<wbr/>Pixel.<wbr/>mode</a> that are supported by this
-camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.hotPixel.mode">android.<wbr/>hot<wbr/>Pixel.<wbr/>mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>FULL mode camera devices will always support FAST.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>To avoid performance issues,<wbr/> there will be significantly fewer hot
-pixels than actual pixels on the camera sensor.<wbr/>
-HAL must support both FAST and HIGH_<wbr/>QUALITY if hot pixel correction control is available
-on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
-That is,<wbr/> if the highest quality implementation on the camera device does not slow down
-capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.hotPixel.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>hot<wbr/>Pixel.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No hot pixel correction is applied.<wbr/></p>
-<p>The frame rate must not be reduced relative to sensor raw output
-for this option.<wbr/></p>
-<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Hot pixel correction is applied,<wbr/> without reducing frame
-rate relative to sensor raw output.<wbr/></p>
-<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>High-quality hot pixel correction is applied,<wbr/> at a cost
-of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
-<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operational mode for hot pixel correction.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.hotPixel.availableHotPixelModes">android.<wbr/>hot<wbr/>Pixel.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Hotpixel correction interpolates out,<wbr/> or otherwise removes,<wbr/> pixels
-that do not accurately measure the incoming light (i.<wbr/>e.<wbr/> pixels that
-are stuck at an arbitrary value or are oversensitive).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_jpeg" class="section">jpeg</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.jpeg.gpsLocation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>jpeg.<wbr/>gps<wbr/>Location
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [java_public as location]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A location object to use when generating image GPS metadata.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Setting a location object in a request will include the GPS coordinates of the location
-into any JPEG images captured based on the request.<wbr/> These coordinates can then be
-viewed by anyone who receives the JPEG image.<wbr/></p>
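-<p>A one-line,<wbr/> non-normative sketch using the public camera2 API,<wbr/> assuming <code>location</code> is an
-android.location.Location obtained by the application and <code>builder</code> is a CaptureRequest.Builder:</p>
-<pre><code>// Sketch only: attach GPS metadata to JPEGs produced by this request.
-builder.set(CaptureRequest.JPEG_GPS_LOCATION, location);
-</code></pre>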
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.jpeg.gpsCoordinates">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>jpeg.<wbr/>gps<wbr/>Coordinates
- </td>
- <td class="entry_type">
- <span class="entry_type_name">double</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">latitude,<wbr/> longitude,<wbr/> altitude.<wbr/> First two in degrees,<wbr/> the third in meters</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>GPS coordinates to include in output JPEG
-EXIF.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>(-180 - 180],<wbr/> [-90,<wbr/>90],<wbr/> [-inf,<wbr/> inf]</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.jpeg.gpsProcessingMethod">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>jpeg.<wbr/>gps<wbr/>Processing<wbr/>Method
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [ndk_public as string]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
-            <p>32 characters describing the GPS algorithm to
-include in EXIF.<wbr/></p>
- </td>
-
- <td class="entry_units">
- UTF-8 null-terminated string
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.jpeg.gpsTimestamp">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>jpeg.<wbr/>gps<wbr/>Timestamp
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Time GPS fix was made to include in
-EXIF.<wbr/></p>
- </td>
-
- <td class="entry_units">
- UTC in seconds since January 1,<wbr/> 1970
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.jpeg.orientation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>jpeg.<wbr/>orientation
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The orientation for a JPEG image.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Degrees in multiples of 90
- </td>
-
- <td class="entry_range">
- <p>0,<wbr/> 90,<wbr/> 180,<wbr/> 270</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-                    <p>The clockwise rotation angle in degrees,<wbr/> relative to the orientation
-of the camera,<wbr/> that the JPEG picture needs to be rotated by to be viewed
-upright.<wbr/></p>
-<p>Camera devices may either encode this value into the JPEG EXIF header,<wbr/> or
-rotate the image data to match this orientation.<wbr/> When the image data is rotated,<wbr/>
-the thumbnail data will also be rotated.<wbr/></p>
-<p>Note that this orientation is relative to the orientation of the camera sensor,<wbr/> given
-by <a href="#static_android.sensor.orientation">android.<wbr/>sensor.<wbr/>orientation</a>.<wbr/></p>
-<p>To translate from the device orientation given by the Android sensor APIs,<wbr/> the following
-sample code may be used:</p>
-<pre><code>private int getJpegOrientation(CameraCharacteristics c,<wbr/> int deviceOrientation) {
- if (deviceOrientation == android.<wbr/>view.<wbr/>Orientation<wbr/>Event<wbr/>Listener.<wbr/>ORIENTATION_<wbr/>UNKNOWN) return 0;
- int sensorOrientation = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>SENSOR_<wbr/>ORIENTATION);
-
- //<wbr/> Round device orientation to a multiple of 90
- deviceOrientation = (deviceOrientation + 45) /<wbr/> 90 * 90;
-
- //<wbr/> Reverse device orientation for front-facing cameras
- boolean facingFront = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>LENS_<wbr/>FACING) == Camera<wbr/>Characteristics.<wbr/>LENS_<wbr/>FACING_<wbr/>FRONT;
- if (facingFront) deviceOrientation = -deviceOrientation;
-
- //<wbr/> Calculate desired JPEG orientation relative to camera orientation to make
- //<wbr/> the image upright relative to the device orientation
- int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
-
- return jpegOrientation;
-}
-</code></pre>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.jpeg.quality">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>jpeg.<wbr/>quality
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Compression quality of the final JPEG
-image.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>1-100; larger is higher quality</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>85-95 is typical usage range.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.jpeg.thumbnailQuality">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Quality
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Compression quality of JPEG
-thumbnail.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>1-100; larger is higher quality</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.jpeg.thumbnailSize">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as size]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Resolution of embedded JPEG thumbnail.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.jpeg.availableThumbnailSizes">android.<wbr/>jpeg.<wbr/>available<wbr/>Thumbnail<wbr/>Sizes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-                    <p>When set to (0,<wbr/> 0),<wbr/> the JPEG EXIF will not contain a thumbnail,<wbr/>
-but the captured JPEG will still be a valid image.<wbr/></p>
-<p>For best results,<wbr/> when issuing a request for a JPEG image,<wbr/> the thumbnail size selected
-should have the same aspect ratio as the main JPEG output.<wbr/></p>
-<p>If the thumbnail image aspect ratio differs from the JPEG primary image aspect
-ratio,<wbr/> the camera device creates the thumbnail by cropping it from the primary image.<wbr/>
-For example,<wbr/> if the primary image has a 4:3 aspect ratio and the thumbnail image has a
-16:9 aspect ratio,<wbr/> the primary image will be cropped vertically (letterboxed) to
-generate the thumbnail image.<wbr/> The thumbnail image will always have a smaller Field
-Of View (FOV) than the primary image when aspect ratios differ.<wbr/></p>
-<p>When an <a href="#controls_android.jpeg.orientation">android.<wbr/>jpeg.<wbr/>orientation</a> of non-zero degree is requested,<wbr/>
-the camera device will handle thumbnail rotation in one of the following ways:</p>
-<ul>
-<li>Set the <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>
- and keep jpeg and thumbnail image data unrotated.<wbr/></li>
-<li>Rotate the jpeg and thumbnail image data and not set
- <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>.<wbr/> In this
-  case,<wbr/> LIMITED or FULL hardware level devices will report rotated thumbnail size in
- capture result,<wbr/> so the width and height will be interchanged if 90 or 270 degree
- orientation is requested.<wbr/> LEGACY device will always report unrotated thumbnail
- size.<wbr/></li>
-</ul>
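-<p>A non-normative sketch of choosing an aspect-ratio-matched thumbnail size through the public
-camera2 API; <code>characteristics</code>,<wbr/> <code>builder</code> and the chosen JPEG output size <code>jpegSize</code> are assumed to
-already exist:</p>
-<pre><code>// Sketch only: pick the largest listed thumbnail size matching the JPEG aspect ratio.
-Size[] sizes = characteristics.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
-Size chosen = new Size(0, 0);  // (0, 0) disables the thumbnail entirely
-for (Size s : sizes) {
-    if (s.getWidth() == 0) {
-        continue;  // skip the "no thumbnail" entry
-    }
-    if (s.getWidth() * jpegSize.getHeight() == s.getHeight() * jpegSize.getWidth()) {
-        chosen = s;  // sizes are sorted by area, so this keeps the largest match
-    }
-}
-builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, chosen);
-</code></pre>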
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-                    <p>The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail.<wbr/>
-The cropping must be done on the primary jpeg image rather than the sensor active array.<wbr/>
-The stream cropping rule specified by "S5.<wbr/> Cropping" in camera3.<wbr/>h doesn't apply to the
-thumbnail image cropping.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.jpeg.availableThumbnailSizes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>jpeg.<wbr/>available<wbr/>Thumbnail<wbr/>Sizes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2 x n
- </span>
- <span class="entry_type_visibility"> [public as size]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of JPEG thumbnail sizes for <a href="#controls_android.jpeg.thumbnailSize">android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Size</a> supported by this
-camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This list will include at least one non-zero resolution,<wbr/> plus <code>(0,<wbr/>0)</code> for indicating no
-thumbnail should be generated.<wbr/></p>
-<p>The following conditions will be satisfied for this size list:</p>
-<ul>
-<li>The sizes will be sorted by increasing pixel area (width x height).<wbr/>
-If several resolutions have the same area,<wbr/> they will be sorted by increasing width.<wbr/></li>
-<li>The aspect ratio of the largest thumbnail size will be same as the
-aspect ratio of largest JPEG output size in <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a>.<wbr/>
-The largest size is defined as the size that has the largest pixel area
-in a given size list.<wbr/></li>
-<li>Each output JPEG size in <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> will have at least
-one corresponding size that has the same aspect ratio in availableThumbnailSizes,<wbr/>
-and vice versa.<wbr/></li>
-<li>All non-<code>(0,<wbr/> 0)</code> sizes will have non-zero widths and heights.<wbr/></li>
-</ul>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.jpeg.maxSize">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>jpeg.<wbr/>max<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Maximum size in bytes for the compressed
-JPEG buffer</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Must be large enough to fit any JPEG produced by
-the camera</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is used for sizing the gralloc buffers for
-JPEG</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.jpeg.gpsLocation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>jpeg.<wbr/>gps<wbr/>Location
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [java_public as location]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A location object to use when generating image GPS metadata.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Setting a location object in a request will include the GPS coordinates of the location
-into any JPEG images captured based on the request.<wbr/> These coordinates can then be
-viewed by anyone who receives the JPEG image.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.jpeg.gpsCoordinates">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>jpeg.<wbr/>gps<wbr/>Coordinates
- </td>
- <td class="entry_type">
- <span class="entry_type_name">double</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">latitude,<wbr/> longitude,<wbr/> altitude.<wbr/> First two in degrees,<wbr/> the third in meters</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>GPS coordinates to include in output JPEG
-EXIF.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>(-180 - 180],<wbr/> [-90,<wbr/>90],<wbr/> [-inf,<wbr/> inf]</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.jpeg.gpsProcessingMethod">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>jpeg.<wbr/>gps<wbr/>Processing<wbr/>Method
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [ndk_public as string]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
-            <p>32 characters describing the GPS algorithm to
-include in EXIF.<wbr/></p>
- </td>
-
- <td class="entry_units">
- UTF-8 null-terminated string
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.jpeg.gpsTimestamp">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>jpeg.<wbr/>gps<wbr/>Timestamp
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Time GPS fix was made to include in
-EXIF.<wbr/></p>
- </td>
-
- <td class="entry_units">
- UTC in seconds since January 1,<wbr/> 1970
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.jpeg.orientation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>jpeg.<wbr/>orientation
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The orientation for a JPEG image.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Degrees in multiples of 90
- </td>
-
- <td class="entry_range">
- <p>0,<wbr/> 90,<wbr/> 180,<wbr/> 270</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-                    <p>The clockwise rotation angle in degrees,<wbr/> relative to the orientation
-of the camera,<wbr/> that the JPEG picture needs to be rotated by to be viewed
-upright.<wbr/></p>
-<p>Camera devices may either encode this value into the JPEG EXIF header,<wbr/> or
-rotate the image data to match this orientation.<wbr/> When the image data is rotated,<wbr/>
-the thumbnail data will also be rotated.<wbr/></p>
-<p>Note that this orientation is relative to the orientation of the camera sensor,<wbr/> given
-by <a href="#static_android.sensor.orientation">android.<wbr/>sensor.<wbr/>orientation</a>.<wbr/></p>
-<p>To translate from the device orientation given by the Android sensor APIs,<wbr/> the following
-sample code may be used:</p>
-<pre><code>private int getJpegOrientation(CameraCharacteristics c,<wbr/> int deviceOrientation) {
- if (deviceOrientation == android.<wbr/>view.<wbr/>Orientation<wbr/>Event<wbr/>Listener.<wbr/>ORIENTATION_<wbr/>UNKNOWN) return 0;
- int sensorOrientation = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>SENSOR_<wbr/>ORIENTATION);
-
- //<wbr/> Round device orientation to a multiple of 90
- deviceOrientation = (deviceOrientation + 45) /<wbr/> 90 * 90;
-
- //<wbr/> Reverse device orientation for front-facing cameras
- boolean facingFront = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>LENS_<wbr/>FACING) == Camera<wbr/>Characteristics.<wbr/>LENS_<wbr/>FACING_<wbr/>FRONT;
- if (facingFront) deviceOrientation = -deviceOrientation;
-
- //<wbr/> Calculate desired JPEG orientation relative to camera orientation to make
- //<wbr/> the image upright relative to the device orientation
- int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
-
- return jpegOrientation;
-}
-</code></pre>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.jpeg.quality">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>jpeg.<wbr/>quality
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Compression quality of the final JPEG
-image.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>1-100; larger is higher quality</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-            <p>85-95 is the typical usage range.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.jpeg.size">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>jpeg.<wbr/>size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The size of the compressed JPEG image,<wbr/> in
-bytes</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If no JPEG output is produced for the request,<wbr/>
-this must be 0.<wbr/></p>
-<p>Otherwise,<wbr/> this describes the real size of the compressed
-JPEG image placed in the output stream.<wbr/> More specifically,<wbr/>
-if <a href="#static_android.jpeg.maxSize">android.<wbr/>jpeg.<wbr/>max<wbr/>Size</a> = 1000000,<wbr/> and a specific capture
-has <a href="#dynamic_android.jpeg.size">android.<wbr/>jpeg.<wbr/>size</a> = 500000,<wbr/> then the output buffer from
-the JPEG stream will be 1000000 bytes,<wbr/> of which the first
-500000 make up the real data.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.jpeg.thumbnailQuality">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Quality
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Compression quality of JPEG
-thumbnail.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>1-100; larger is higher quality</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.jpeg.thumbnailSize">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as size]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Resolution of embedded JPEG thumbnail.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.jpeg.availableThumbnailSizes">android.<wbr/>jpeg.<wbr/>available<wbr/>Thumbnail<wbr/>Sizes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-            <p>When set to (0,<wbr/> 0),<wbr/> the JPEG EXIF will not contain a thumbnail,<wbr/>
-but the captured JPEG will still be a valid image.<wbr/></p>
-<p>For best results,<wbr/> when issuing a request for a JPEG image,<wbr/> the thumbnail size selected
-should have the same aspect ratio as the main JPEG output.<wbr/></p>
-<p>If the thumbnail image aspect ratio differs from the JPEG primary image aspect
-ratio,<wbr/> the camera device creates the thumbnail by cropping it from the primary image.<wbr/>
-For example,<wbr/> if the primary image has a 4:3 aspect ratio and the thumbnail image has a
-16:9 aspect ratio,<wbr/> the primary image will be cropped vertically (letterboxed) to
-generate the thumbnail image.<wbr/> The thumbnail image will always have a smaller Field
-Of View (FOV) than the primary image when aspect ratios differ.<wbr/></p>
-<p>When an <a href="#controls_android.jpeg.orientation">android.<wbr/>jpeg.<wbr/>orientation</a> of non-zero degree is requested,<wbr/>
-the camera device will handle thumbnail rotation in one of the following ways:</p>
-<ul>
-<li>Set the <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>
- and keep jpeg and thumbnail image data unrotated.<wbr/></li>
-<li>Rotate the jpeg and thumbnail image data and not set
- <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>.<wbr/> In this
-  case,<wbr/> LIMITED or FULL hardware level devices will report rotated thumbnail size in
-  capture result,<wbr/> so the width and height will be interchanged if 90 or 270 degree
-  orientation is requested.<wbr/> LEGACY devices will always report unrotated thumbnail
- size.<wbr/></li>
-</ul>
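-<p>As an illustration only (this helper is not part of the metadata definition),<wbr/>
-an application might pick the supported thumbnail size whose aspect ratio is closest
-to the main JPEG output:</p>
-<pre><code>private Size chooseThumbnailSize(CameraCharacteristics c, Size jpegSize) {
-    // Supported thumbnail sizes; (0, 0) means "no thumbnail".
-    Size[] sizes = c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
-    double targetRatio = (double) jpegSize.getWidth() / jpegSize.getHeight();
-    Size best = sizes[0];
-    double bestDiff = Double.MAX_VALUE;
-    for (Size s : sizes) {
-        if (s.getWidth() == 0 || s.getHeight() == 0) continue; // skip "no thumbnail"
-        double diff = Math.abs((double) s.getWidth() / s.getHeight() - targetRatio);
-        if (diff < bestDiff) { bestDiff = diff; best = s; }
-    }
-    return best;
-}
-</code></pre>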
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-            <p>The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail.<wbr/>
-The cropping must be done on the primary jpeg image rather than the sensor active array.<wbr/>
-The stream cropping rule specified by "S5.<wbr/> Cropping" in camera3.<wbr/>h doesn't apply to the
-thumbnail image cropping.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_lens" class="section">lens</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.lens.aperture">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>aperture
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired lens aperture size,<wbr/> as a ratio of lens focal length to the
-effective aperture diameter.<wbr/></p>
- </td>
-
- <td class="entry_units">
- The f-number (f/<wbr/>N)
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.lens.info.availableApertures">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Setting this value is only supported on the camera devices that have a variable
-aperture lens.<wbr/></p>
-<p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/>
-this can be set along with <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
-<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>
-to achieve manual exposure control.<wbr/></p>
-<p>The requested aperture value may take several frames to reach the
-requested value; the camera device will report the current (intermediate)
-aperture size in capture result metadata while the aperture is changing.<wbr/>
-While the aperture is still changing,<wbr/> <a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
-<p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is one of
-the ON modes,<wbr/> this will be overridden by the camera device
-auto-exposure algorithm; the overridden values are then provided
-back to the user in the corresponding result.<wbr/></p>
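-<p>A minimal sketch of manual exposure control with a fixed aperture,<wbr/> assuming
-<code>builder</code> is a CaptureRequest.<wbr/>Builder for a device that supports
-MANUAL_<wbr/>SENSOR and a variable aperture; the specific values are placeholders and
-must be taken from the device's static metadata:</p>
-<pre><code>builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
-builder.set(CaptureRequest.LENS_APERTURE, 2.8f);              // from availableApertures
-builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L);  // 10 ms, in nanoseconds
-builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);          // from sensitivityRange
-</code></pre>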
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.lens.filterDensity">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>filter<wbr/>Density
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired setting for the lens neutral density filter(s).<wbr/></p>
- </td>
-
- <td class="entry_units">
- Exposure Value (EV)
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.lens.info.availableFilterDensities">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control will not be supported on most camera devices.<wbr/></p>
-<p>Lens filters are typically used to lower the amount of light the
-sensor is exposed to (measured in steps of EV).<wbr/> As used here,<wbr/> an EV
-step is the standard logarithmic representation; filter density values are
-non-negative and inversely proportional to the amount of light
-hitting the sensor.<wbr/> For example,<wbr/> setting this to 0 would result
-in no reduction of the incoming light,<wbr/> and setting this to 2 would
-mean that the filter is set to reduce incoming light by two stops
-(allowing 1/<wbr/>4 of the prior amount of light to the sensor).<wbr/></p>
-<p>It may take several frames before the lens filter density changes
-to the requested value.<wbr/> While the filter density is still changing,<wbr/>
-<a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.lens.focalLength">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>focal<wbr/>Length
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired lens focal length; used for optical zoom.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Millimeters
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.lens.info.availableFocalLengths">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This setting controls the physical focal length of the camera
-device's lens.<wbr/> Changing the focal length changes the field of
-view of the camera device,<wbr/> and is usually used for optical zoom.<wbr/></p>
-<p>Like <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>,<wbr/> this
-setting won't be applied instantaneously,<wbr/> and it may take several
-frames before the lens can change to the requested focal length.<wbr/>
-While the focal length is still changing,<wbr/> <a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will
-be set to MOVING.<wbr/></p>
-<p>Optical zoom will not be supported on most devices.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.lens.focusDistance">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>focus<wbr/>Distance
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Desired distance to plane of sharpest focus,<wbr/>
-measured from frontmost surface of the lens.<wbr/></p>
- </td>
-
- <td class="entry_units">
- See android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details
- </td>
-
- <td class="entry_range">
- <p>>= 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control can be used for setting manual focus,<wbr/> on devices that support
-the MANUAL_<wbr/>SENSOR capability and have a variable-focus lens (see
-<a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>).<wbr/></p>
-<p>A value of <code>0.<wbr/>0f</code> means infinity focus.<wbr/> The value set will be clamped to
-<code>[0.<wbr/>0f,<wbr/> <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>]</code>.<wbr/></p>
-<p>Like <a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> this setting won't be applied
-instantaneously,<wbr/> and it may take several frames before the lens
-can move to the requested focus distance.<wbr/> While the lens is still moving,<wbr/>
-<a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
-<p>LEGACY devices support at most setting this to <code>0.<wbr/>0f</code>
-for infinity focus.<wbr/></p>
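-<p>A minimal manual-focus sketch,<wbr/> assuming <code>builder</code> and <code>c</code> are a
-CaptureRequest.<wbr/>Builder and CameraCharacteristics for a device with a variable-focus
-lens; the 2.<wbr/>0-diopter target is a placeholder:</p>
-<pre><code>// Reported as 0 for fixed-focus lenses.
-float minFocus = c.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
-float requested = Math.min(2.0f, minFocus); // clamp; 0.0f would mean infinity focus
-builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_OFF);
-builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, requested);
-</code></pre>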
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.lens.opticalStabilizationMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Optical stabilization is unavailable.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optical stabilization is enabled.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Sets whether the camera device uses optical image stabilization (OIS)
-when capturing images.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>OIS is used to compensate for motion blur due to small
-movements of the camera during capture.<wbr/> Unlike digital image
-stabilization (<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> OIS
-makes use of mechanical elements to stabilize the camera
-sensor,<wbr/> and thus allows for longer exposure times before
-camera shake becomes apparent.<wbr/></p>
-<p>Switching between different optical stabilization modes may take several
-frames to initialize; the camera device will report the current mode in
-capture result metadata.<wbr/> For example,<wbr/> when "ON" mode is requested,<wbr/> the
-optical stabilization mode in the first several capture results may still
-be "OFF",<wbr/> and it will become "ON" when the initialization is done.<wbr/></p>
-<p>If a camera device supports both OIS and digital image stabilization
-(<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> turning both modes on may produce undesirable
-interaction,<wbr/> so it is recommended not to enable both at the same time.<wbr/></p>
-<p>Not all devices will support OIS; see
-<a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a> for
-available controls.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.lens.info.availableApertures">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of aperture size values for <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a> that are
-supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- The aperture f-number
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If the camera device doesn't support a variable lens aperture,<wbr/>
-this list will contain only one value,<wbr/> which is the fixed aperture size.<wbr/></p>
-<p>If the camera device supports a variable aperture,<wbr/> the aperture values
-in this list will be sorted in ascending order.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.info.availableFilterDensities">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of neutral density filter values for
-<a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> that are supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Exposure value (EV)
- </td>
-
- <td class="entry_range">
- <p>Values are >= 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If a neutral density filter is not supported by this camera device,<wbr/>
-this list will contain only 0.<wbr/> Otherwise,<wbr/> this list will include every
-filter density supported by the camera device,<wbr/> in ascending order.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.info.availableFocalLengths">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">The list of available focal lengths</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of focal lengths for <a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a> that are supported by this camera
-device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Millimeters
- </td>
-
- <td class="entry_range">
- <p>Values are > 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If optical zoom is not supported,<wbr/> this list will only contain
-a single value corresponding to the fixed focal length of the
-device.<wbr/> Otherwise,<wbr/> this list will include every focal length supported
-by the camera device,<wbr/> in ascending order.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.info.availableOpticalStabilization">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of optical image stabilization (OIS) modes for
-<a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a> that are supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If OIS is not supported by a given camera device,<wbr/> this list will
-contain only OFF.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.info.hyperfocalDistance">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>info.<wbr/>hyperfocal<wbr/>Distance
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Hyperfocal distance for this lens.<wbr/></p>
- </td>
-
- <td class="entry_units">
- See android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details
- </td>
-
- <td class="entry_range">
-            <p>If the lens is fixed focus,<wbr/> >= 0.<wbr/> If the lens has a focuser unit,<wbr/> the value is
-within <code>(0.<wbr/>0f,<wbr/> <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>]</code></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If the lens is not fixed focus,<wbr/> the camera device will report this
-field when <a href="#static_android.lens.info.focusDistanceCalibration">android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration</a> is APPROXIMATE or CALIBRATED.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.info.minimumFocusDistance">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Shortest distance from frontmost surface
-of the lens that can be brought into sharp focus.<wbr/></p>
- </td>
-
- <td class="entry_units">
- See android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details
- </td>
-
- <td class="entry_range">
- <p>>= 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If the lens is fixed-focus,<wbr/> this will be
-0.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Mandatory for FULL devices; LIMITED devices
-must always set this value to 0 for fixed-focus lenses,<wbr/> and may omit
-the minimum focus distance otherwise.<wbr/></p>
-<p>This field is also mandatory for all devices advertising
-the MANUAL_<wbr/>SENSOR capability.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.info.shadingMapSize">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>info.<wbr/>shading<wbr/>Map<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [ndk_public as size]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">width and height (N,<wbr/> M) of lens shading map provided by the camera device.<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Dimensions of lens shading map.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Both values >= 1</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The map should be on the order of 30-40 rows and columns,<wbr/> and
-must be smaller than 64x64.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.info.focusDistanceCalibration">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">UNCALIBRATED</span>
- <span class="entry_type_enum_notes"><p>The lens focus distance is not accurate,<wbr/> and the units used for
-<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> do not correspond to any physical units.<wbr/></p>
-<p>Setting the lens to the same focus distance on separate occasions may
-result in a different real focus distance,<wbr/> depending on factors such
-as the orientation of the device,<wbr/> the age of the focusing mechanism,<wbr/>
-and the device temperature.<wbr/> The focus distance value will still be
-in the range of <code>[0,<wbr/> <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>]</code>,<wbr/> where 0
-represents the farthest focus.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">APPROXIMATE</span>
- <span class="entry_type_enum_notes"><p>The lens focus distance is measured in diopters.<wbr/></p>
-<p>However,<wbr/> setting the lens to the same focus distance
-on separate occasions may result in a different real
-focus distance,<wbr/> depending on factors such as the
-orientation of the device,<wbr/> the age of the focusing
-mechanism,<wbr/> and the device temperature.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CALIBRATED</span>
- <span class="entry_type_enum_notes"><p>The lens focus distance is measured in diopters,<wbr/> and
-is calibrated.<wbr/></p>
-<p>The lens mechanism is calibrated so that setting the
-same focus distance is repeatable on multiple
-occasions with good accuracy,<wbr/> and the focus distance
-corresponds to the real physical distance to the plane
-of best focus.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The lens focus distance calibration quality.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The lens focus distance calibration quality determines the reliability of
-focus related metadata entries,<wbr/> i.<wbr/>e.<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
-<a href="#dynamic_android.lens.focusRange">android.<wbr/>lens.<wbr/>focus<wbr/>Range</a>,<wbr/> <a href="#static_android.lens.info.hyperfocalDistance">android.<wbr/>lens.<wbr/>info.<wbr/>hyperfocal<wbr/>Distance</a>,<wbr/> and
-<a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>.<wbr/></p>
-<p>APPROXIMATE and CALIBRATED devices report the focus metadata in
-units of diopters (1/<wbr/>meter),<wbr/> so <code>0.<wbr/>0f</code> represents focusing at infinity,<wbr/>
-and increasing positive numbers represent focusing closer and closer
-to the camera device.<wbr/> The focus distance control also uses diopters
-on these devices.<wbr/></p>
-<p>UNCALIBRATED devices do not use units that are directly comparable
-to any real physical measurement,<wbr/> but <code>0.<wbr/>0f</code> still represents farthest
-focus,<wbr/> and <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> represents the
-nearest focus the device can achieve.<wbr/></p>
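-<p>On APPROXIMATE and CALIBRATED devices the reported value can be converted to an
-approximate distance in meters,<wbr/> as in this small sketch (the helper name is
-illustrative only):</p>
-<pre><code>static double dioptersToMeters(float diopters) {
-    // 0.0f diopters means infinity focus.
-    return diopters > 0f ? 1.0 / diopters : Double.POSITIVE_INFINITY;
-}
-</code></pre>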
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-            <p>For devices that advertise APPROXIMATE quality or higher,<wbr/> a setting of 0 diopters (infinity
-focus) must work.<wbr/> When autofocus is disabled (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> == OFF)
-and the lens focus distance is set to 0 diopters
-(<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> == 0),<wbr/> the lens will move to focus at infinity
-and is stably focused at infinity even if the device tilts.<wbr/> It may take the
-lens some time to move; during the move the lens state should be MOVING and
-the output diopter value should be changing toward 0.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
-
-
- <tr class="entry" id="static_android.lens.facing">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>lens.<wbr/>facing
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">FRONT</span>
- <span class="entry_type_enum_notes"><p>The camera device faces the same direction as the device's screen.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">BACK</span>
- <span class="entry_type_enum_notes"><p>The camera device faces the opposite direction as the device's screen.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">EXTERNAL</span>
- <span class="entry_type_enum_notes"><p>The camera device is an external camera,<wbr/> and has no fixed facing relative to the
-device's screen.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Direction the camera faces relative to
-device screen.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.poseRotation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>pose<wbr/>Rotation
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The orientation of the camera relative to the sensor
-coordinate system.<wbr/></p>
- </td>
-
- <td class="entry_units">
-
- Quaternion coefficients
-
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The four coefficients that describe the quaternion
-rotation from the Android sensor coordinate system to a
-camera-aligned coordinate system where the X-axis is
-aligned with the long side of the image sensor,<wbr/> the Y-axis
-is aligned with the short side of the image sensor,<wbr/> and
-the Z-axis is aligned with the optical axis of the sensor.<wbr/></p>
-<p>To convert from the quaternion coefficients <code>(x,<wbr/>y,<wbr/>z,<wbr/>w)</code>
-to the axis of rotation <code>(a_<wbr/>x,<wbr/> a_<wbr/>y,<wbr/> a_<wbr/>z)</code> and rotation
-amount <code>theta</code>,<wbr/> the following formulas can be used:</p>
-<pre><code> theta = 2 * acos(w)
-a_<wbr/>x = x /<wbr/> sin(theta/<wbr/>2)
-a_<wbr/>y = y /<wbr/> sin(theta/<wbr/>2)
-a_<wbr/>z = z /<wbr/> sin(theta/<wbr/>2)
-</code></pre>
-<p>To create a 3x3 rotation matrix that applies the rotation
-defined by this quaternion,<wbr/> the following matrix can be
-used:</p>
-<pre><code>R = [ 1 - 2y^2 - 2z^2,<wbr/> 2xy - 2zw,<wbr/> 2xz + 2yw,<wbr/>
- 2xy + 2zw,<wbr/> 1 - 2x^2 - 2z^2,<wbr/> 2yz - 2xw,<wbr/>
- 2xz - 2yw,<wbr/> 2yz + 2xw,<wbr/> 1 - 2x^2 - 2y^2 ]
-</code></pre>
-<p>This matrix can then be used to apply the rotation to a
- column vector point with</p>
-<p><code>p' = Rp</code></p>
-<p>where <code>p</code> is in the device sensor coordinate system,<wbr/> and
- <code>p'</code> is in the camera-oriented coordinate system.<wbr/></p>
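-<p>As an illustration (not part of the metadata definition),<wbr/> the rotation matrix
-above can be computed from the reported coefficients as follows; the helper name is
-arbitrary:</p>
-<pre><code>// Row-major 3x3 rotation matrix R built from the quaternion (x, y, z, w).
-static float[] quaternionToMatrix(float x, float y, float z, float w) {
-    return new float[] {
-        1 - 2*y*y - 2*z*z,  2*x*y - 2*z*w,      2*x*z + 2*y*w,
-        2*x*y + 2*z*w,      1 - 2*x*x - 2*z*z,  2*y*z - 2*x*w,
-        2*x*z - 2*y*w,      2*y*z + 2*x*w,      1 - 2*x*x - 2*y*y
-    };
-}
-</code></pre>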
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.poseTranslation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>pose<wbr/>Translation
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Position of the camera optical center.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Meters
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The position of the camera device's lens optical center,<wbr/>
-as a three-dimensional vector <code>(x,<wbr/>y,<wbr/>z)</code>,<wbr/> relative to the
-optical center of the largest camera device facing in the
-same direction as this camera,<wbr/> in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate
-axes</a>.<wbr/> Note that only the axis definitions are shared with
-the sensor coordinate system,<wbr/> not the origin.<wbr/></p>
-<p>If this device is the largest or only camera device with a
-given facing,<wbr/> then this position will be <code>(0,<wbr/> 0,<wbr/> 0)</code>; a
-camera device with a lens optical center located 3 cm from
-the main sensor along the +X axis (to the right from the
-user's perspective) will report <code>(0.<wbr/>03,<wbr/> 0,<wbr/> 0)</code>.<wbr/></p>
-<p>To transform pixel coordinates between two cameras
-facing the same direction,<wbr/> first the source camera
-<a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a> must be corrected for.<wbr/> Then
-the source camera <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> needs
-to be applied,<wbr/> followed by the <a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a>
-of the source camera,<wbr/> the translation of the source camera
-relative to the destination camera,<wbr/> the
-<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the destination camera,<wbr/> and
-finally the inverse of <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a>
-of the destination camera.<wbr/> This obtains a
-radial-distortion-free coordinate in the destination
-camera pixel coordinates.<wbr/></p>
-<p>To compare this against a real image from the destination
-camera,<wbr/> the destination camera image then needs to be
-corrected for radial distortion before comparison or
-sampling.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.intrinsicCalibration">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 5
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The parameters for this camera device's intrinsic
-calibration.<wbr/></p>
- </td>
-
- <td class="entry_units">
-
- Pixels in the
- android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size
- coordinate system.<wbr/>
-
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The five calibration parameters that describe the
-transform from camera-centric 3D coordinates to sensor
-pixel coordinates:</p>
-<pre><code>[f_<wbr/>x,<wbr/> f_<wbr/>y,<wbr/> c_<wbr/>x,<wbr/> c_<wbr/>y,<wbr/> s]
-</code></pre>
-<p>Where <code>f_<wbr/>x</code> and <code>f_<wbr/>y</code> are the horizontal and vertical
-focal lengths,<wbr/> <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code> is the position of the optical
-axis,<wbr/> and <code>s</code> is a skew parameter for the sensor plane not
-being aligned with the lens plane.<wbr/></p>
-<p>These are typically used within a transformation matrix K:</p>
-<pre><code>K = [ f_<wbr/>x,<wbr/> s,<wbr/> c_<wbr/>x,<wbr/>
- 0,<wbr/> f_<wbr/>y,<wbr/> c_<wbr/>y,<wbr/>
-      0,<wbr/>   0,<wbr/>    1 ]
-</code></pre>
-<p>which can then be combined with the camera pose rotation
-<code>R</code> and translation <code>t</code> (<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> and
-<a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a>,<wbr/> respective) to calculate the
-complete transform from world coordinates to pixel
-coordinates:</p>
-<pre><code>P = [ K 0 * [ R t
- 0 1 ] 0 1 ]
-</code></pre>
-<p>and with <code>p_<wbr/>w</code> being a point in the world coordinate system
-and <code>p_<wbr/>s</code> being a point in the camera active pixel array
-coordinate system,<wbr/> and with the mapping including the
-homogeneous division by z:</p>
-<pre><code> p_<wbr/>h = (x_<wbr/>h,<wbr/> y_<wbr/>h,<wbr/> z_<wbr/>h) = P p_<wbr/>w
-p_<wbr/>s = p_<wbr/>h /<wbr/> z_<wbr/>h
-</code></pre>
-<p>so <code>[x_<wbr/>s,<wbr/> y_<wbr/>s]</code> are the pixel coordinates of the world
-point,<wbr/> <code>z_<wbr/>s = 1</code>,<wbr/> and <code>w_<wbr/>s</code> is a measurement of disparity
-(depth) in pixel coordinates.<wbr/></p>
-<p>Note that the coordinate system for this transform is the
-<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> system,<wbr/>
-where <code>(0,<wbr/>0)</code> is the top-left of the
-preCorrectionActiveArraySize rectangle.<wbr/> Once the pose and
-intrinsic calibration transforms have been applied to a
-world point,<wbr/> then the <a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a>
-transform needs to be applied,<wbr/> and the result adjusted to
-be in the <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> coordinate
-system (where <code>(0,<wbr/> 0)</code> is the top-left of the
-activeArraySize rectangle),<wbr/> to determine the final pixel
-coordinate of the world point for processed (non-RAW)
-output buffers.<wbr/></p>
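-<p>A minimal sketch of the intrinsic projection step,<wbr/> assuming the pose transform
-has already been applied so the point is in camera-centric coordinates; the helper
-name is illustrative only:</p>
-<pre><code>// Project (X, Y, Z) into preCorrectionActiveArraySize pixel coordinates
-// using the five parameters [f_x, f_y, c_x, c_y, s].
-static float[] projectToPixels(float[] k, float X, float Y, float Z) {
-    float fx = k[0], fy = k[1], cx = k[2], cy = k[3], s = k[4];
-    float xh = fx * X + s * Y + cx * Z;
-    float yh = fy * Y + cy * Z;
-    float zh = Z;                          // homogeneous divide by z
-    return new float[] { xh / zh, yh / zh };
-}
-</code></pre>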
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.lens.radialDistortion">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>radial<wbr/>Distortion
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 6
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The correction coefficients to correct for this camera device's
-radial and tangential lens distortion.<wbr/></p>
- </td>
-
- <td class="entry_units">
-
- Unitless coefficients.<wbr/>
-
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Four radial distortion coefficients <code>[kappa_<wbr/>0,<wbr/> kappa_<wbr/>1,<wbr/> kappa_<wbr/>2,<wbr/>
-kappa_<wbr/>3]</code> and two tangential distortion coefficients
-<code>[kappa_<wbr/>4,<wbr/> kappa_<wbr/>5]</code> that can be used to correct the
-lens's geometric distortion with the mapping equations:</p>
-<pre><code> x_<wbr/>c = x_<wbr/>i * ( kappa_<wbr/>0 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
- kappa_<wbr/>4 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>5 * ( r^2 + 2 * x_<wbr/>i^2 )
- y_<wbr/>c = y_<wbr/>i * ( kappa_<wbr/>0 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
- kappa_<wbr/>5 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>4 * ( r^2 + 2 * y_<wbr/>i^2 )
-</code></pre>
-<p>Here,<wbr/> <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> are the coordinates to sample in the
-input image that correspond to the pixel values in the
-corrected image at the coordinate <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code>:</p>
-<pre><code> correctedImage(x_<wbr/>i,<wbr/> y_<wbr/>i) = sample_<wbr/>at(x_<wbr/>c,<wbr/> y_<wbr/>c,<wbr/> inputImage)
-</code></pre>
-<p>The pixel coordinates are defined in a normalized
-coordinate system related to the
-<a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> calibration fields.<wbr/>
-Both <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code> and <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> have <code>(0,<wbr/>0)</code> at the
-lens optical center <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code>.<wbr/> The maximum magnitudes
-of both x and y coordinates are normalized to be 1 at the
-edge further from the optical center,<wbr/> so the range
-for both dimensions is <code>-1 <= x <= 1</code>.<wbr/></p>
-<p>Finally,<wbr/> <code>r</code> represents the radial distance from the
-optical center,<wbr/> <code>r^2 = x_<wbr/>i^2 + y_<wbr/>i^2</code>,<wbr/> and its magnitude
-is therefore no larger than <code>|<wbr/>r|<wbr/> <= sqrt(2)</code>.<wbr/></p>
-<p>The distortion model used is the Brown-Conrady model.<wbr/></p>
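-<p>A small illustrative helper (not part of the API) that evaluates the mapping above
-for a single normalized coordinate:</p>
-<pre><code>// Map a corrected-image coordinate (xi, yi) to the coordinate (xc, yc) to
-// sample in the distorted input image, using the six coefficients
-// [kappa_0 .. kappa_5]. Coordinates use the normalized system described
-// above, with (0, 0) at the optical center.
-static float[] distortPoint(float[] kappa, float xi, float yi) {
-    float r2 = xi * xi + yi * yi;
-    float radial = kappa[0] + kappa[1] * r2 + kappa[2] * r2 * r2
-            + kappa[3] * r2 * r2 * r2;
-    float xc = xi * radial + kappa[4] * (2 * xi * yi) + kappa[5] * (r2 + 2 * xi * xi);
-    float yc = yi * radial + kappa[5] * (2 * xi * yi) + kappa[4] * (r2 + 2 * yi * yi);
-    return new float[] { xc, yc };
-}
-</code></pre>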
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.lens.aperture">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>aperture
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired lens aperture size,<wbr/> as a ratio of lens focal length to the
-effective aperture diameter.<wbr/></p>
- </td>
-
- <td class="entry_units">
- The f-number (f/<wbr/>N)
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.lens.info.availableApertures">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Setting this value is only supported on the camera devices that have a variable
-aperture lens.<wbr/></p>
-<p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/>
-this can be set along with <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
-<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>
-to achieve manual exposure control.<wbr/></p>
-<p>The requested aperture value may take several frames to reach the
-requested value; the camera device will report the current (intermediate)
-aperture size in capture result metadata while the aperture is changing.<wbr/>
-While the aperture is still changing,<wbr/> <a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
-<p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is one of
-the ON modes,<wbr/> this will be overridden by the camera device
-auto-exposure algorithm; the overridden values are then provided
-back to the user in the corresponding result.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.filterDensity">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>filter<wbr/>Density
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired setting for the lens neutral density filter(s).<wbr/></p>
- </td>
-
- <td class="entry_units">
- Exposure Value (EV)
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.lens.info.availableFilterDensities">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control will not be supported on most camera devices.<wbr/></p>
-<p>Lens filters are typically used to lower the amount of light the
-sensor is exposed to (measured in steps of EV).<wbr/> As used here,<wbr/> an EV
-step is the standard logarithmic representation; filter density values are
-non-negative and inversely proportional to the amount of light
-hitting the sensor.<wbr/> For example,<wbr/> setting this to 0 would result
-in no reduction of the incoming light,<wbr/> and setting this to 2 would
-mean that the filter is set to reduce incoming light by two stops
-(allowing 1/<wbr/>4 of the prior amount of light to the sensor).<wbr/></p>
-<p>It may take several frames before the lens filter density changes
-to the requested value.<wbr/> While the filter density is still changing,<wbr/>
-<a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.focalLength">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>focal<wbr/>Length
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired lens focal length; used for optical zoom.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Millimeters
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.lens.info.availableFocalLengths">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This setting controls the physical focal length of the camera
-device's lens.<wbr/> Changing the focal length changes the field of
-view of the camera device,<wbr/> and is usually used for optical zoom.<wbr/></p>
-<p>Like <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>,<wbr/> this
-setting won't be applied instantaneously,<wbr/> and it may take several
-frames before the lens can change to the requested focal length.<wbr/>
-While the focal length is still changing,<wbr/> <a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will
-be set to MOVING.<wbr/></p>
-<p>Optical zoom will not be supported on most devices.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.focusDistance">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>focus<wbr/>Distance
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Desired distance to plane of sharpest focus,<wbr/>
-measured from frontmost surface of the lens.<wbr/></p>
- </td>
-
- <td class="entry_units">
- See android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details
- </td>
-
- <td class="entry_range">
- <p>>= 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-              <p>Should be zero for fixed-focus cameras.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.focusRange">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>focus<wbr/>Range
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as pairFloatFloat]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
- <div class="entry_type_notes">Range of scene distances that are in focus</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The range of scene distances that are in
-sharp focus (depth of field).<wbr/></p>
- </td>
-
- <td class="entry_units">
- A pair of focus distances in diopters: (near,<wbr/>
- far); see android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details.<wbr/>
- </td>
-
- <td class="entry_range">
- <p>>=0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-              <p>If variable focus is not supported,<wbr/> the camera device can still report a
-fixed depth of field range.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.opticalStabilizationMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Optical stabilization is unavailable.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Optical stabilization is enabled.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Sets whether the camera device uses optical image stabilization (OIS)
-when capturing images.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>OIS is used to compensate for motion blur due to small
-movements of the camera during capture.<wbr/> Unlike digital image
-stabilization (<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> OIS
-makes use of mechanical elements to stabilize the camera
-sensor,<wbr/> and thus allows for longer exposure times before
-camera shake becomes apparent.<wbr/></p>
-<p>Switching between different optical stabilization modes may take several
-frames to initialize;<wbr/> the camera device will report the current mode in
-capture result metadata.<wbr/> For example,<wbr/> when "ON" mode is requested,<wbr/> the
-optical stabilization mode in the first several capture results may still
-be "OFF",<wbr/> and it will become "ON" when the initialization is done.<wbr/></p>
-<p>If a camera device supports both OIS and digital image stabilization
-(<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> turning both modes on may produce undesirable
-interaction,<wbr/> so it is recommended not to enable both at the same time.<wbr/></p>
-<p>Not all devices will support OIS; see
-<a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a> for
-available controls.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.state">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>state
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">STATIONARY</span>
- <span class="entry_type_enum_notes"><p>The lens parameters (<a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
-<a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>) are not changing.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MOVING</span>
- <span class="entry_type_enum_notes"><p>One or several of the lens parameters
-(<a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
-<a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> or <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>) is
-currently changing.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Current lens status.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For lens parameters <a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
-<a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>,<wbr/> when changes are requested,<wbr/>
-they may take several frames to reach the requested values.<wbr/> This state indicates
-the current status of the lens parameters.<wbr/></p>
-<p>When the state is STATIONARY,<wbr/> the lens parameters are not changing.<wbr/> This could be
-either because the parameters are all fixed,<wbr/> or because the lens has had enough
-time to reach the most recently-requested values.<wbr/>
-If all these lens parameters are not changeable for a camera device,<wbr/> as listed below:</p>
-<ul>
-<li>Fixed focus (<code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> == 0</code>),<wbr/> which means
-<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> parameter will always be 0.<wbr/></li>
-<li>Fixed focal length (<a href="#static_android.lens.info.availableFocalLengths">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths</a> contains single value),<wbr/>
-which means the optical zoom is not supported.<wbr/></li>
-<li>No ND filter (<a href="#static_android.lens.info.availableFilterDensities">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities</a> contains only 0).<wbr/></li>
-<li>Fixed aperture (<a href="#static_android.lens.info.availableApertures">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures</a> contains single value).<wbr/></li>
-</ul>
-<p>Then this state will always be STATIONARY.<wbr/></p>
-<p>When the state is MOVING,<wbr/> it indicates that at least one of the lens parameters
-is changing.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.poseRotation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>pose<wbr/>Rotation
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The orientation of the camera relative to the sensor
-coordinate system.<wbr/></p>
- </td>
-
- <td class="entry_units">
-
- Quaternion coefficients
-
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The four coefficients that describe the quaternion
-rotation from the Android sensor coordinate system to a
-camera-aligned coordinate system where the X-axis is
-aligned with the long side of the image sensor,<wbr/> the Y-axis
-is aligned with the short side of the image sensor,<wbr/> and
-the Z-axis is aligned with the optical axis of the sensor.<wbr/></p>
-<p>To convert from the quaternion coefficients <code>(x,<wbr/>y,<wbr/>z,<wbr/>w)</code>
-to the axis of rotation <code>(a_<wbr/>x,<wbr/> a_<wbr/>y,<wbr/> a_<wbr/>z)</code> and rotation
-amount <code>theta</code>,<wbr/> the following formulas can be used:</p>
-<pre><code> theta = 2 * acos(w)
-a_<wbr/>x = x /<wbr/> sin(theta/<wbr/>2)
-a_<wbr/>y = y /<wbr/> sin(theta/<wbr/>2)
-a_<wbr/>z = z /<wbr/> sin(theta/<wbr/>2)
-</code></pre>
-<p>To create a 3x3 rotation matrix that applies the rotation
-defined by this quaternion,<wbr/> the following matrix can be
-used:</p>
-<pre><code>R = [ 1 - 2y^2 - 2z^2,<wbr/> 2xy - 2zw,<wbr/> 2xz + 2yw,<wbr/>
- 2xy + 2zw,<wbr/> 1 - 2x^2 - 2z^2,<wbr/> 2yz - 2xw,<wbr/>
- 2xz - 2yw,<wbr/> 2yz + 2xw,<wbr/> 1 - 2x^2 - 2y^2 ]
-</code></pre>
-<p>This matrix can then be used to apply the rotation to a
- column vector point with</p>
-<p><code>p' = Rp</code></p>
-<p>where <code>p</code> is in the device sensor coordinate system,<wbr/> and
- <code>p'</code> is in the camera-oriented coordinate system.<wbr/></p>
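-<p>For illustration only (this code is not part of the metadata definition),<wbr/> the
-two formulas above translate directly into a minimal C++ sketch; the quaternion is
-assumed to be normalized:</p>
-<pre><code>// Build the row-major 3x3 rotation matrix R from the quaternion (x, y, z, w),
-// following the formula above.
-void quaternionToRotationMatrix(float x, float y, float z, float w, float R[9]) {
-    R[0] = 1 - 2*y*y - 2*z*z; R[1] = 2*x*y - 2*z*w;     R[2] = 2*x*z + 2*y*w;
-    R[3] = 2*x*y + 2*z*w;     R[4] = 1 - 2*x*x - 2*z*z; R[5] = 2*y*z - 2*x*w;
-    R[6] = 2*x*z - 2*y*w;     R[7] = 2*y*z + 2*x*w;     R[8] = 1 - 2*x*x - 2*y*y;
-}
-
-// p' = R p: rotate a point p from the device sensor coordinate system into the
-// camera-oriented coordinate system.
-void rotatePoint(const float R[9], const float p[3], float pOut[3]) {
-    pOut[0] = R[0]*p[0] + R[1]*p[1] + R[2]*p[2];
-    pOut[1] = R[3]*p[0] + R[4]*p[1] + R[5]*p[2];
-    pOut[2] = R[6]*p[0] + R[7]*p[1] + R[8]*p[2];
-}
-</code></pre>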
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.poseTranslation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>pose<wbr/>Translation
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Position of the camera optical center.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Meters
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The position of the camera device's lens optical center,<wbr/>
-as a three-dimensional vector <code>(x,<wbr/>y,<wbr/>z)</code>,<wbr/> relative to the
-optical center of the largest camera device facing in the
-same direction as this camera,<wbr/> in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate
-axes</a>.<wbr/> Note that only the axis definitions are shared with
-the sensor coordinate system,<wbr/> but not the origin.<wbr/></p>
-<p>If this device is the largest or only camera device with a
-given facing,<wbr/> then this position will be <code>(0,<wbr/> 0,<wbr/> 0)</code>; a
-camera device with a lens optical center located 3 cm from
-the main sensor along the +X axis (to the right from the
-user's perspective) will report <code>(0.<wbr/>03,<wbr/> 0,<wbr/> 0)</code>.<wbr/></p>
-<p>To transform pixel coordinates between two cameras
-facing the same direction,<wbr/> first the source camera
-<a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a> must be corrected for.<wbr/> Then
-the source camera <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> needs
-to be applied,<wbr/> followed by the <a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a>
-of the source camera,<wbr/> the translation of the source camera
-relative to the destination camera,<wbr/> the
-<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the destination camera,<wbr/> and
-finally the inverse of <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a>
-of the destination camera.<wbr/> This obtains a
-radial-distortion-free coordinate in the destination
-camera pixel coordinates.<wbr/></p>
-<p>To compare this against a real image from the destination
-camera,<wbr/> the destination camera image then needs to be
-corrected for radial distortion before comparison or
-sampling.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.intrinsicCalibration">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 5
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The parameters for this camera device's intrinsic
-calibration.<wbr/></p>
- </td>
-
- <td class="entry_units">
-
- Pixels in the
- android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size
- coordinate system.<wbr/>
-
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The five calibration parameters that describe the
-transform from camera-centric 3D coordinates to sensor
-pixel coordinates:</p>
-<pre><code>[f_<wbr/>x,<wbr/> f_<wbr/>y,<wbr/> c_<wbr/>x,<wbr/> c_<wbr/>y,<wbr/> s]
-</code></pre>
-<p>Where <code>f_<wbr/>x</code> and <code>f_<wbr/>y</code> are the horizontal and vertical
-focal lengths,<wbr/> <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code> is the position of the optical
-axis,<wbr/> and <code>s</code> is a skew parameter for the sensor plane not
-being aligned with the lens plane.<wbr/></p>
-<p>These are typically used within a transformation matrix K:</p>
-<pre><code>K = [ f_<wbr/>x,<wbr/> s,<wbr/> c_<wbr/>x,<wbr/>
- 0,<wbr/> f_<wbr/>y,<wbr/> c_<wbr/>y,<wbr/>
-      0,<wbr/> 0,<wbr/> 1 ]
-</code></pre>
-<p>which can then be combined with the camera pose rotation
-<code>R</code> and translation <code>t</code> (<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> and
-<a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a>,<wbr/> respective) to calculate the
-complete transform from world coordinates to pixel
-coordinates:</p>
-<pre><code>P = [ K 0 * [ R t
- 0 1 ] 0 1 ]
-</code></pre>
-<p>and with <code>p_<wbr/>w</code> being a point in the world coordinate system
-and <code>p_<wbr/>s</code> being a point in the camera active pixel array
-coordinate system,<wbr/> and with the mapping including the
-homogeneous division by z:</p>
-<pre><code> p_<wbr/>h = (x_<wbr/>h,<wbr/> y_<wbr/>h,<wbr/> z_<wbr/>h) = P p_<wbr/>w
-p_<wbr/>s = p_<wbr/>h /<wbr/> z_<wbr/>h
-</code></pre>
-<p>so <code>[x_<wbr/>s,<wbr/> y_<wbr/>s]</code> are the pixel coordinates of the world
-point,<wbr/> <code>z_<wbr/>s = 1</code>,<wbr/> and <code>z_<wbr/>h</code> is a measurement of disparity
-(depth) in pixel coordinates.<wbr/></p>
-<p>Note that the coordinate system for this transform is the
-<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> system,<wbr/>
-where <code>(0,<wbr/>0)</code> is the top-left of the
-preCorrectionActiveArraySize rectangle.<wbr/> Once the pose and
-intrinsic calibration transforms have been applied to a
-world point,<wbr/> then the <a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a>
-transform needs to be applied,<wbr/> and the result adjusted to
-be in the <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> coordinate
-system (where <code>(0,<wbr/> 0)</code> is the top-left of the
-activeArraySize rectangle),<wbr/> to determine the final pixel
-coordinate of the world point for processed (non-RAW)
-output buffers.<wbr/></p>
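-<p>For illustration only (this code is not part of the metadata definition),<wbr/> a
-minimal C++ sketch of the projection described above,<wbr/> assuming a row-major 3x3
-rotation <code>R</code> and translation <code>t</code>; the
-<a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a> step is not applied here:</p>
-<pre><code>// Project a world-coordinate point into preCorrectionActiveArraySize pixel
-// coordinates using the intrinsics [f_x, f_y, c_x, c_y, s], the pose rotation R
-// and the pose translation t, including the homogeneous division by z.
-void projectToPixel(const float intrinsics[5], const float R[9], const float t[3],
-                    const float pWorld[3], float pixel[2]) {
-    const float fx = intrinsics[0], fy = intrinsics[1];
-    const float cx = intrinsics[2], cy = intrinsics[3], s = intrinsics[4];
-    float q[3];  // camera-centric coordinates: q = R * p_w + t
-    for (int i = 0; i &lt; 3; ++i) {
-        q[i] = R[3*i]*pWorld[0] + R[3*i + 1]*pWorld[1] + R[3*i + 2]*pWorld[2] + t[i];
-    }
-    pixel[0] = (fx*q[0] + s*q[1] + cx*q[2]) / q[2];
-    pixel[1] = (fy*q[1] + cy*q[2]) / q[2];
-}
-</code></pre>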
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.lens.radialDistortion">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>lens.<wbr/>radial<wbr/>Distortion
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 6
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The correction coefficients to correct for this camera device's
-radial and tangential lens distortion.<wbr/></p>
- </td>
-
- <td class="entry_units">
-
- Unitless coefficients.<wbr/>
-
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Four radial distortion coefficients <code>[kappa_<wbr/>0,<wbr/> kappa_<wbr/>1,<wbr/> kappa_<wbr/>2,<wbr/>
-kappa_<wbr/>3]</code> and two tangential distortion coefficients
-<code>[kappa_<wbr/>4,<wbr/> kappa_<wbr/>5]</code> that can be used to correct the
-lens's geometric distortion with the mapping equations:</p>
-<pre><code> x_<wbr/>c = x_<wbr/>i * ( kappa_<wbr/>0 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
- kappa_<wbr/>4 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>5 * ( r^2 + 2 * x_<wbr/>i^2 )
- y_<wbr/>c = y_<wbr/>i * ( kappa_<wbr/>0 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
- kappa_<wbr/>5 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>4 * ( r^2 + 2 * y_<wbr/>i^2 )
-</code></pre>
-<p>Here,<wbr/> <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> are the coordinates to sample in the
-input image that correspond to the pixel values in the
-corrected image at the coordinate <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code>:</p>
-<pre><code> correctedImage(x_<wbr/>i,<wbr/> y_<wbr/>i) = sample_<wbr/>at(x_<wbr/>c,<wbr/> y_<wbr/>c,<wbr/> inputImage)
-</code></pre>
-<p>The pixel coordinates are defined in a normalized
-coordinate system related to the
-<a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> calibration fields.<wbr/>
-Both <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code> and <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> have <code>(0,<wbr/>0)</code> at the
-lens optical center <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code>.<wbr/> The maximum magnitudes
-of both x and y coordinates are normalized to be 1 at the
-edge further from the optical center,<wbr/> so the range
-for both dimensions is <code>-1 <= x <= 1</code>.<wbr/></p>
-<p>Finally,<wbr/> <code>r</code> represents the radial distance from the
-optical center,<wbr/> <code>r^2 = x_<wbr/>i^2 + y_<wbr/>i^2</code>,<wbr/> and its magnitude
-is therefore no larger than <code>|<wbr/>r|<wbr/> <= sqrt(2)</code>.<wbr/></p>
-<p>The distortion model used is the Brown-Conrady model.<wbr/></p>
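-<p>For illustration only (this code is not part of the metadata definition),<wbr/> the
-mapping equations above in a minimal C++ sketch; coordinates are in the normalized
-system described above:</p>
-<pre><code>// Given a corrected-image position [x_i, y_i] and the six coefficients
-// [kappa_0 .. kappa_5], compute the position [x_c, y_c] to sample in the
-// distorted input image.
-void distortionSamplePosition(const float kappa[6], float xi, float yi,
-                              float *xc, float *yc) {
-    const float r2 = xi*xi + yi*yi;
-    const float radial = kappa[0] + kappa[1]*r2 + kappa[2]*r2*r2 + kappa[3]*r2*r2*r2;
-    *xc = xi*radial + kappa[4]*(2*xi*yi) + kappa[5]*(r2 + 2*xi*xi);
-    *yc = yi*radial + kappa[5]*(2*xi*yi) + kappa[4]*(r2 + 2*yi*yi);
-}
-</code></pre>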
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_noiseReduction" class="section">noiseReduction</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.noiseReduction.mode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>noise<wbr/>Reduction.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No noise reduction is applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Noise reduction is applied without reducing frame rate relative to sensor
-output.<wbr/> It may be the same as OFF if noise reduction will reduce frame rate
-relative to sensor.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>High-quality noise reduction is applied,<wbr/> at the cost of possibly reduced frame
-rate relative to sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MINIMAL</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>MINIMAL noise reduction is applied without reducing frame rate relative to
-sensor output.<wbr/> </p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Noise reduction is applied at different levels for different output streams,<wbr/>
-based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have noise
-reduction applied,<wbr/> while higher-resolution streams have MINIMAL (if supported) or no
-noise reduction applied (if MINIMAL is not supported).<wbr/> The degree of noise reduction
-for low-resolution streams is tuned so that frame rate is not impacted,<wbr/> and the quality
-is equal to or better than FAST (since it is only applied to lower-resolution outputs,<wbr/>
-quality may improve from FAST).<wbr/></p>
-<p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
-with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
-high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
-produced via reprocessing when a user takes a picture.<wbr/> For such a use case,<wbr/> the
-high-resolution buffers must not have noise reduction applied to maximize efficiency of
-preview and to avoid over-applying noise filtering when reprocessing,<wbr/> while
-low-resolution buffers (used for recording or preview,<wbr/> generally) need noise reduction
-applied for reasonable preview quality.<wbr/></p>
-<p>This mode is guaranteed to be supported by devices that support either the
-YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING capabilities
-(<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> lists either of those capabilities) and it will
-be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Mode of operation for the noise reduction algorithm.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The noise reduction algorithm attempts to improve image quality by removing
-excessive noise added by the capture process,<wbr/> especially in dark conditions.<wbr/></p>
-<p>OFF means no noise reduction will be applied by the camera device,<wbr/> for both raw and
-YUV domain.<wbr/></p>
-<p>MINIMAL means that only sensor raw domain basic noise reduction is enabled,<wbr/> to remove
-demosaicing or other processing artifacts.<wbr/> For YUV_<wbr/>REPROCESSING,<wbr/> MINIMAL is the same as OFF.<wbr/>
-This mode is optional and may not be supported by all devices.<wbr/> The application should check
-<a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a> before using it.<wbr/></p>
-<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined noise filtering
-will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device
-will use the highest-quality noise filtering algorithms,<wbr/>
-even if it slows down capture rate.<wbr/> FAST means the camera device will not
-slow down capture rate when applying noise filtering.<wbr/> FAST may be the same as MINIMAL if
-MINIMAL is listed,<wbr/> or the same as OFF if any noise filtering will slow down capture rate.<wbr/>
-Every output stream will have a similar amount of enhancement applied.<wbr/></p>
-<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG is meant to be used by applications that maintain a continuous circular
-buffer of high-resolution images during preview and reprocess image(s) from that buffer
-into a final capture when triggered by the user.<wbr/> In this mode,<wbr/> the camera device applies
-noise reduction to low-resolution streams (below maximum recording resolution) to maximize
-preview quality,<wbr/> but does not apply noise reduction to high-resolution streams,<wbr/> since
-those will be reprocessed later if necessary.<wbr/></p>
-<p>For YUV_<wbr/>REPROCESSING,<wbr/> these FAST/<wbr/>HIGH_<wbr/>QUALITY modes both mean that the camera device
-will apply FAST/<wbr/>HIGH_<wbr/>QUALITY YUV domain noise reduction,<wbr/> respectively.<wbr/> The camera device
-may adjust the noise reduction parameters for best image quality based on the
-<a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> if it is set.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-              <p>For YUV_<wbr/>REPROCESSING,<wbr/> the HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
-adjust the internal noise reduction parameters appropriately to get the best quality
-images.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.noiseReduction.strength">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>noise<wbr/>Reduction.<wbr/>strength
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Control the amount of noise reduction
-applied to the images</p>
- </td>
-
- <td class="entry_units">
- 1-10; 10 is max noise reduction
- </td>
-
- <td class="entry_range">
- <p>1 - 10</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.noiseReduction.availableNoiseReductionModes">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of noise reduction modes for <a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a> that are supported
-by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Full-capability camera devices will always support OFF and FAST.<wbr/></p>
-<p>Camera devices that support YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING will support
-ZERO_<wbr/>SHUTTER_<wbr/>LAG.<wbr/></p>
-<p>Legacy-capability camera devices will only support FAST mode.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if noise reduction control is available
-on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
-That is,<wbr/> if the highest quality implementation on the camera device does not slow down
-capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.noiseReduction.mode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>noise<wbr/>Reduction.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No noise reduction is applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Noise reduction is applied without reducing frame rate relative to sensor
-output.<wbr/> It may be the same as OFF if noise reduction will reduce frame rate
-relative to sensor.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>High-quality noise reduction is applied,<wbr/> at the cost of possibly reduced frame
-rate relative to sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MINIMAL</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>MINIMAL noise reduction is applied without reducing frame rate relative to
-sensor output.<wbr/> </p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Noise reduction is applied at different levels for different output streams,<wbr/>
-based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have noise
-reduction applied,<wbr/> while higher-resolution streams have MINIMAL (if supported) or no
-noise reduction applied (if MINIMAL is not supported).<wbr/> The degree of noise reduction
-for low-resolution streams is tuned so that frame rate is not impacted,<wbr/> and the quality
-is equal to or better than FAST (since it is only applied to lower-resolution outputs,<wbr/>
-quality may improve from FAST).<wbr/></p>
-<p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
-with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
-high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
-produced via reprocessing when a user takes a picture.<wbr/> For such a use case,<wbr/> the
-high-resolution buffers must not have noise reduction applied to maximize efficiency of
-preview and to avoid over-applying noise filtering when reprocessing,<wbr/> while
-low-resolution buffers (used for recording or preview,<wbr/> generally) need noise reduction
-applied for reasonable preview quality.<wbr/></p>
-<p>This mode is guaranteed to be supported by devices that support either the
-YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING capabilities
-(<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> lists either of those capabilities) and it will
-be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Mode of operation for the noise reduction algorithm.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The noise reduction algorithm attempts to improve image quality by removing
-excessive noise added by the capture process,<wbr/> especially in dark conditions.<wbr/></p>
-<p>OFF means no noise reduction will be applied by the camera device,<wbr/> for both raw and
-YUV domain.<wbr/></p>
-<p>MINIMAL means that only sensor raw domain basic noise reduction is enabled,<wbr/> to remove
-demosaicing or other processing artifacts.<wbr/> For YUV_<wbr/>REPROCESSING,<wbr/> MINIMAL is the same as OFF.<wbr/>
-This mode is optional and may not be supported by all devices.<wbr/> The application should check
-<a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a> before using it.<wbr/></p>
-<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined noise filtering
-will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device
-will use the highest-quality noise filtering algorithms,<wbr/>
-even if it slows down capture rate.<wbr/> FAST means the camera device will not
-slow down capture rate when applying noise filtering.<wbr/> FAST may be the same as MINIMAL if
-MINIMAL is listed,<wbr/> or the same as OFF if any noise filtering will slow down capture rate.<wbr/>
-Every output stream will have a similar amount of enhancement applied.<wbr/></p>
-<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG is meant to be used by applications that maintain a continuous circular
-buffer of high-resolution images during preview and reprocess image(s) from that buffer
-into a final capture when triggered by the user.<wbr/> In this mode,<wbr/> the camera device applies
-noise reduction to low-resolution streams (below maximum recording resolution) to maximize
-preview quality,<wbr/> but does not apply noise reduction to high-resolution streams,<wbr/> since
-those will be reprocessed later if necessary.<wbr/></p>
-<p>For YUV_<wbr/>REPROCESSING,<wbr/> these FAST/<wbr/>HIGH_<wbr/>QUALITY modes both mean that the camera device
-will apply FAST/<wbr/>HIGH_<wbr/>QUALITY YUV domain noise reduction,<wbr/> respectively.<wbr/> The camera device
-may adjust the noise reduction parameters for best image quality based on the
-<a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> if it is set.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-              <p>For YUV_<wbr/>REPROCESSING,<wbr/> the HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
-adjust the internal noise reduction parameters appropriately to get the best quality
-images.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_quirks" class="section">quirks</td></tr>
-
-
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.quirks.meteringCropRegion">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>quirks.<wbr/>metering<wbr/>Crop<wbr/>Region
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>If set to 1,<wbr/> the camera service does not
-scale 'normalized' coordinates with respect to the crop
-region.<wbr/> This applies to metering input (a{e,<wbr/>f,<wbr/>wb}Region)
-and output (face rectangles).<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Normalized coordinates refer to those in the
-(-1000,<wbr/>1000) range mentioned in the
-android.<wbr/>hardware.<wbr/>Camera API.<wbr/></p>
-<p>HAL implementations should instead always use and emit
-sensor array-relative coordinates for all region data.<wbr/> Does
-not need to be listed in static metadata.<wbr/> Support will be
-removed in future versions of camera service.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.quirks.triggerAfWithAuto">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>quirks.<wbr/>trigger<wbr/>Af<wbr/>With<wbr/>Auto
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>If set to 1,<wbr/> then the camera service always
-switches to FOCUS_<wbr/>MODE_<wbr/>AUTO before issuing an AF
-trigger.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL implementations should implement AF trigger
-modes for AUTO,<wbr/> MACRO,<wbr/> CONTINUOUS_<wbr/>FOCUS,<wbr/> and
-CONTINUOUS_<wbr/>PICTURE modes instead of using this flag.<wbr/> Does
-not need to be listed in static metadata.<wbr/> Support will be
-removed in future versions of camera service.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.quirks.useZslFormat">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>quirks.<wbr/>use<wbr/>Zsl<wbr/>Format
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>If set to 1,<wbr/> the camera service uses
-CAMERA2_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>ZSL instead of
-HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>IMPLEMENTATION_<wbr/>DEFINED for the zero
-shutter lag stream</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL implementations should use gralloc usage flags
-to determine that a stream will be used for
-zero-shutter-lag,<wbr/> instead of relying on an explicit
-format setting.<wbr/> Does not need to be listed in static
-metadata.<wbr/> Support will be removed in future versions of
-camera service.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.quirks.usePartialResult">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="5">
- android.<wbr/>quirks.<wbr/>use<wbr/>Partial<wbr/>Result
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [hidden]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>If set to 1,<wbr/> the HAL will always split result
-metadata for a single capture into multiple buffers,<wbr/>
-returned using multiple process_<wbr/>capture_<wbr/>result calls.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Does not need to be listed in static
-metadata.<wbr/> Support for partial results will be reworked in
-future versions of camera service.<wbr/> This quirk will stop
-working at that point; DO NOT USE without careful
-consideration of future support.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Refer to <code>camera3_<wbr/>capture_<wbr/>result::partial_<wbr/>result</code>
-for information on how to implement partial results.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.quirks.partialResult">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="5">
- android.<wbr/>quirks.<wbr/>partial<wbr/>Result
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [hidden as boolean]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">FINAL</span>
- <span class="entry_type_enum_notes"><p>The last or only metadata result buffer
-for this capture.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PARTIAL</span>
- <span class="entry_type_enum_notes"><p>A partial buffer of result metadata for this
-capture.<wbr/> More result buffers for this capture will be sent
-by the camera device,<wbr/> the last of which will be marked
-FINAL.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether a result given to the framework is the
-final one for the capture,<wbr/> or only a partial that contains a
-subset of the full set of dynamic metadata
-values.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- <p>Optional.<wbr/> Default value is FINAL.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The entries in the result metadata buffers for a
-single capture may not overlap,<wbr/> except for this entry.<wbr/> The
-FINAL buffers must retain FIFO ordering relative to the
-requests that generate them,<wbr/> so the FINAL buffer for frame 3 must
-always be sent to the framework after the FINAL buffer for frame 2,<wbr/> and
-before the FINAL buffer for frame 4.<wbr/> PARTIAL buffers may be returned
-in any order relative to other frames,<wbr/> but all PARTIAL buffers for a given
-capture must arrive before the FINAL buffer for that capture.<wbr/> This entry may
-only be used by the camera device if quirks.<wbr/>usePartialResult is set to 1.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Refer to <code>camera3_<wbr/>capture_<wbr/>result::partial_<wbr/>result</code>
-for information on how to implement partial results.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_request" class="section">request</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.request.frameCount">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="1">
- android.<wbr/>request.<wbr/>frame<wbr/>Count
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A frame counter set by the framework.<wbr/> Must
-be maintained unchanged in output frame.<wbr/> This value monotonically
-increases with every new result (that is,<wbr/> each new result has a unique
-frameCount value).<wbr/></p>
- </td>
-
- <td class="entry_units">
- incrementing integer
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- <p>Any int.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.request.id">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>request.<wbr/>id
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [hidden]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>An application-specified ID for the current
-request.<wbr/> Must be maintained unchanged in output
-frame</p>
- </td>
-
- <td class="entry_units">
- arbitrary integer assigned by application
- </td>
-
- <td class="entry_range">
- <p>Any int</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.request.inputStreams">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>request.<wbr/>input<wbr/>Streams
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
-            <p>Lists which camera reprocess stream is used
-for the source of reprocessing data.<wbr/></p>
- </td>
-
- <td class="entry_units">
- List of camera reprocess stream IDs
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- <p>Typically,<wbr/> only one entry allowed,<wbr/> must be a valid reprocess stream ID.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_HAL2">HAL2</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Only meaningful when <a href="#controls_android.request.type">android.<wbr/>request.<wbr/>type</a> ==
-REPROCESS.<wbr/> Ignored otherwise</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.request.metadataMode">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>request.<wbr/>metadata<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">NONE</span>
- <span class="entry_type_enum_notes"><p>No metadata should be produced on output,<wbr/> except
-for application-bound buffer data.<wbr/> If no
-application-bound streams exist,<wbr/> no frame should be
-placed in the output frame queue.<wbr/> If such streams
-exist,<wbr/> a frame should be placed on the output queue
-with null metadata but with the necessary output buffer
-information.<wbr/> Timestamp information should still be
-included with any output stream buffers</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FULL</span>
- <span class="entry_type_enum_notes"><p>All metadata should be produced.<wbr/> Statistics will
-only be produced if they are separately
-enabled</p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>How much metadata to produce on
-output</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.request.outputStreams">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>request.<wbr/>output<wbr/>Streams
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Lists which camera output streams image data
-from this capture must be sent to</p>
- </td>
-
- <td class="entry_units">
- List of camera stream IDs
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- <p>List must only include streams that have been
-created</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_HAL2">HAL2</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If no output streams are listed,<wbr/> then the image
-data should simply be discarded.<wbr/> The image data must
-still be captured for metadata and statistics production,<wbr/>
-and the lens and flash must operate as requested.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.request.type">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="1">
- android.<wbr/>request.<wbr/>type
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">CAPTURE</span>
- <span class="entry_type_enum_notes"><p>Capture a new image from the imaging hardware,<wbr/>
-and process it according to the
-settings</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">REPROCESS</span>
- <span class="entry_type_enum_notes"><p>Process previously captured data; the
-<a href="#controls_android.request.inputStreams">android.<wbr/>request.<wbr/>input<wbr/>Streams</a> parameter determines the
-source reprocessing stream.<wbr/> TODO: Mark dynamic metadata
-needed for reprocessing with [RP]</p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The type of the request; either CAPTURE or
-REPROCESS.<wbr/> For HAL3,<wbr/> this tag is redundant.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_HAL2">HAL2</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.request.maxNumOutputStreams">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum numbers of different types of output streams
-that can be configured and used simultaneously by a camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>For processed (and stalling) format streams,<wbr/> >= 1.<wbr/></p>
-<p>For Raw format (either stalling or non-stalling) streams,<wbr/> >= 0.<wbr/></p>
-<p>For processed (but not stalling) format streams,<wbr/> >= 3
-for FULL mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>);
->= 2 for LIMITED mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>).<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is a 3 element tuple that contains the max number of output simultaneous
-streams for raw sensor,<wbr/> processed (but not stalling),<wbr/> and processed (and stalling)
-formats respectively.<wbr/> For example,<wbr/> assuming that JPEG is typically a processed and
-stalling stream,<wbr/> if max raw sensor format output stream number is 1,<wbr/> max YUV streams
-number is 3,<wbr/> and max JPEG stream number is 2,<wbr/> then this tuple should be <code>(1,<wbr/> 3,<wbr/> 2)</code>.<wbr/></p>
-<p>This lists the upper bound of the number of output streams supported by
-the camera device.<wbr/> Using more streams simultaneously may require more hardware and
-CPU resources that will consume more power.<wbr/> The image format for an output stream can
-be any supported format provided by <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a>.<wbr/>
-The formats defined in <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> can be categorized
-into the 3 stream types as below:</p>
-<ul>
-<li>Processed (but stalling): any non-RAW format with a stallDurations > 0.<wbr/>
- Typically <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">JPEG format</a>.<wbr/></li>
-<li>Raw formats: <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_SENSOR">RAW_<wbr/>SENSOR</a>,<wbr/> <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">RAW10</a>,<wbr/> or <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">RAW12</a>.<wbr/></li>
-<li>Processed (but not-stalling): any non-RAW format without a stall duration.<wbr/>
- Typically <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">YUV_<wbr/>420_<wbr/>888</a>,<wbr/>
- <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#NV21">NV21</a>,<wbr/> or
- <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12">YV12</a>.<wbr/></li>
-</ul>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
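
For illustration, the stall-duration classification used above can be checked from the application side. A minimal Java sketch, assuming a CameraCharacteristics instance is already available (the helper class name StreamClassifier is illustrative, not part of the API):

```java
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Size;

final class StreamClassifier {
    /**
     * Returns true if the given non-RAW output format/size combination is a
     * "processed (and stalling)" stream, i.e. it reports a stall duration > 0.
     */
    static boolean isStalling(CameraCharacteristics chars, int format, Size size) {
        StreamConfigurationMap map =
                chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        // JPEG typically returns a non-zero stall duration; YUV_420_888 returns 0,
        // matching the three categories listed in the entry details above.
        return map.getOutputStallDuration(format, size) > 0;
    }
}
```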
-
-
- <tr class="entry" id="static_android.request.maxNumOutputRaw">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Raw
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [java_public]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum numbers of different types of output streams
-that can be configured and used simultaneously by a camera device
-for any <code>RAW</code> formats.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 0</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This value contains the max number of output simultaneous
-streams from the raw sensor.<wbr/></p>
-<p>This lists the upper bound of the number of output streams supported by
-the camera device.<wbr/> Using more streams simultaneously may require more hardware and
-CPU resources that will consume more power.<wbr/> The image format for this kind of an output stream can
-be any <code>RAW</code> and supported format provided by <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/></p>
-<p>In particular,<wbr/> a <code>RAW</code> format is typically one of:</p>
-<ul>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_SENSOR">RAW_<wbr/>SENSOR</a></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">RAW10</a></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">RAW12</a></li>
-</ul>
-<p>LEGACY mode devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> <code>==</code> LEGACY)
-never support raw streams.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.request.maxNumOutputProc">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Proc
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [java_public]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum numbers of different types of output streams
-that can be configured and used simultaneously by a camera device
-for any processed (but not-stalling) formats.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 3
-for FULL mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>);
->= 2 for LIMITED mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>).<wbr/></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This value contains the max number of output simultaneous
-streams for any processed (but not-stalling) formats.<wbr/></p>
-<p>This lists the upper bound of the number of output streams supported by
-the camera device.<wbr/> Using more streams simultaneously may require more hardware and
-CPU resources that will consume more power.<wbr/> The image format for this kind of an output stream can
-be any non-<code>RAW</code> and supported format provided by <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/></p>
-<p>Processed (but not-stalling) is defined as any non-RAW format without a stall duration.<wbr/>
-Typically:</p>
-<ul>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">YUV_<wbr/>420_<wbr/>888</a></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#NV21">NV21</a></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12">YV12</a></li>
-<li>Implementation-defined formats,<wbr/> i.<wbr/>e.<wbr/> <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#isOutputSupportedFor(Class)">StreamConfigurationMap#isOutputSupportedFor(Class)</a></li>
-</ul>
-<p>For full guarantees,<wbr/> query <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a> with a
-processed format -- it will return 0 for a non-stalling stream.<wbr/></p>
-<p>LEGACY devices will support at least 2 processed/non-stalling streams.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.request.maxNumOutputProcStalling">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Proc<wbr/>Stalling
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [java_public]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum numbers of different types of output streams
-that can be configured and used simultaneously by a camera device
-for any processed (and stalling) formats.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 1</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This value contains the max number of output simultaneous
-streams for any processed (and stalling) formats.<wbr/></p>
-<p>This lists the upper bound of the number of output streams supported by
-the camera device.<wbr/> Using more streams simultaneously may require more hardware and
-CPU resources that will consume more power.<wbr/> The image format for this kind of an output stream can
-be any non-<code>RAW</code> and supported format provided by <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/></p>
-<p>A processed and stalling format is defined as any non-RAW format with a stallDurations
-> 0.<wbr/> Typically only the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">JPEG format</a> is a
-stalling format.<wbr/></p>
-<p>For full guarantees,<wbr/> query <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a> with a
-processed format -- it will return a non-0 value for a stalling stream.<wbr/></p>
-<p>LEGACY devices will support up to 1 processed/stalling stream.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
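
Applications read these limits through the three synthetic keys documented above. A minimal Java sketch, assuming a CameraCharacteristics obtained via CameraManager#getCameraCharacteristics (the wrapper class OutputStreamLimits is illustrative):

```java
import android.hardware.camera2.CameraCharacteristics;

final class OutputStreamLimits {
    /** Returns {maxRaw, maxProc, maxProcStalling} for the given camera. */
    static int[] query(CameraCharacteristics chars) {
        int maxRaw = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
        int maxProc = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
        int maxProcStalling =
                chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
        // A device advertising the example (1, 3, 2) tuple yields {1, 3, 2} here.
        return new int[] {maxRaw, maxProc, maxProcStalling};
    }
}
```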
-
-
- <tr class="entry" id="static_android.request.maxNumReprocessStreams">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Reprocess<wbr/>Streams
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 1
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>How many reprocessing streams of any type
-can be allocated at the same time.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- <p>>= 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_HAL2">HAL2</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Only used by HAL2.<wbr/>x.<wbr/></p>
-<p>When set to 0,<wbr/> it means no reprocess stream is supported.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.request.maxNumInputStreams">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum numbers of any type of input streams
-that can be configured and used simultaneously by a camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>0 or 1.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to 0,<wbr/> it means no input stream is supported.<wbr/></p>
-<p>The image format for an input stream can be any supported format returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a>.<wbr/> When using an
-input stream,<wbr/> there must be at least one output stream configured to receive the
-reprocessed images.<wbr/></p>
-<p>When an input stream and some output streams are used in a reprocessing request,<wbr/>
-only the input buffer will be used to produce these output stream buffers,<wbr/> and a
-new sensor image will not be captured.<wbr/></p>
-<p>For example,<wbr/> for the Zero Shutter Lag (ZSL) still capture use case,<wbr/> the input
-stream image format will be PRIVATE,<wbr/> and the associated output stream image format
-should be JPEG.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For the reprocessing flow and controls,<wbr/> see
-hardware/<wbr/>libhardware/<wbr/>include/<wbr/>hardware/<wbr/>camera3.<wbr/>h Section 10 for more details.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
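
As a hedged illustration of the checks described above (the helper class ReprocessSupport is hypothetical), an application can combine this key with the StreamConfigurationMap input-format queries:

```java
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.params.StreamConfigurationMap;

final class ReprocessSupport {
    /** Returns true if the device exposes at least one usable reprocess input stream. */
    static boolean hasInputStream(CameraCharacteristics chars) {
        Integer maxInputs =
                chars.get(CameraCharacteristics.REQUEST_MAX_NUM_INPUT_STREAMS);
        if (maxInputs == null || maxInputs == 0) {
            return false; // 0 (or absent) means reprocessing is not supported.
        }
        StreamConfigurationMap map =
                chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        // Each supported input format maps to at least one output format that a
        // reprocess request can target (e.g. PRIVATE -> JPEG for ZSL capture).
        for (int inputFormat : map.getInputFormats()) {
            if (map.getValidOutputFormatsForInput(inputFormat).length > 0) {
                return true;
            }
        }
        return false;
    }
}
```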
-
-
- <tr class="entry" id="static_android.request.pipelineMaxDepth">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>request.<wbr/>pipeline<wbr/>Max<wbr/>Depth
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
-          <p>Specifies the maximum number of pipeline stages a frame
-has to go through from when it's exposed to when it's available
-to the framework.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>A typical minimum value for this is 2 (one stage to expose,<wbr/>
-one stage for readout) from the sensor.<wbr/> The ISP then usually adds
-its own stages to do custom HW processing.<wbr/> Further stages may be
-added by SW processing.<wbr/></p>
-<p>Depending on what settings are used (e.<wbr/>g.<wbr/> YUV,<wbr/> JPEG) and what
-processing is enabled (e.<wbr/>g.<wbr/> face detection),<wbr/> the actual pipeline
-depth (specified by <a href="#dynamic_android.request.pipelineDepth">android.<wbr/>request.<wbr/>pipeline<wbr/>Depth</a>) may be less than
-the max pipeline depth.<wbr/></p>
-<p>A pipeline depth of X stages is equivalent to a pipeline latency of
-X frame intervals.<wbr/></p>
-<p>This value will normally be 8 or less;<wbr/> however,<wbr/> for a high speed capture session,<wbr/>
-the max pipeline depth will be up to 8 x the size of the high speed capture request list.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>This value should be 4 or less,<wbr/> except for the high speed recording session,<wbr/> where the
-max batch sizes may be larger than 1.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
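
A small Java sketch comparing this static bound with the per-frame android.request.pipelineDepth value mentioned in the details (the class name PipelineDepth is illustrative):

```java
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;

final class PipelineDepth {
    /**
     * Reports how deep the pipeline actually was for one frame versus the
     * static upper bound advertised by the device.
     */
    static void report(CameraCharacteristics chars, CaptureResult result) {
        byte maxDepth = chars.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
        Byte actual = result.get(CaptureResult.REQUEST_PIPELINE_DEPTH);
        if (actual != null) {
            // A depth of X stages corresponds to roughly X frame intervals of latency.
            System.out.println("pipeline depth " + actual + " (max " + maxDepth + ")");
        }
    }
}
```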
-
-
- <tr class="entry" id="static_android.request.partialResultCount">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>request.<wbr/>partial<wbr/>Result<wbr/>Count
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Defines how many sub-components
-a result will be composed of.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 1</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>In order to combat the pipeline latency,<wbr/> partial results
-may be delivered to the application layer from the camera device as
-soon as they are available.<wbr/></p>
-<p>Optional; defaults to 1.<wbr/> A value of 1 means that partial
-results are not supported,<wbr/> and only the final TotalCaptureResult will
-be produced by the camera device.<wbr/></p>
-<p>A typical use case for this might be: after requesting an
-auto-focus (AF) lock the new AF state might be available 50%
-of the way through the pipeline.<wbr/> The camera device could
-then immediately dispatch this state via a partial result to
-the application,<wbr/> and the rest of the metadata via later
-partial results.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
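
On the framework side, partial results are delivered through CameraCaptureSession.CaptureCallback#onCaptureProgressed. A minimal Java sketch mirroring the AF-state example above (the callback subclass name is illustrative):

```java
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;

final class PartialResultCallback extends CameraCaptureSession.CaptureCallback {
    @Override
    public void onCaptureProgressed(CameraCaptureSession session,
                                    CaptureRequest request,
                                    CaptureResult partialResult) {
        // Delivered when the device splits metadata into partial results
        // (android.request.partialResultCount > 1); each partial carries only a
        // subset of the final metadata, e.g. an early AF state update.
        Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
        if (afState != null) {
            System.out.println("early AF state: " + afState);
        }
    }
}
```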
-
-
- <tr class="entry" id="static_android.request.availableCapabilities">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>request.<wbr/>available<wbr/>Capabilities
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">BACKWARD_COMPATIBLE</span>
- <span class="entry_type_enum_notes"><p>The minimal set of capabilities that every camera
-device (regardless of <a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a>)
-supports.<wbr/></p>
-<p>This capability is listed by all normal devices,<wbr/> and
-indicates that the camera device has a feature set
-that's comparable to the baseline requirements for the
-older android.<wbr/>hardware.<wbr/>Camera API.<wbr/></p>
-<p>Devices with the DEPTH_<wbr/>OUTPUT capability might not list this
-capability,<wbr/> indicating that they support only depth measurement,<wbr/>
-not standard color output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MANUAL_SENSOR</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>The camera device can be manually controlled (3A algorithms such
-as auto-exposure and auto-focus can be bypassed).<wbr/>
-The camera device supports basic manual control of the sensor image
-acquisition related stages.<wbr/> This means the following controls are
-guaranteed to be supported:</p>
-<ul>
-<li>Manual frame duration control<ul>
-<li><a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a></li>
-<li><a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a></li>
-</ul>
-</li>
-<li>Manual exposure control<ul>
-<li><a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a></li>
-<li><a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></li>
-</ul>
-</li>
-<li>Manual sensitivity control<ul>
-<li><a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a></li>
-<li><a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a></li>
-</ul>
-</li>
-<li>Manual lens control (if the lens is adjustable)<ul>
-<li>android.<wbr/>lens.<wbr/>*</li>
-</ul>
-</li>
-<li>Manual flash control (if a flash unit is present)<ul>
-<li>android.<wbr/>flash.<wbr/>*</li>
-</ul>
-</li>
-<li>Manual black level locking<ul>
-<li><a href="#controls_android.blackLevel.lock">android.<wbr/>black<wbr/>Level.<wbr/>lock</a></li>
-</ul>
-</li>
-<li>Auto exposure lock<ul>
-<li><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></li>
-</ul>
-</li>
-</ul>
-<p>If any of the above 3A algorithms are enabled,<wbr/> then the camera
-device will accurately report the values applied by 3A in the
-result.<wbr/></p>
-<p>A given camera device may also support additional manual sensor controls,<wbr/>
-but this capability only covers the above list of controls.<wbr/></p>
-<p>If this is supported,<wbr/> <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> will
-additionally return a min frame duration that is greater than
-zero for each supported size-format combination.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">MANUAL_POST_PROCESSING</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>The camera device post-processing stages can be manually controlled.<wbr/>
-The camera device supports basic manual control of the image post-processing
-stages.<wbr/> This means the following controls are guaranteed to be supported:</p>
-<ul>
-<li>
-<p>Manual tonemap control</p>
-<ul>
-<li><a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a></li>
-<li><a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a></li>
-<li><a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></li>
-<li><a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a></li>
-<li><a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a></li>
-</ul>
-</li>
-<li>
-<p>Manual white balance control</p>
-<ul>
-<li><a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a></li>
-<li><a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a></li>
-</ul>
-</li>
-<li>Manual lens shading map control<ul>
-<li><a href="#controls_android.shading.mode">android.<wbr/>shading.<wbr/>mode</a></li>
-<li><a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a></li>
-<li><a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a></li>
-<li><a href="#static_android.lens.info.shadingMapSize">android.<wbr/>lens.<wbr/>info.<wbr/>shading<wbr/>Map<wbr/>Size</a></li>
-</ul>
-</li>
-<li>Manual aberration correction control (if aberration correction is supported)<ul>
-<li><a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a></li>
-<li><a href="#static_android.colorCorrection.availableAberrationModes">android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes</a></li>
-</ul>
-</li>
-<li>Auto white balance lock<ul>
-<li><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></li>
-</ul>
-</li>
-</ul>
-<p>If auto white balance is enabled,<wbr/> then the camera device
-will accurately report the values applied by AWB in the result.<wbr/></p>
-<p>A given camera device may also support additional post-processing
-controls,<wbr/> but this capability only covers the above list of controls.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">RAW</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>The camera device supports outputting RAW buffers and
-metadata for interpreting them.<wbr/></p>
-<p>Devices supporting the RAW capability allow both for
-saving DNG files,<wbr/> and for direct application processing of
-raw sensor images.<wbr/></p>
-<ul>
-<li>RAW_<wbr/>SENSOR is supported as an output format.<wbr/></li>
-<li>The maximum available resolution for RAW_<wbr/>SENSOR streams
- will match either the value in
- <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a> or
- <a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a>.<wbr/></li>
-<li>All DNG-related optional metadata entries are provided
- by the camera device.<wbr/></li>
-</ul></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PRIVATE_REPROCESSING</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>The camera device supports the Zero Shutter Lag reprocessing use case.<wbr/></p>
-<ul>
-<li>One input stream is supported,<wbr/> that is,<wbr/> <code><a href="#static_android.request.maxNumInputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams</a> == 1</code>.<wbr/></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> is supported as an output/<wbr/>input format,<wbr/>
- that is,<wbr/> <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> is included in the lists of
- formats returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a> and <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputFormats">StreamConfigurationMap#getOutputFormats</a>.<wbr/></li>
-<li><a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getValidOutputFormatsForInput">StreamConfigurationMap#getValidOutputFormatsForInput</a>
-  returns a non-empty int[] for each supported input format returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a>.<wbr/></li>
-<li>Each size returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputSizes">getInputSizes(ImageFormat.<wbr/>PRIVATE)</a> is also included in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">getOutputSizes(ImageFormat.<wbr/>PRIVATE)</a></li>
-<li>Using <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> does not cause a frame rate drop
- relative to the sensor's maximum capture rate (at that resolution).<wbr/></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> will be reprocessable into both
- <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> and
- <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a> formats.<wbr/></li>
-<li>The maximum available resolution for PRIVATE streams
- (both input/<wbr/>output) will match the maximum available
- resolution of JPEG streams.<wbr/></li>
-<li>Static metadata <a href="#static_android.reprocess.maxCaptureStall">android.<wbr/>reprocess.<wbr/>max<wbr/>Capture<wbr/>Stall</a>.<wbr/></li>
-<li>Only the below controls are effective for reprocessing requests and
-  will be present in capture results;<wbr/> other controls in reprocess
-  requests will be ignored by the camera device.<wbr/><ul>
-<li>android.<wbr/>jpeg.<wbr/>*</li>
-<li><a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a></li>
-<li><a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a></li>
-</ul>
-</li>
-<li><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a> and
- <a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a> will both list ZERO_<wbr/>SHUTTER_<wbr/>LAG as a supported mode.<wbr/></li>
-</ul></span>
- </li>
- <li>
- <span class="entry_type_enum_name">READ_SENSOR_SETTINGS</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>The camera device supports accurately reporting the sensor settings for many of
-the sensor controls while the built-in 3A algorithm is running.<wbr/> This allows
-reporting of sensor settings even when these settings cannot be manually changed.<wbr/></p>
-<p>The values reported for the following controls are guaranteed to be available
-in the CaptureResult,<wbr/> including when 3A is enabled:</p>
-<ul>
-<li>Exposure control<ul>
-<li><a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a></li>
-</ul>
-</li>
-<li>Sensitivity control<ul>
-<li><a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a></li>
-</ul>
-</li>
-<li>Lens controls (if the lens is adjustable)<ul>
-<li><a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a></li>
-<li><a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a></li>
-</ul>
-</li>
-</ul>
-<p>This capability is a subset of the MANUAL_<wbr/>SENSOR control capability,<wbr/> and will
-always be included if the MANUAL_<wbr/>SENSOR capability is available.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">BURST_CAPTURE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>The camera device supports capturing high-resolution images at >= 20 frames per
-second,<wbr/> in at least the uncompressed YUV format,<wbr/> when post-processing settings are set
-to FAST.<wbr/> Additionally,<wbr/> maximum-resolution images can be captured at >= 10 frames
-per second.<wbr/> Here,<wbr/> 'high resolution' means at least 8 megapixels,<wbr/> or the maximum
-resolution of the device,<wbr/> whichever is smaller.<wbr/></p>
-<p>More specifically,<wbr/> this means that a size matching the camera device's active array
-size is listed as a supported size for the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> format in either <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">StreamConfigurationMap#getOutputSizes</a> or <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighResolutionOutputSizes">StreamConfigurationMap#getHighResolutionOutputSizes</a>,<wbr/>
-with a minimum frame duration for that format and size of either <= 1/<wbr/>20 s,<wbr/> or
-<= 1/<wbr/>10 s,<wbr/> respectively; and the <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a> entry
-lists at least one FPS range where the minimum FPS is >= 1 /<wbr/> minimumFrameDuration
-for the maximum-size YUV_<wbr/>420_<wbr/>888 format.<wbr/> If that maximum size is listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighResolutionOutputSizes">StreamConfigurationMap#getHighResolutionOutputSizes</a>,<wbr/>
-then the list of resolutions for YUV_<wbr/>420_<wbr/>888 from <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">StreamConfigurationMap#getOutputSizes</a> contains at
-least one resolution >= 8 megapixels,<wbr/> with a minimum frame duration of <= 1/<wbr/>20
-s.<wbr/></p>
-<p>If the device supports <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">ImageFormat#RAW10</a> or <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">ImageFormat#RAW12</a>,<wbr/> then those can also be captured at the same rate
-as the maximum-size YUV_<wbr/>420_<wbr/>888 resolution.<wbr/></p>
-<p>If the device supports the PRIVATE_<wbr/>REPROCESSING capability,<wbr/> then the same guarantees
-as for the YUV_<wbr/>420_<wbr/>888 format also apply to the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> format.<wbr/></p>
-<p>In addition,<wbr/> the <a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a> field is guaranteed to have a value between 0
-and 4,<wbr/> inclusive.<wbr/> <a href="#static_android.control.aeLockAvailable">android.<wbr/>control.<wbr/>ae<wbr/>Lock<wbr/>Available</a> and <a href="#static_android.control.awbLockAvailable">android.<wbr/>control.<wbr/>awb<wbr/>Lock<wbr/>Available</a>
-are also guaranteed to be <code>true</code> so burst capture with these two locks ON yields
-consistent image output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">YUV_REPROCESSING</span>
- <span class="entry_type_enum_optional">[optional]</span>
-        <span class="entry_type_enum_notes"><p>The camera device supports the YUV_<wbr/>420_<wbr/>888 reprocessing use case,<wbr/> similar to
-PRIVATE_<wbr/>REPROCESSING.<wbr/> This capability requires the camera device to support the
-following:</p>
-<ul>
-<li>One input stream is supported,<wbr/> that is,<wbr/> <code><a href="#static_android.request.maxNumInputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams</a> == 1</code>.<wbr/></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> is supported as an output/<wbr/>input format,<wbr/> that is,<wbr/>
- YUV_<wbr/>420_<wbr/>888 is included in the lists of formats returned by
- <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a> and
- <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputFormats">StreamConfigurationMap#getOutputFormats</a>.<wbr/></li>
-<li><a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getValidOutputFormatsForInput">StreamConfigurationMap#getValidOutputFormatsForInput</a>
- returns non-empty int[] for each supported input format returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a>.<wbr/></li>
-<li>Each size returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputSizes">get<wbr/>Input<wbr/>Sizes(YUV_<wbr/>420_<wbr/>888)</a> is also included in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">get<wbr/>Output<wbr/>Sizes(YUV_<wbr/>420_<wbr/>888)</a></li>
-<li>Using <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> does not cause a frame rate drop
- relative to the sensor's maximum capture rate (at that resolution).<wbr/></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> will be reprocessable into both
- <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> and <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a> formats.<wbr/></li>
-<li>The maximum available resolution for <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> streams (both input/<wbr/>output) will match the
- maximum available resolution of <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a> streams.<wbr/></li>
-<li>Static metadata <a href="#static_android.reprocess.maxCaptureStall">android.<wbr/>reprocess.<wbr/>max<wbr/>Capture<wbr/>Stall</a>.<wbr/></li>
-<li>Only the below controls are effective for reprocessing requests and will be present
- in capture results.<wbr/> The reprocess requests are from the original capture results that
- are associated with the intermediate <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a>
- output buffers.<wbr/> All other controls in the reprocess requests will be ignored by the
- camera device.<wbr/><ul>
-<li>android.<wbr/>jpeg.<wbr/>*</li>
-<li><a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a></li>
-<li><a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a></li>
-<li><a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a></li>
-</ul>
-</li>
-<li><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a> and
- <a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a> will both list ZERO_<wbr/>SHUTTER_<wbr/>LAG as a supported mode.<wbr/></li>
-</ul></span>
- </li>
- <li>
- <span class="entry_type_enum_name">DEPTH_OUTPUT</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>The camera device can produce depth measurements from its field of view.<wbr/></p>
-<p>This capability requires the camera device to support the following:</p>
-<ul>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH16">ImageFormat#DEPTH16</a> is supported as an output format.<wbr/></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH_POINT_CLOUD">Image<wbr/>Format#DEPTH_<wbr/>POINT_<wbr/>CLOUD</a> is optionally supported as an
- output format.<wbr/></li>
-<li>This camera device,<wbr/> and all camera devices with the same <a href="#static_android.lens.facing">android.<wbr/>lens.<wbr/>facing</a>,<wbr/>
- will list the following calibration entries in both
- <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a> and
- <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">CaptureResult</a>:<ul>
-<li><a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a></li>
-<li><a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a></li>
-<li><a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a></li>
-<li><a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a></li>
-</ul>
-</li>
-<li>The <a href="#static_android.depth.depthIsExclusive">android.<wbr/>depth.<wbr/>depth<wbr/>Is<wbr/>Exclusive</a> entry is listed by this device.<wbr/></li>
-<li>A LIMITED camera with only the DEPTH_<wbr/>OUTPUT capability does not have to support
- normal YUV_<wbr/>420_<wbr/>888,<wbr/> JPEG,<wbr/> and PRIV-format outputs.<wbr/> It only has to support the DEPTH16
- format.<wbr/></li>
-</ul>
-<p>Generally,<wbr/> depth output operates at a slower frame rate than standard color capture,<wbr/>
-so the DEPTH16 and DEPTH_<wbr/>POINT_<wbr/>CLOUD formats will commonly have a stall duration that
-should be accounted for (see
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>).<wbr/>
-On a device that supports both depth and color-based output,<wbr/> to enable smooth preview,<wbr/>
-using a repeating burst is recommended,<wbr/> where a depth-output target is only included
-once every N frames,<wbr/> where N is the ratio between preview output rate and depth output
-rate,<wbr/> including depth stall time.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CONSTRAINED_HIGH_SPEED_VIDEO</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>The device supports constrained high speed video recording (frame rate >=120fps)
-use case.<wbr/> The camera device will support a high speed capture session created by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>,<wbr/> which
-only accepts high speed request lists created by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>.<wbr/></p>
-<p>A camera device can still support high speed video streaming by advertising the high speed
-FPS ranges in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a>.<wbr/> For this case,<wbr/> all normal
-capture request per frame control and synchronization requirements will apply to
-the high speed fps ranges,<wbr/> the same as all other fps ranges.<wbr/> This capability describes
-the capability of a specialized operating mode with many limitations (see below),<wbr/> which
-is only targeted at high speed video recording.<wbr/></p>
-<p>The supported high speed video sizes and fps ranges are specified in
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoFpsRanges">StreamConfigurationMap#getHighSpeedVideoFpsRanges</a>.<wbr/>
-To get desired output frame rates,<wbr/> the application is only allowed to select video size
-and FPS range combinations provided by
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoSizes">StreamConfigurationMap#getHighSpeedVideoSizes</a>.<wbr/>
-The fps range can be controlled via <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a>.<wbr/></p>
-<p>In this capability,<wbr/> the camera device will override aeMode,<wbr/> awbMode,<wbr/> and afMode to
-ON,<wbr/> AUTO,<wbr/> and CONTINUOUS_<wbr/>VIDEO,<wbr/> respectively.<wbr/> All post-processing block mode
-controls will be overridden to be FAST.<wbr/> Therefore,<wbr/> no manual control of capture
-and post-processing parameters is possible.<wbr/> All other controls operate the
-same as when <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == AUTO.<wbr/> This means that all other
-android.<wbr/>control.<wbr/>* fields continue to work,<wbr/> such as</p>
-<ul>
-<li><a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a></li>
-<li><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a></li>
-<li><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></li>
-<li><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></li>
-<li><a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a></li>
-<li><a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a></li>
-<li><a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a></li>
-<li><a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a></li>
-<li><a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a></li>
-<li><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a></li>
-</ul>
-<p>Outside of android.<wbr/>control.<wbr/>*,<wbr/> the following controls will work:</p>
-<ul>
-<li><a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> (TORCH mode only,<wbr/> automatic flash for still capture will not
-work since aeMode is ON)</li>
-<li><a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a> (if it is supported)</li>
-<li><a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a></li>
-<li><a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> (if it is supported)</li>
-</ul>
-<p>For the high speed recording use case,<wbr/> the actual maximum supported frame rate may
-be lower than what the camera can output,<wbr/> depending on the destination Surfaces for
-the image data.<wbr/> For example,<wbr/> if the destination surface is from a video encoder,<wbr/>
-the application needs to check if the video encoder is capable of supporting the
-high frame rate for a given video size,<wbr/> or it will end up with a lower recording
-frame rate.<wbr/> If the destination surface is from a preview window,<wbr/> the actual preview frame
-rate will be bounded by the screen refresh rate.<wbr/></p>
-<p>The camera device will only support up to 2 high speed simultaneous output surfaces
-(preview and recording surfaces)
-in this mode.<wbr/> The above controls will be effective only if all of the below conditions are true:</p>
-<ul>
-<li>The application creates a camera capture session with no more than 2 surfaces via
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>.<wbr/> The
-targeted surfaces must be preview surface (either from
-<a href="https://developer.android.com/reference/android/view/SurfaceView.html">SurfaceView</a> or <a href="https://developer.android.com/reference/android/graphics/SurfaceTexture.html">SurfaceTexture</a>) or
-recording surface (either from <a href="https://developer.android.com/reference/android/media/MediaRecorder.html#getSurface">MediaRecorder#getSurface</a> or
-<a href="https://developer.android.com/reference/android/media/MediaCodec.html#createInputSurface">MediaCodec#createInputSurface</a>).<wbr/></li>
-<li>The stream sizes are selected from the sizes reported by
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoSizes">StreamConfigurationMap#getHighSpeedVideoSizes</a>.<wbr/></li>
-<li>The FPS ranges are selected from
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoFpsRanges">StreamConfigurationMap#getHighSpeedVideoFpsRanges</a>.<wbr/></li>
-</ul>
-<p>When the above conditions are NOT satisfied,<wbr/>
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>
-will fail.<wbr/></p>
-<p>Switching to an FPS range that has a different maximum FPS may trigger some camera device
-reconfigurations,<wbr/> which may introduce extra latency.<wbr/> It is recommended that
-the application avoids unnecessary maximum target FPS changes as much as possible
-during high speed streaming.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of capabilities that this camera device
-advertises as fully supporting.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>A capability is a contract that the camera device makes in order
-to be able to satisfy one or more use cases.<wbr/></p>
-<p>Listing a capability guarantees that the whole set of features
-required to support a common use will all be available.<wbr/></p>
-<p>Using a subset of the functionality provided by an unsupported
-capability may be possible on a specific camera device implementation;
-to do this,<wbr/> query each of <a href="#static_android.request.availableRequestKeys">android.<wbr/>request.<wbr/>available<wbr/>Request<wbr/>Keys</a>,<wbr/>
-<a href="#static_android.request.availableResultKeys">android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys</a>,<wbr/>
-<a href="#static_android.request.availableCharacteristicsKeys">android.<wbr/>request.<wbr/>available<wbr/>Characteristics<wbr/>Keys</a>.<wbr/></p>
-<p>The following capabilities are guaranteed to be available on
-<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> <code>==</code> FULL devices:</p>
-<ul>
-<li>MANUAL_<wbr/>SENSOR</li>
-<li>MANUAL_<wbr/>POST_<wbr/>PROCESSING</li>
-</ul>
-<p>Other capabilities may be available on either FULL or LIMITED
-devices,<wbr/> but the application should query this key to be sure.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Additional constraint details per-capability will be available
-in the Compatibility Test Suite.<wbr/></p>
-<p>Minimum baseline requirements required for the
-BACKWARD_<wbr/>COMPATIBLE capability are not explicitly listed.<wbr/>
-Instead refer to "BC" tags and the camera CTS tests in the
-android.<wbr/>hardware.<wbr/>camera2.<wbr/>cts package.<wbr/></p>
-<p>Listed controls that can be either request or result (e.<wbr/>g.<wbr/>
-<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>) must be available both in the
-request and the result in order to be considered to be
-capability-compliant.<wbr/></p>
-<p>For example,<wbr/> if the HAL claims to support MANUAL control,<wbr/>
-then exposure time must be configurable via the request <em>and</em>
-the actual exposure applied must be available via
-the result.<wbr/></p>
-<p>If MANUAL_<wbr/>SENSOR is omitted,<wbr/> the HAL may choose to omit the
-<a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a> static property entirely.<wbr/></p>
-<p>For PRIVATE_<wbr/>REPROCESSING and YUV_<wbr/>REPROCESSING capabilities,<wbr/> see
-hardware/<wbr/>libhardware/<wbr/>include/<wbr/>hardware/<wbr/>camera3.<wbr/>h Section 10 for more information.<wbr/></p>
-<p>Devices that support the MANUAL_<wbr/>SENSOR capability must support the
-CAMERA3_<wbr/>TEMPLATE_<wbr/>MANUAL template defined in camera3.<wbr/>h.<wbr/></p>
-<p>Devices that support the PRIVATE_<wbr/>REPROCESSING capability or the
-YUV_<wbr/>REPROCESSING capability must support the
-CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template defined in camera3.<wbr/>h.<wbr/></p>
-<p>For DEPTH_<wbr/>OUTPUT,<wbr/> the depth-format keys
-<a href="#static_android.depth.availableDepthStreamConfigurations">android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Stream<wbr/>Configurations</a>,<wbr/>
-<a href="#static_android.depth.availableDepthMinFrameDurations">android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Min<wbr/>Frame<wbr/>Durations</a>,<wbr/>
-<a href="#static_android.depth.availableDepthStallDurations">android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Stall<wbr/>Durations</a> must be available,<wbr/> in
-addition to the other keys explicitly mentioned in the DEPTH_<wbr/>OUTPUT
-enum notes.<wbr/> The entry <a href="#static_android.depth.maxDepthSamples">android.<wbr/>depth.<wbr/>max<wbr/>Depth<wbr/>Samples</a> must be available
-if the DEPTH_<wbr/>POINT_<wbr/>CLOUD format is supported (HAL pixel format BLOB,<wbr/> dataspace
-DEPTH).<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.request.availableRequestKeys">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>request.<wbr/>available<wbr/>Request<wbr/>Keys
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A list of all keys that the camera device has available
-to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html">CaptureRequest</a>.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Attempting to set a key into a CaptureRequest that is not
-listed here will result in an invalid request and will be rejected
-by the camera device.<wbr/></p>
-<p>This field can be used to query the feature set of a camera device
-at a more granular level than capabilities.<wbr/> This is especially
-important for optional keys that are not listed under any capability
-in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
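-<p>As an illustration only (this is not part of the key definition),<wbr/> the sketch below
-assumes the camera metadata C API from system/<wbr/>media/<wbr/>camera and shows how a client or
-test could check whether a particular request key,<wbr/> e.<wbr/>g.<wbr/> ANDROID_<wbr/>SENSOR_<wbr/>EXPOSURE_<wbr/>TIME,<wbr/>
-is advertised in this list:</p>
-<pre><code>#include <stdbool.h>
-#include <stdint.h>
-#include <system/camera_metadata.h>
-
-// Returns true if 'keyTag' appears in ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS
-// of the static metadata. Illustration only.
-static bool is_request_key_available(const camera_metadata_t *static_info,
-                                     uint32_t keyTag) {
-    camera_metadata_ro_entry_t entry;
-    if (find_camera_metadata_ro_entry(static_info,
-            ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry) != 0) {
-        return false;  // key list not present at all
-    }
-    for (size_t i = 0; i < entry.count; i++) {
-        if ((uint32_t)entry.data.i32[i] == keyTag) return true;
-    }
-    return false;
-}
-
-// Usage: only set a key in a CaptureRequest if it is listed, e.g.
-// is_request_key_available(static_info, ANDROID_SENSOR_EXPOSURE_TIME).
-</code></pre>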
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Vendor tags must not be listed here.<wbr/> Use the vendor tag metadata
-extensions C API instead (refer to camera3.<wbr/>h for more details).<wbr/></p>
-<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
-vendor extensions API and not against this field.<wbr/></p>
-<p>The HAL must not consume any request tags that are not listed either
-here or in the vendor tag list.<wbr/></p>
-<p>The public camera2 API will always make the vendor tags visible
-via
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.request.availableResultKeys">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A list of all keys that the camera device has available
-to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">CaptureResult</a>.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Attempting to get a key from a CaptureResult that is not
-listed here will always return a <code>null</code> value.<wbr/> Getting a key from
-a CaptureResult that is listed here will generally never return a <code>null</code>
-value.<wbr/></p>
-<p>The following keys may return <code>null</code> unless they are enabled:</p>
-<ul>
-<li><a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> (non-null iff <a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> == ON)</li>
-</ul>
-<p>(Those sometimes-null keys will nevertheless be listed here
-if they are available.<wbr/>)</p>
-<p>This field can be used to query the feature set of a camera device
-at a more granular level than capabilities.<wbr/> This is especially
-important for optional keys that are not listed under any capability
-in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
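-<p>As an illustration only (not part of the key definition),<wbr/> a sketch assuming the
-camera metadata C API: a sometimes-null key such as <a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a>
-should only be expected to carry data when the corresponding mode is ON in the same result:</p>
-<pre><code>#include <stdbool.h>
-#include <system/camera_metadata.h>
-
-// Returns true if a lens shading map is expected in this capture result,
-// i.e. android.statistics.lensShadingMapMode == ON. Illustration only.
-static bool shading_map_expected(const camera_metadata_t *result) {
-    camera_metadata_ro_entry_t mode;
-    if (find_camera_metadata_ro_entry(result,
-            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &mode) != 0 ||
-        mode.count == 0) {
-        return false;
-    }
-    return mode.data.u8[0] == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
-}
-</code></pre>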
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Tags listed here must always have an entry in the result metadata,<wbr/>
-even if that size is 0 elements.<wbr/> Only array-type tags (e.<wbr/>g.<wbr/> lists,<wbr/>
-matrices,<wbr/> strings) are allowed to have 0 elements.<wbr/></p>
-<p>Vendor tags must not be listed here.<wbr/> Use the vendor tag metadata
-extensions C API instead (refer to camera3.<wbr/>h for more details).<wbr/></p>
-<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
-vendor extensions API and not against this field.<wbr/></p>
-<p>The HAL must not produce any result tags that are not listed either
-here or in the vendor tag list.<wbr/></p>
-<p>The public camera2 API will always make the vendor tags visible via <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureResultKeys">CameraCharacteristics#getAvailableCaptureResultKeys</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.request.availableCharacteristicsKeys">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>request.<wbr/>available<wbr/>Characteristics<wbr/>Keys
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A list of all keys that the camera device has available
-to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry follows the same rules as
-<a href="#static_android.request.availableResultKeys">android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys</a> (except that it applies for
-CameraCharacteristics instead of CaptureResult).<wbr/> See above for more
-details.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Keys listed here must always have an entry in the static info metadata,<wbr/>
-even if that size is 0 elements.<wbr/> Only array-type tags (e.<wbr/>g.<wbr/> lists,<wbr/>
-matrices,<wbr/> strings) are allowed to have 0 elements.<wbr/></p>
-<p>Vendor tags must not be listed here.<wbr/> Use the vendor tag metadata
-extensions C API instead (refer to camera3.<wbr/>h for more details).<wbr/></p>
-<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
-vendor extensions API and not against this field.<wbr/></p>
-<p>The HAL must not have any tags in its static info that are not listed
-either here or in the vendor tag list.<wbr/></p>
-<p>The public camera2 API will always make the vendor tags visible
-via <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getKeys">CameraCharacteristics#getKeys</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.request.frameCount">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>request.<wbr/>frame<wbr/>Count
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [hidden]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A frame counter set by the framework.<wbr/> This value monotonically
-increases with every new result (that is,<wbr/> each new result has a unique
-frameCount value).<wbr/></p>
- </td>
-
- <td class="entry_units">
- count of frames
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- <p>> 0</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Reset on release()</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.request.id">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>request.<wbr/>id
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [hidden]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>An application-specified ID for the current
-request.<wbr/> Must be maintained unchanged in the output
-frame.<wbr/></p>
- </td>
-
- <td class="entry_units">
- arbitrary integer assigned by application
- </td>
-
- <td class="entry_range">
- <p>Any int</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.request.metadataMode">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>request.<wbr/>metadata<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">NONE</span>
- <span class="entry_type_enum_notes"><p>No metadata should be produced on output,<wbr/> except
-for application-bound buffer data.<wbr/> If no
-application-bound streams exist,<wbr/> no frame should be
-placed in the output frame queue.<wbr/> If such streams
-exist,<wbr/> a frame should be placed on the output queue
-with null metadata but with the necessary output buffer
-information.<wbr/> Timestamp information should still be
-included with any output stream buffers</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FULL</span>
- <span class="entry_type_enum_notes"><p>All metadata should be produced.<wbr/> Statistics will
-only be produced if they are separately
-enabled</p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>How much metadata to produce on
-output</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.request.outputStreams">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>request.<wbr/>output<wbr/>Streams
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Lists which camera output streams image data
-from this capture must be sent to</p>
- </td>
-
- <td class="entry_units">
- List of camera stream IDs
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- <p>List must only include streams that have been
-created</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_HAL2">HAL2</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If no output streams are listed,<wbr/> then the image
-data should simply be discarded.<wbr/> The image data must
-still be captured for metadata and statistics production,<wbr/>
-and the lens and flash must operate as requested.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.request.pipelineDepth">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>request.<wbr/>pipeline<wbr/>Depth
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Specifies the number of pipeline stages the frame went
-through from when it was exposed to when the final completed result
-was available to the framework.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><= <a href="#static_android.request.pipelineMaxDepth">android.<wbr/>request.<wbr/>pipeline<wbr/>Max<wbr/>Depth</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Depending on what settings are used in the request,<wbr/> and
-what streams are configured,<wbr/> the data may undergo less processing,<wbr/>
-and some pipeline stages skipped.<wbr/></p>
-<p>See <a href="#static_android.request.pipelineMaxDepth">android.<wbr/>request.<wbr/>pipeline<wbr/>Max<wbr/>Depth</a> for more details.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This value must always represent the accurate count of how many
-pipeline stages were actually used.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_scaler" class="section">scaler</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.scaler.cropRegion">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>scaler.<wbr/>crop<wbr/>Region
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public as rectangle]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired region of the sensor to read out for this capture.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates relative to
- android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control can be used to implement digital zoom.<wbr/></p>
-<p>The crop region coordinate system is based off
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with <code>(0,<wbr/> 0)</code> being the
-top-left corner of the sensor active array.<wbr/></p>
-<p>Output streams use this rectangle to produce their output,<wbr/>
-cropping to a smaller region if necessary to maintain the
-stream's aspect ratio,<wbr/> then scaling the sensor input to
-match the output's configured resolution.<wbr/></p>
-<p>The crop region is applied after the RAW to other color
-space (e.<wbr/>g.<wbr/> YUV) conversion.<wbr/> Since raw streams
-(e.<wbr/>g.<wbr/> RAW16) don't have the conversion stage,<wbr/> they are not
-croppable.<wbr/> The crop region will be ignored by raw streams.<wbr/></p>
-<p>For non-raw streams,<wbr/> any additional per-stream cropping will
-be done to maximize the final pixel area of the stream.<wbr/></p>
-<p>For example,<wbr/> if the crop region is set to a 4:3 aspect
-ratio,<wbr/> then 4:3 streams will use the exact crop
-region.<wbr/> 16:9 streams will further crop vertically
-(letterbox).<wbr/></p>
-<p>Conversely,<wbr/> if the crop region is set to a 16:9,<wbr/> then 4:3
-outputs will crop horizontally (pillarbox),<wbr/> and 16:9
-streams will match exactly.<wbr/> These additional crops will
-be centered within the crop region.<wbr/></p>
-<p>The width and height of the crop region cannot
-be set to be smaller than
-<code>floor( activeArraySize.<wbr/>width /<wbr/> <a href="#static_android.scaler.availableMaxDigitalZoom">android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom</a> )</code> and
-<code>floor( activeArraySize.<wbr/>height /<wbr/> <a href="#static_android.scaler.availableMaxDigitalZoom">android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom</a> )</code>,<wbr/> respectively.<wbr/></p>
-<p>The camera device may adjust the crop region to account
-for rounding and other hardware requirements; the final
-crop region used will be included in the output capture
-result.<wbr/></p>
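-<p>For illustration only (not a requirement of this entry),<wbr/> the following sketch computes a
-centered crop region for a requested zoom factor,<wbr/> where activeW/<wbr/>activeH stand for
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> and maxDigitalZoom for
-<a href="#static_android.scaler.availableMaxDigitalZoom">android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom</a>:</p>
-<pre><code>#include <stdint.h>
-
-typedef struct { int32_t left, top, width, height; } crop_region_t;
-
-// Compute a centered crop region for 'zoom', clamped so the crop never
-// becomes smaller than allowed by maxDigitalZoom. Illustration only.
-static crop_region_t centered_crop_for_zoom(int32_t activeW, int32_t activeH,
-                                            float zoom, float maxDigitalZoom) {
-    if (zoom < 1.0f) zoom = 1.0f;
-    if (zoom > maxDigitalZoom) zoom = maxDigitalZoom;
-    crop_region_t r;
-    r.width  = (int32_t)(activeW / zoom);
-    r.height = (int32_t)(activeH / zoom);
-    r.left   = (activeW - r.width) / 2;   // center the crop in the active array
-    r.top    = (activeH - r.height) / 2;
-    return r;
-}
-</code></pre>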
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The output streams must maintain square pixels at all
-times,<wbr/> no matter what the relative aspect ratios of the
-crop region and the stream are.<wbr/> Negative values for
-corner are allowed for raw output if full pixel array is
-larger than active pixel array.<wbr/> Width and height may be
-rounded to nearest larger supportable width,<wbr/> especially
-for raw output,<wbr/> where only a few fixed scales may be
-possible.<wbr/></p>
-<p>For a set of configured output streams,<wbr/> if the sensor output is cropped to a smaller
-size than the active array size,<wbr/> the HAL needs to follow the cropping rules below
-(see the illustrative sketch after these rules):</p>
-<ul>
-<li>
-<p>The HAL needs to handle the cropRegion as if the sensor crop size were the effective active
-array size.<wbr/> More specifically,<wbr/> the HAL must transform the request cropRegion from
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> to the sensor cropped pixel area size in this way:</p>
-<ol>
-<li>Translate the requested cropRegion w.<wbr/>r.<wbr/>t.<wbr/> the top-left corner of the sensor
-cropped pixel area by (tx,<wbr/> ty),<wbr/>
-where <code>ty = sensorCrop.<wbr/>top * (sensorCrop.<wbr/>height /<wbr/> activeArraySize.<wbr/>height)</code>
-and <code>tx = sensorCrop.<wbr/>left * (sensorCrop.<wbr/>width /<wbr/> activeArraySize.<wbr/>width)</code>.<wbr/> The
-(sensorCrop.<wbr/>top,<wbr/> sensorCrop.<wbr/>left) is the coordinate based off the
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></li>
-<li>Scale the width and height of requested cropRegion with scaling factor of
-sensor<wbr/>Crop.<wbr/>width/<wbr/>active<wbr/>Array<wbr/>Size.<wbr/>width and sensor<wbr/>Crop.<wbr/>height/<wbr/>active<wbr/>Array<wbr/>Size.<wbr/>height
-respectively.<wbr/>
-Once this new cropRegion is calculated,<wbr/> the HAL must use this region to crop the image
-with regard to the sensor crop size (effective active array size).<wbr/> The HAL still needs to
-follow the general cropping rule for this new cropRegion and the effective active
-array size.<wbr/></li>
-</ol>
-</li>
-<li>
-<p>The HAL must report the cropRegion with regard to <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>
-The HAL needs to convert the new cropRegion generated above back to coordinates w.<wbr/>r.<wbr/>t.<wbr/> the full active array size.<wbr/>
-The reported cropRegion may be slightly different from the requested cropRegion since
-the HAL may adjust the crop region to account for rounding,<wbr/> conversion error,<wbr/> or other
-hardware limitations.<wbr/></p>
-</li>
-</ul>
-<p>HAL2.<wbr/>x uses only (x,<wbr/> y,<wbr/> width)</p>
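-<p>For illustration only,<wbr/> the sketch below is a literal transcription of the two numbered
-steps above; a real HAL would additionally clamp and round the result per the general
-cropping rules:</p>
-<pre><code>#include <stdint.h>
-
-typedef struct { int32_t left, top, width, height; } region_t;
-
-// Map a requested cropRegion (in activeArraySize coordinates) into the
-// sensor-cropped pixel area: translate by (tx, ty), then scale the
-// width and height. Illustration only.
-static region_t map_request_crop_to_sensor_crop(region_t request,
-                                                region_t sensorCrop,
-                                                int32_t activeW,
-                                                int32_t activeH) {
-    float sx = (float)sensorCrop.width  / activeW;   // width scale factor
-    float sy = (float)sensorCrop.height / activeH;   // height scale factor
-    float tx = sensorCrop.left * sx;                 // step 1: translation
-    float ty = sensorCrop.top  * sy;
-    region_t out;
-    out.left   = (int32_t)(request.left - tx);
-    out.top    = (int32_t)(request.top  - ty);
-    out.width  = (int32_t)(request.width  * sx);     // step 2: scaling
-    out.height = (int32_t)(request.height * sy);
-    return out;
-}
-</code></pre>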
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.scaler.availableFormats">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="5">
- android.<wbr/>scaler.<wbr/>available<wbr/>Formats
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [hidden as imageFormat]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">RAW16</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_value">0x20</span>
- <span class="entry_type_enum_notes"><p>RAW16 is a standard,<wbr/> cross-platform format for raw image
-buffers with 16-bit pixels.<wbr/></p>
-<p>Buffers of this format are typically expected to have a
-Bayer Color Filter Array (CFA) layout,<wbr/> which is given in
-<a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a>.<wbr/> Sensors with
-CFAs that are not representable by a format in
-<a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a> should not
-use this format.<wbr/></p>
-<p>Buffers of this format will also follow the constraints given for
-RAW_<wbr/>OPAQUE buffers,<wbr/> but with relaxed performance constraints.<wbr/></p>
-<p>This format is intended to give users access to the full contents
-of the buffers coming directly from the image sensor prior to any
-cropping or scaling operations,<wbr/> and all coordinate systems for
-metadata used for this format are relative to the size of the
-active region of the image sensor before any geometric distortion
-correction has been applied (i.<wbr/>e.<wbr/>
-<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a>).<wbr/> Supported
-dimensions for this format are limited to the full dimensions of
-the sensor (e.<wbr/>g.<wbr/> either <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a> or
-<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> will be the
-only supported output size).<wbr/></p>
-<p>See <a href="#static_android.scaler.availableInputOutputFormatsMap">android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map</a> for
-the full set of performance guarantees.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">RAW_OPAQUE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_value">0x24</span>
- <span class="entry_type_enum_notes"><p>RAW_<wbr/>OPAQUE (or
-<a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_PRIVATE">RAW_<wbr/>PRIVATE</a>
-as referred in public API) is a format for raw image buffers
-coming from an image sensor.<wbr/></p>
-<p>The actual structure of buffers of this format is
-platform-specific,<wbr/> but must follow several constraints:</p>
-<ol>
-<li>No image post-processing operations may have been applied to
-buffers of this type.<wbr/> These buffers contain raw image data coming
-directly from the image sensor.<wbr/></li>
-<li>If a buffer of this format is passed to the camera device for
-reprocessing,<wbr/> the resulting images will be identical to the images
-produced if the buffer had come directly from the sensor and was
-processed with the same settings.<wbr/></li>
-</ol>
-<p>The intended use for this format is to allow access to the native
-raw format buffers coming directly from the camera sensor without
-any additional conversions or decrease in framerate.<wbr/></p>
-<p>See <a href="#static_android.scaler.availableInputOutputFormatsMap">android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map</a> for the full set of
-performance guarantees.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">YV12</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_value">0x32315659</span>
- <span class="entry_type_enum_notes"><p>YCrCb 4:2:0 Planar</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">YCrCb_420_SP</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_value">0x11</span>
- <span class="entry_type_enum_notes"><p>NV21</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">IMPLEMENTATION_DEFINED</span>
- <span class="entry_type_enum_value">0x22</span>
- <span class="entry_type_enum_notes"><p>System internal format,<wbr/> not application-accessible</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">YCbCr_420_888</span>
- <span class="entry_type_enum_value">0x23</span>
- <span class="entry_type_enum_notes"><p>Flexible YUV420 Format</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">BLOB</span>
- <span class="entry_type_enum_value">0x21</span>
- <span class="entry_type_enum_notes"><p>JPEG format</p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The list of image formats that are supported by this
-camera device for output streams.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>All camera devices will support JPEG and YUV_<wbr/>420_<wbr/>888 formats.<wbr/></p>
-<p>When set to YUV_<wbr/>420_<wbr/>888,<wbr/> the application can access the YUV420 data directly.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>These format values are from HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>* in
-system/<wbr/>core/<wbr/>include/<wbr/>system/<wbr/>graphics.<wbr/>h.<wbr/></p>
-<p>When IMPLEMENTATION_<wbr/>DEFINED is used,<wbr/> the platform
-gralloc module will select a format based on the usage flags provided
-by the camera HAL device and the other endpoint of the stream.<wbr/> It is
-usually used by preview and recording streams,<wbr/> where the application doesn't
-need to access the image data.<wbr/></p>
-<p>YCb<wbr/>Cr_<wbr/>420_<wbr/>888 format must be supported by the HAL.<wbr/> When an image stream
-needs CPU/<wbr/>application direct access,<wbr/> this format will be used.<wbr/></p>
-<p>The BLOB format must be supported by the HAL.<wbr/> This is used for the JPEG stream.<wbr/></p>
-<p>A RAW_<wbr/>OPAQUE buffer should contain only pixel data.<wbr/> It is strongly
-recommended that any information used by the camera device when
-processing images is fully expressed by the result metadata
-for that image buffer.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableJpegMinDurations">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Min<wbr/>Durations
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [hidden]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The minimum frame duration that is supported
-for each resolution in <a href="#static_android.scaler.availableJpegSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Sizes</a>.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- <p>TODO: Remove property.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This corresponds to the minimum steady-state frame duration when only
-that JPEG stream is active and captured in a burst,<wbr/> with all
-processing (typically in android.<wbr/>*.<wbr/>mode) set to FAST.<wbr/></p>
-<p>When multiple streams are configured,<wbr/> the minimum
-frame duration will be >= max(individual stream min
-durations)</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableJpegSizes">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="5">
- android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Sizes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 2
- </span>
- <span class="entry_type_visibility"> [hidden as size]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The JPEG resolutions that are supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- <p>TODO: Remove property.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The resolutions are listed as <code>(width,<wbr/> height)</code> pairs.<wbr/> All camera devices will support
-sensor maximum resolution (defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>).<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The HAL must include sensor maximum resolution
-(defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>),<wbr/>
-and should include half/<wbr/>quarter of sensor maximum resolution.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableMaxDigitalZoom">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum ratio between both active area width
-and crop region width,<wbr/> and active area height and
-crop region height,<wbr/> for <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Zoom scale factor
- </td>
-
- <td class="entry_range">
- <p>>=1</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This represents the maximum amount of zooming possible by
-the camera device,<wbr/> or equivalently,<wbr/> the minimum cropping
-window size.<wbr/></p>
-<p>Crop regions that have a width or height that is smaller
-than this ratio allows will be rounded up to the minimum
-allowed size by the camera device.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableProcessedMinDurations">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>scaler.<wbr/>available<wbr/>Processed<wbr/>Min<wbr/>Durations
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [hidden]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>For each available processed output size (defined in
-<a href="#static_android.scaler.availableProcessedSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Processed<wbr/>Sizes</a>),<wbr/> this property lists the
-minimum supportable frame duration for that size.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This should correspond to the frame duration when only that processed
-stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
-set to FAST.<wbr/></p>
-<p>When multiple streams are configured,<wbr/> the minimum frame duration will
-be >= max(individual stream min durations).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableProcessedSizes">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="5">
- android.<wbr/>scaler.<wbr/>available<wbr/>Processed<wbr/>Sizes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 2
- </span>
- <span class="entry_type_visibility"> [hidden as size]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The resolutions available for use with
-processed output streams,<wbr/> such as YV12,<wbr/> NV12,<wbr/> and
-platform opaque YUV/<wbr/>RGB streams to the GPU or video
-encoders.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The resolutions are listed as <code>(width,<wbr/> height)</code> pairs.<wbr/></p>
-<p>For a given use case,<wbr/> the actual maximum supported resolution
-may be lower than what is listed here,<wbr/> depending on the destination
-Surface for the image data.<wbr/> For example,<wbr/> for recording video,<wbr/>
-the video encoder chosen may have a maximum size limit (e.<wbr/>g.<wbr/> 1080p)
-smaller than what the camera (e.<wbr/>g.<wbr/> maximum resolution is 3264x2448)
-can provide.<wbr/></p>
-<p>Please reference the documentation for the image data destination to
-check if it limits the maximum size for image data.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For FULL capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>),<wbr/>
-the HAL must include all JPEG sizes listed in <a href="#static_android.scaler.availableJpegSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Sizes</a>
-and each below resolution if it is smaller than or equal to the sensor
-maximum resolution (if they are not listed in JPEG sizes already):</p>
-<ul>
-<li>240p (320 x 240)</li>
-<li>480p (640 x 480)</li>
-<li>720p (1280 x 720)</li>
-<li>1080p (1920 x 1080)</li>
-</ul>
-<p>For LIMITED capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>),<wbr/>
-the HAL only has to list up to the maximum video size supported by the device.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableRawMinDurations">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>scaler.<wbr/>available<wbr/>Raw<wbr/>Min<wbr/>Durations
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>For each available raw output size (defined in
-<a href="#static_android.scaler.availableRawSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Raw<wbr/>Sizes</a>),<wbr/> this property lists the minimum
-supportable frame duration for that size.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Should correspond to the frame duration when only the raw stream is
-active.<wbr/></p>
-<p>When multiple streams are configured,<wbr/> the minimum
-frame duration will be >= max(individual stream min
-durations)</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableRawSizes">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="1">
- android.<wbr/>scaler.<wbr/>available<wbr/>Raw<wbr/>Sizes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 2
- </span>
- <span class="entry_type_visibility"> [system as size]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The resolutions available for use with raw
-sensor output streams,<wbr/> listed as width,<wbr/>
-height</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableInputOutputFormatsMap">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [hidden as reprocessFormatsMap]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The mapping of image formats that are supported by this
-camera device for input streams,<wbr/> to their corresponding output formats.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>All camera devices with at least 1
-<a href="#static_android.request.maxNumInputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams</a> will have at least one
-available input format.<wbr/></p>
-<p>The camera device will support the following map of formats,<wbr/>
-if its dependent capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>) is supported:</p>
-<table>
-<thead>
-<tr>
-<th align="left">Input Format</th>
-<th align="left">Output Format</th>
-<th align="left">Capability</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a></td>
-<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
-<td align="left">PRIVATE_<wbr/>REPROCESSING</td>
-</tr>
-<tr>
-<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a></td>
-<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
-<td align="left">PRIVATE_<wbr/>REPROCESSING</td>
-</tr>
-<tr>
-<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
-<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
-<td align="left">YUV_<wbr/>REPROCESSING</td>
-</tr>
-<tr>
-<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
-<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
-<td align="left">YUV_<wbr/>REPROCESSING</td>
-</tr>
-</tbody>
-</table>
-<p>PRIVATE refers to a device-internal format that is not directly application-visible.<wbr/> A
-PRIVATE input surface can be acquired by <a href="https://developer.android.com/reference/android/media/ImageReader.html#newInstance">ImageReader#newInstance</a>
-with <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> as the format.<wbr/></p>
-<p>For a PRIVATE_<wbr/>REPROCESSING-capable camera device,<wbr/> using the PRIVATE format as either input
-or output will never hurt maximum frame rate (i.<wbr/>e.<wbr/> <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">getOutputStallDuration(ImageFormat.<wbr/>PRIVATE,<wbr/> size)</a> is always 0).<wbr/></p>
-<p>Attempting to configure an input stream with output streams not
-listed as available in this map is not valid.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For the formats,<wbr/> see <code>system/<wbr/>core/<wbr/>include/<wbr/>system/<wbr/>graphics.<wbr/>h</code> for a definition
-of the image format enumerations.<wbr/> The PRIVATE format refers to the
-HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>IMPLEMENTATION_<wbr/>DEFINED format.<wbr/> The HAL could determine
-the actual format by using the gralloc usage flags.<wbr/>
-For the ZSL use case in particular,<wbr/> the HAL could choose an appropriate format (partially
-processed YUV or RAW based format) by checking the format and GRALLOC_<wbr/>USAGE_<wbr/>HW_<wbr/>CAMERA_<wbr/>ZSL.<wbr/>
-See camera3.<wbr/>h for more details.<wbr/></p>
-<p>This value is encoded as a variable-size array-of-arrays.<wbr/>
-The inner array always contains <code>[format,<wbr/> length,<wbr/> ...<wbr/>]</code> where
-<code>...<wbr/></code> has <code>length</code> elements.<wbr/> An inner array is followed by another
-inner array if the total metadata entry size hasn't yet been exceeded.<wbr/></p>
-<p>A code sample to read/<wbr/>write this encoding (with a device that
-supports reprocessing IMPLEMENTATION_<wbr/>DEFINED to YUV_<wbr/>420_<wbr/>888,<wbr/> and JPEG,<wbr/>
-and reprocessing YUV_<wbr/>420_<wbr/>888 to YUV_<wbr/>420_<wbr/>888 and JPEG):</p>
-<pre><code>//<wbr/> reading
-int32_<wbr/>t* contents = &entry.<wbr/>i32[0];
-for (size_<wbr/>t i = 0; i < entry.<wbr/>count; ) {
- int32_<wbr/>t format = contents[i++];
- int32_<wbr/>t length = contents[i++];
- int32_<wbr/>t output_<wbr/>formats[length];
- memcpy(&output_<wbr/>formats[0],<wbr/> &contents[i],<wbr/>
- length * sizeof(int32_<wbr/>t));
- i += length;
-}
-
-//<wbr/> writing (static example,<wbr/> PRIVATE_<wbr/>REPROCESSING + YUV_<wbr/>REPROCESSING)
-int32_<wbr/>t contents[] = {
- IMPLEMENTATION_<wbr/>DEFINED,<wbr/> 2,<wbr/> YUV_<wbr/>420_<wbr/>888,<wbr/> BLOB,<wbr/>
- YUV_<wbr/>420_<wbr/>888,<wbr/> 2,<wbr/> YUV_<wbr/>420_<wbr/>888,<wbr/> BLOB,<wbr/>
-};
-update_<wbr/>camera_<wbr/>metadata_<wbr/>entry(metadata,<wbr/> index,<wbr/> &contents[0],<wbr/>
- sizeof(contents)/<wbr/>sizeof(contents[0]),<wbr/> &updated_<wbr/>entry);
-</code></pre>
-<p>If the HAL claims to support any of the capabilities listed in the
-above details,<wbr/> then it must also support all the input-output
-combinations listed for that capability.<wbr/> It can optionally support
-additional formats if it so chooses.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableStreamConfigurations">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 4
- </span>
- <span class="entry_type_visibility"> [ndk_public as streamConfiguration]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OUTPUT</span>
- </li>
- <li>
- <span class="entry_type_enum_name">INPUT</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The available stream configurations that this
-camera device supports
-(i.<wbr/>e.<wbr/> format,<wbr/> width,<wbr/> height,<wbr/> output/<wbr/>input stream).<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The configurations are listed as <code>(format,<wbr/> width,<wbr/> height,<wbr/> input?)</code>
-tuples.<wbr/></p>
-<p>For a given use case,<wbr/> the actual maximum supported resolution
-may be lower than what is listed here,<wbr/> depending on the destination
-Surface for the image data.<wbr/> For example,<wbr/> for recording video,<wbr/>
-the video encoder chosen may have a maximum size limit (e.<wbr/>g.<wbr/> 1080p)
-smaller than what the camera (e.<wbr/>g.<wbr/> maximum resolution is 3264x2448)
-can provide.<wbr/></p>
-<p>Please reference the documentation for the image data destination to
-check if it limits the maximum size for image data.<wbr/></p>
-<p>Not all output formats may be supported in a configuration with
-an input stream of a particular format.<wbr/> For more details,<wbr/> see
-<a href="#static_android.scaler.availableInputOutputFormatsMap">android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map</a>.<wbr/></p>
-<p>The following table describes the minimum required output stream
-configurations based on the hardware level
-(<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a>):</p>
-<table>
-<thead>
-<tr>
-<th align="center">Format</th>
-<th align="center">Size</th>
-<th align="center">Hardware Level</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center">JPEG</td>
-<td align="center"><a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></td>
-<td align="center">Any</td>
-<td align="center"></td>
-</tr>
-<tr>
-<td align="center">JPEG</td>
-<td align="center">1920x1080 (1080p)</td>
-<td align="center">Any</td>
-<td align="center">if 1080p <= activeArraySize</td>
-</tr>
-<tr>
-<td align="center">JPEG</td>
-<td align="center">1280x720 (720)</td>
-<td align="center">Any</td>
-<td align="center">if 720p <= activeArraySize</td>
-</tr>
-<tr>
-<td align="center">JPEG</td>
-<td align="center">640x480 (480p)</td>
-<td align="center">Any</td>
-<td align="center">if 480p <= activeArraySize</td>
-</tr>
-<tr>
-<td align="center">JPEG</td>
-<td align="center">320x240 (240p)</td>
-<td align="center">Any</td>
-<td align="center">if 240p <= activeArraySize</td>
-</tr>
-<tr>
-<td align="center">YUV_<wbr/>420_<wbr/>888</td>
-<td align="center">all output sizes available for JPEG</td>
-<td align="center">FULL</td>
-<td align="center"></td>
-</tr>
-<tr>
-<td align="center">YUV_<wbr/>420_<wbr/>888</td>
-<td align="center">all output sizes available for JPEG,<wbr/> up to the maximum video size</td>
-<td align="center">LIMITED</td>
-<td align="center"></td>
-</tr>
-<tr>
-<td align="center">IMPLEMENTATION_<wbr/>DEFINED</td>
-<td align="center">same as YUV_<wbr/>420_<wbr/>888</td>
-<td align="center">Any</td>
-<td align="center"></td>
-</tr>
-</tbody>
-</table>
-<p>Refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> for additional
-mandatory stream configurations on a per-capability basis.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>It is recommended (but not mandatory) to also include half/<wbr/>quarter
-of sensor maximum resolution for JPEG formats (regardless of hardware
-level).<wbr/></p>
-<p>(The following is a rewording of the above required table):</p>
-<p>For the JPEG format,<wbr/> the sizes may be restricted by the conditions below:</p>
-<ul>
-<li>The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
-(e.<wbr/>g.<wbr/> 4:3,<wbr/> 16:9,<wbr/> 3:2,<wbr/> etc.<wbr/>).<wbr/> If the sensor maximum resolution
-(defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>) has an aspect ratio other than these,<wbr/>
-it does not have to be included in the supported JPEG sizes.<wbr/></li>
-<li>Some hardware JPEG encoders may have pixel boundary alignment requirements,<wbr/> such as
-the dimensions being a multiple of 16.<wbr/></li>
-</ul>
-<p>Therefore,<wbr/> the maximum JPEG size may be smaller than sensor maximum resolution.<wbr/>
-However,<wbr/> the largest JPEG size must be as close as possible to the sensor maximum
-resolution given the above constraints.<wbr/> After aspect ratio adjustments,<wbr/> any
-additional size reduction due to other issues must be less than 3% in area.<wbr/> For example,<wbr/>
-if the sensor maximum resolution is 3280x2464,<wbr/> the maximum JPEG size has aspect
-ratio 4:3,<wbr/> and the JPEG encoder alignment requirement is 16,<wbr/> then the maximum JPEG size will be
-3264x2448.<wbr/></p>
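-<p>For illustration only,<wbr/> and assuming a 4:3 target aspect ratio with a 16-pixel alignment
-requirement on both dimensions (as in the example above),<wbr/> the sketch below finds the
-largest such JPEG size that does not exceed the sensor maximum resolution:</p>
-<pre><code>#include <stdint.h>
-#include <stdio.h>
-
-// With w:h = 4:3 and both dimensions multiples of 16, w must be a multiple
-// of 64 and h the matching multiple of 48. Illustration only.
-static void largest_aligned_4_3(int32_t maxW, int32_t maxH,
-                                int32_t *outW, int32_t *outH) {
-    int32_t m = maxW / 64;
-    while (m > 0 && (m * 48) > maxH) m--;   // also honor the height limit
-    *outW = m * 64;
-    *outH = m * 48;
-}
-
-int main(void) {
-    int32_t w, h;
-    largest_aligned_4_3(3280, 2464, &w, &h);
-    printf("%dx%d\n", w, h);   // prints 3264x2448, a ~1.1% area reduction
-    return 0;
-}
-</code></pre>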
-<p>For FULL capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>),<wbr/>
-the HAL must include all YUV_<wbr/>420_<wbr/>888 sizes that have JPEG sizes listed
-here as output streams.<wbr/></p>
-<p>It must also include each below resolution if it is smaller than or
-equal to the sensor maximum resolution (for both YUV_<wbr/>420_<wbr/>888 and JPEG
-formats),<wbr/> as output streams:</p>
-<ul>
-<li>240p (320 x 240)</li>
-<li>480p (640 x 480)</li>
-<li>720p (1280 x 720)</li>
-<li>1080p (1920 x 1080)</li>
-</ul>
-<p>For LIMITED capability devices
-(<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>),<wbr/>
-the HAL only has to list up to the maximum video size
-supported by the device.<wbr/></p>
-<p>Regardless of hardware level,<wbr/> every output resolution available for
-YUV_<wbr/>420_<wbr/>888 must also be available for IMPLEMENTATION_<wbr/>DEFINED.<wbr/></p>
-<p>This supersedes the following fields,<wbr/> which are now deprecated:</p>
-<ul>
-<li>availableFormats</li>
-<li>available[Processed,<wbr/>Raw,<wbr/>Jpeg]Sizes</li>
-</ul>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableMinFrameDurations">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4 x n
- </span>
- <span class="entry_type_visibility"> [ndk_public as streamConfigurationDuration]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>This lists the minimum frame duration for each
-format/<wbr/>size combination.<wbr/></p>
- </td>
-
- <td class="entry_units">
- (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This should correspond to the frame duration when only that
-stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
-set to either OFF or FAST.<wbr/></p>
-<p>When multiple streams are used in a request,<wbr/> the minimum frame
-duration will be max(individual stream min durations).<wbr/></p>
-<p>The minimum frame duration of a stream (of a particular format,<wbr/> size)
-is the same regardless of whether the stream is input or output.<wbr/></p>
-<p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> and
-<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for more details about
-calculating the max frame rate.<wbr/></p>
-<p>(Keep in sync with
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>)</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.availableStallDurations">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4 x n
- </span>
- <span class="entry_type_visibility"> [ndk_public as streamConfigurationDuration]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>This lists the maximum stall duration for each
-output format/<wbr/>size combination.<wbr/></p>
- </td>
-
- <td class="entry_units">
- (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>A stall duration is how much extra time would get added
-to the normal minimum frame duration for a repeating request
-that has streams with non-zero stall.<wbr/></p>
-<p>For example,<wbr/> consider JPEG captures which have the following
-characteristics:</p>
-<ul>
-<li>JPEG streams act like processed YUV streams in requests for which
-they are not included; in requests in which they are directly
-referenced,<wbr/> they act as JPEG streams.<wbr/> This is because supporting a
-JPEG stream requires the underlying YUV data to always be ready for
-use by a JPEG encoder,<wbr/> but the encoder will only be used (and impact
-frame duration) on requests that actually reference a JPEG stream.<wbr/></li>
-<li>The JPEG processor can run concurrently to the rest of the camera
-pipeline,<wbr/> but cannot process more than 1 capture at a time.<wbr/></li>
-</ul>
-<p>In other words,<wbr/> using a repeating YUV request would result
-in a steady frame rate (let's say it's 30 FPS).<wbr/> If a single
-JPEG request is submitted periodically,<wbr/> the frame rate will stay
-at 30 FPS (as long as we wait for the previous JPEG to return each
-time).<wbr/> If we try to submit a repeating YUV + JPEG request,<wbr/> then
-the frame rate will drop from 30 FPS.<wbr/></p>
-<p>In general,<wbr/> submitting a new request with a non-0 stall time
-stream will <em>not</em> cause a frame rate drop unless there are still
-outstanding buffers for that stream from previous requests.<wbr/></p>
-<p>Submitting a repeating request with a set of streams (call this <code>S</code>)
-is equivalent to setting the minimum frame duration to
-the normal minimum frame duration corresponding to <code>S</code> plus
-the maximum stall duration for <code>S</code>.<wbr/></p>
-<p>If interleaving requests with and without a stall duration,<wbr/>
-a request will stall by the maximum of the remaining times
-for each can-stall stream with outstanding buffers.<wbr/></p>
-<p>This means that a stalling request will not have an exposure start
-until the stall has completed.<wbr/></p>
-<p>This should correspond to the stall duration when only that stream is
-active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode) set to FAST
-or OFF.<wbr/> Setting any of the processing modes to HIGH_<wbr/>QUALITY
-effectively results in an indeterminate stall duration for all
-streams in a request (the regular stall calculation rules are
-ignored).<wbr/></p>
-<p>The following formats may always have a stall duration:</p>
-<ul>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_SENSOR">ImageFormat#RAW_<wbr/>SENSOR</a></li>
-</ul>
-<p>The following formats will never have a stall duration:</p>
-<ul>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">ImageFormat#RAW10</a></li>
-</ul>
-<p>All other formats may or may not have an allowed stall duration on
-a per-capability basis; refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>
-for more details.<wbr/></p>
-<p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> for more information about
-calculating the max frame rate (absent stalls).<wbr/></p>
-<p>(Keep up to date with
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a> )</p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If possible,<wbr/> it is recommended that all non-JPEG formats
-(such as RAW16) not have a stall duration.<wbr/> RAW10,<wbr/> RAW12,<wbr/> RAW_<wbr/>OPAQUE
-and IMPLEMENTATION_<wbr/>DEFINED must not have stall durations.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.streamConfigurationMap">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [java_public as streamConfigurationMap]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The available stream configurations that this
-camera device supports; also includes the minimum frame durations
-and the stall durations for each format/<wbr/>size combination.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>All camera devices will support sensor maximum resolution (defined by
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>) for the JPEG format.<wbr/></p>
-<p>For a given use case,<wbr/> the actual maximum supported resolution
-may be lower than what is listed here,<wbr/> depending on the destination
-Surface for the image data.<wbr/> For example,<wbr/> for recording video,<wbr/>
-the video encoder chosen may have a maximum size limit (e.<wbr/>g.<wbr/> 1080p)
-smaller than what the camera (e.<wbr/>g.<wbr/> maximum resolution is 3264x2448)
-can provide.<wbr/></p>
-<p>Please reference the documentation for the image data destination to
-check if it limits the maximum size for image data.<wbr/></p>
-<p>The following table describes the minimum required output stream
-configurations based on the hardware level
-(<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a>):</p>
-<table>
-<thead>
-<tr>
-<th align="center">Format</th>
-<th align="center">Size</th>
-<th align="center">Hardware Level</th>
-<th align="center">Notes</th>
-</tr>
-</thead>
-<tbody>
-<tr>
-<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
-<td align="center"><a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> (*1)</td>
-<td align="center">Any</td>
-<td align="center"></td>
-</tr>
-<tr>
-<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
-<td align="center">1920x1080 (1080p)</td>
-<td align="center">Any</td>
-<td align="center">if 1080p <= activeArraySize</td>
-</tr>
-<tr>
-<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
-<td align="center">1280x720 (720p)</td>
-<td align="center">Any</td>
-<td align="center">if 720p <= activeArraySize</td>
-</tr>
-<tr>
-<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
-<td align="center">640x480 (480p)</td>
-<td align="center">Any</td>
-<td align="center">if 480p <= activeArraySize</td>
-</tr>
-<tr>
-<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
-<td align="center">320x240 (240p)</td>
-<td align="center">Any</td>
-<td align="center">if 240p <= activeArraySize</td>
-</tr>
-<tr>
-<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
-<td align="center">all output sizes available for JPEG</td>
-<td align="center">FULL</td>
-<td align="center"></td>
-</tr>
-<tr>
-<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
-<td align="center">all output sizes available for JPEG,<wbr/> up to the maximum video size</td>
-<td align="center">LIMITED</td>
-<td align="center"></td>
-</tr>
-<tr>
-<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a></td>
-<td align="center">same as YUV_<wbr/>420_<wbr/>888</td>
-<td align="center">Any</td>
-<td align="center"></td>
-</tr>
-</tbody>
-</table>
-<p>Refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for additional mandatory
-stream configurations on a per-capability basis.<wbr/></p>
-<p>*1: For the JPEG format,<wbr/> the sizes may be restricted by the conditions below:</p>
-<ul>
-<li>The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
-(e.<wbr/>g.<wbr/> 4:3,<wbr/> 16:9,<wbr/> 3:2,<wbr/> etc.<wbr/>).<wbr/> If the sensor maximum resolution
-(defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>) has an aspect ratio other than these,<wbr/>
-it does not have to be included in the supported JPEG sizes.<wbr/></li>
-<li>Some hardware JPEG encoders may have pixel boundary alignment requirements,<wbr/> such as
-the dimensions being a multiple of 16.<wbr/>
-Therefore,<wbr/> the maximum JPEG size may be smaller than the sensor maximum resolution.<wbr/>
-However,<wbr/> the largest JPEG size will be as close as possible to the sensor maximum
-resolution given the above constraints.<wbr/> It is required that after aspect ratio adjustments,<wbr/>
-any additional size reduction due to other issues must be less than 3% in area.<wbr/> For example,<wbr/>
-if the sensor maximum resolution is 3280x2464,<wbr/> the maximum JPEG size has an aspect
-ratio of 4:3,<wbr/> and the JPEG encoder alignment requirement is 16,<wbr/> the maximum JPEG size will be
-3264x2448.<wbr/></li>
-</ul>
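-<p>The worked example above can be reproduced with a short sketch (illustrative
-only; the function name and the simple downward search are not mandated by
-this entry):</p>
-<pre><code>// Find the largest JPEG size with the given aspect ratio whose dimensions are
-// multiples of the encoder alignment and fit inside the sensor resolution.
-#include <cstdint>
-#include <utility>
-
-std::pair<int32_t, int32_t> maxJpegSize(int32_t sensorWidth, int32_t sensorHeight,
-                                        int32_t aspectW, int32_t aspectH,
-                                        int32_t alignment) {
-    for (int32_t width = sensorWidth - sensorWidth % alignment; width > 0;
-         width -= alignment) {
-        if ((width * aspectH) % aspectW != 0) continue;
-        int32_t height = width * aspectH / aspectW;
-        if (height <= sensorHeight && height % alignment == 0) {
-            return {width, height};
-        }
-    }
-    return {0, 0};  // no size with this aspect ratio fits
-}
-
-// maxJpegSize(3280, 2464, 4, 3, 16) yields 3264x2448, matching the example above.
-</code></pre>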
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Do not set this property directly
-(it is synthetic and will not be available at the HAL layer);
-set the <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> instead.<wbr/></p>
-<p>Not all output formats may be supported in a configuration with
-an input stream of a particular format.<wbr/> For more details,<wbr/> see
-<a href="#static_android.scaler.availableInputOutputFormatsMap">android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map</a>.<wbr/></p>
-<p>It is recommended (but not mandatory) to also include half/<wbr/>quarter
-of sensor maximum resolution for JPEG formats (regardless of hardware
-level).<wbr/></p>
-<p>(The following is a rewording of the above required table):</p>
-<p>The HAL must include sensor maximum resolution (defined by
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>).<wbr/></p>
-<p>For FULL capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>),<wbr/>
-the HAL must include all YUV_<wbr/>420_<wbr/>888 sizes that have JPEG sizes listed
-here as output streams.<wbr/></p>
-<p>It must also include each resolution below,<wbr/> if it is smaller than or
-equal to the sensor maximum resolution (for both YUV_<wbr/>420_<wbr/>888 and JPEG
-formats),<wbr/> as output streams:</p>
-<ul>
-<li>240p (320 x 240)</li>
-<li>480p (640 x 480)</li>
-<li>720p (1280 x 720)</li>
-<li>1080p (1920 x 1080)</li>
-</ul>
-<p>For LIMITED capability devices
-(<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>),<wbr/>
-the HAL only has to list up to the maximum video size
-supported by the device.<wbr/></p>
-<p>Regardless of hardware level,<wbr/> every output resolution available for
-YUV_<wbr/>420_<wbr/>888 must also be available for IMPLEMENTATION_<wbr/>DEFINED.<wbr/></p>
-<p>This supersedes the following fields,<wbr/> which are now deprecated:</p>
-<ul>
-<li>availableFormats</li>
-<li>available[Processed,<wbr/>Raw,<wbr/>Jpeg]Sizes</li>
-</ul>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.scaler.croppingType">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>scaler.<wbr/>cropping<wbr/>Type
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">CENTER_ONLY</span>
- <span class="entry_type_enum_notes"><p>The camera device only supports centered crop regions.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FREEFORM</span>
- <span class="entry_type_enum_notes"><p>The camera device supports arbitrarily chosen crop regions.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The crop type that this camera device supports.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When passing a non-centered crop region (<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>) to a camera
-device that only supports CENTER_<wbr/>ONLY cropping,<wbr/> the camera device will move the
-crop region to the center of the sensor active array (<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>)
-and keep the crop region width and height unchanged.<wbr/> The camera device will return the
-final used crop region in metadata result <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>.<wbr/></p>
-<p>Camera devices that support FREEFORM cropping will support any crop region that
-is inside of the active array.<wbr/> The camera device will apply the same crop region and
-return the final used crop region in capture result metadata <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>.<wbr/></p>
-<p>LEGACY capability devices will only support CENTER_<wbr/>ONLY cropping.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.scaler.cropRegion">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>scaler.<wbr/>crop<wbr/>Region
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public as rectangle]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The desired region of the sensor to read out for this capture.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates relative to
- android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This control can be used to implement digital zoom.<wbr/></p>
-<p>The crop region coordinate system is based off
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with <code>(0,<wbr/> 0)</code> being the
-top-left corner of the sensor active array.<wbr/></p>
-<p>Output streams use this rectangle to produce their output,<wbr/>
-cropping to a smaller region if necessary to maintain the
-stream's aspect ratio,<wbr/> then scaling the sensor input to
-match the output's configured resolution.<wbr/></p>
-<p>The crop region is applied after the RAW to other color
-space (e.<wbr/>g.<wbr/> YUV) conversion.<wbr/> Since raw streams
-(e.<wbr/>g.<wbr/> RAW16) don't have the conversion stage,<wbr/> they are not
-croppable.<wbr/> The crop region will be ignored by raw streams.<wbr/></p>
-<p>For non-raw streams,<wbr/> any additional per-stream cropping will
-be done to maximize the final pixel area of the stream.<wbr/></p>
-<p>For example,<wbr/> if the crop region is set to a 4:3 aspect
-ratio,<wbr/> then 4:3 streams will use the exact crop
-region.<wbr/> 16:9 streams will further crop vertically
-(letterbox).<wbr/></p>
-<p>Conversely,<wbr/> if the crop region is set to a 16:9,<wbr/> then 4:3
-outputs will crop horizontally (pillarbox),<wbr/> and 16:9
-streams will match exactly.<wbr/> These additional crops will
-be centered within the crop region.<wbr/></p>
-<p>The width and height of the crop region cannot
-be set to be smaller than
-<code>floor( activeArraySize.<wbr/>width /<wbr/> <a href="#static_android.scaler.availableMaxDigitalZoom">android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom</a> )</code> and
-<code>floor( activeArraySize.<wbr/>height /<wbr/> <a href="#static_android.scaler.availableMaxDigitalZoom">android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom</a> )</code>,<wbr/> respectively.<wbr/></p>
-<p>The camera device may adjust the crop region to account
-for rounding and other hardware requirements; the final
-crop region used will be included in the output capture
-result.<wbr/></p>
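-<p>As a rough sketch of these constraints (illustrative only; the struct and
-function names are made up for this example,<wbr/> and a real device may apply further
-rounding),<wbr/> a device could adjust a requested crop region like this:</p>
-<pre><code>// Clamp the requested crop region to the smallest size allowed by the maximum
-// digital zoom, and re-center it for a device that only supports CENTER_ONLY
-// cropping (see android.scaler.croppingType).
-#include <algorithm>
-#include <cmath>
-#include <cstdint>
-
-struct Rect { int32_t left, top, width, height; };
-
-Rect adjustCropRegion(Rect requested, Rect activeArray, float maxDigitalZoom,
-                      bool centerOnly) {
-    const int32_t minWidth =
-        static_cast<int32_t>(std::floor(activeArray.width / maxDigitalZoom));
-    const int32_t minHeight =
-        static_cast<int32_t>(std::floor(activeArray.height / maxDigitalZoom));
-    Rect out = requested;
-    out.width = std::max(out.width, minWidth);
-    out.height = std::max(out.height, minHeight);
-    if (centerOnly) {
-        // CENTER_ONLY devices keep the size but move the region to the center
-        // of the active array.
-        out.left = (activeArray.width - out.width) / 2;
-        out.top = (activeArray.height - out.height) / 2;
-    }
-    return out;
-}
-</code></pre>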
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The output streams must maintain square pixels at all
-times,<wbr/> no matter what the relative aspect ratios of the
-crop region and the stream are.<wbr/> Negative values for the
-corner are allowed for raw output if the full pixel array is
-larger than the active pixel array.<wbr/> Width and height may be
-rounded to the nearest larger supportable width,<wbr/> especially
-for raw output,<wbr/> where only a few fixed scales may be
-possible.<wbr/></p>
-<p>For a configured set of output streams,<wbr/> if the sensor output is cropped to a smaller
-size than the active array size,<wbr/> the HAL needs to follow the cropping rules below:</p>
-<ul>
-<li>
-<p>The HAL needs to handle the cropRegion as if the sensor crop size were the effective active
-array size.<wbr/> More specifically,<wbr/> the HAL must transform the requested cropRegion from
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> to the sensor cropped pixel area size in this way:</p>
-<ol>
-<li>Translate the requested cropRegion with respect to the top-left corner of the sensor
-cropped pixel area by (tx,<wbr/> ty),<wbr/>
-where <code>ty = sensorCrop.<wbr/>top * (sensorCrop.<wbr/>height /<wbr/> activeArraySize.<wbr/>height)</code>
-and <code>tx = sensorCrop.<wbr/>left * (sensorCrop.<wbr/>width /<wbr/> activeArraySize.<wbr/>width)</code>.<wbr/> The
-(sensorCrop.<wbr/>top,<wbr/> sensorCrop.<wbr/>left) coordinate is based off the
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></li>
-<li>Scale the width and height of the requested cropRegion by scaling factors of
-sensor<wbr/>Crop.<wbr/>width/<wbr/>active<wbr/>Array<wbr/>Size.<wbr/>width and sensor<wbr/>Crop.<wbr/>height/<wbr/>active<wbr/>Array<wbr/>Size.<wbr/>height,<wbr/>
-respectively.<wbr/>
-Once this new cropRegion is calculated,<wbr/> the HAL must use this region to crop the image
-with regard to the sensor crop size (effective active array size).<wbr/> The HAL still needs to
-follow the general cropping rule for this new cropRegion and the effective active
-array size.<wbr/></li>
-</ol>
-</li>
-<li>
-<p>The HAL must report the cropRegion with regard to <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>
-The HAL needs to convert the new cropRegion generated above back to coordinates relative to the full active array size.<wbr/>
-The reported cropRegion may be slightly different from the requested cropRegion,<wbr/> since
-the HAL may adjust the crop region to account for rounding,<wbr/> conversion error,<wbr/> or other
-hardware limitations.<wbr/></p>
-</li>
-</ul>
-<p>HAL2.<wbr/>x uses only (x,<wbr/> y,<wbr/> width)</p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_sensor" class="section">sensor</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.sensor.exposureTime">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>exposure<wbr/>Time
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Duration each pixel is exposed to
-light.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If the sensor can't expose this exact duration,<wbr/> it will shorten the
-duration exposed to the nearest possible value (rather than expose longer).<wbr/>
-The final exposure time used will be available in the output capture result.<wbr/></p>
-<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
-OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.sensor.frameDuration">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>frame<wbr/>Duration
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Duration from start of frame exposure to
-start of next frame exposure.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/>
-<a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/> The duration
-is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The maximum frame rate that can be supported by a camera subsystem is
-a function of many factors:</p>
-<ul>
-<li>Requested resolutions of output image streams</li>
-<li>Availability of binning /<wbr/> skipping modes on the imager</li>
-<li>The bandwidth of the imager interface</li>
-<li>The bandwidth of the various ISP processing blocks</li>
-</ul>
-<p>Since these factors can vary greatly between different ISPs and
-sensors,<wbr/> the camera abstraction tries to represent the bandwidth
-restrictions with as simple a model as possible.<wbr/></p>
-<p>The model presented has the following characteristics:</p>
-<ul>
-<li>The image sensor is always configured to output the smallest
-resolution possible given the application's requested output stream
-sizes.<wbr/> The smallest resolution is defined as being at least as large
-as the largest requested output stream size; the camera pipeline must
-never digitally upsample sensor data when the crop region covers the
-whole sensor.<wbr/> In general,<wbr/> this means that if only small output stream
-resolutions are configured,<wbr/> the sensor can provide a higher frame
-rate.<wbr/></li>
-<li>Since any request may use any or all the currently configured
-output streams,<wbr/> the sensor and ISP must be configured to support
-scaling a single capture to all the streams at the same time.<wbr/> This
-means the camera pipeline must be ready to produce the largest
-requested output size without any delay.<wbr/> Therefore,<wbr/> the overall
-frame rate of a given configured stream set is governed only by the
-largest requested stream resolution.<wbr/></li>
-<li>Using more than one output stream in a request does not affect the
-frame duration.<wbr/></li>
-<li>Certain format-streams may need to do additional background processing
-before data is consumed/<wbr/>produced by that stream.<wbr/> These processors
-can run concurrently to the rest of the camera pipeline,<wbr/> but
-cannot process more than 1 capture at a time.<wbr/></li>
-</ul>
-<p>The necessary information for the application,<wbr/> given the model above,<wbr/>
-is provided via the <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> field using
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>.<wbr/>
-These are used to determine the maximum frame rate /<wbr/> minimum frame
-duration that is possible for a given stream configuration.<wbr/></p>
-<p>Specifically,<wbr/> the application can use the following rules to
-determine the minimum frame duration it can request from the camera
-device:</p>
-<ol>
-<li>Let the set of currently configured input/<wbr/>output streams
-be called <code>S</code>.<wbr/></li>
-<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by looking
-it up in <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> using <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
-(with its respective size/<wbr/>format).<wbr/> Let this set of frame durations be
-called <code>F</code>.<wbr/></li>
-<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed
-for <code>R</code> is the maximum out of all values in <code>F</code>.<wbr/> Let the streams
-used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
-</ol>
-<p>If none of the streams in <code>S_<wbr/>r</code> have a stall time (listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>
-using its respective size/<wbr/>format),<wbr/> then the frame duration in <code>F</code>
-determines the steady state frame rate that the application will get
-if it uses <code>R</code> as a repeating request.<wbr/> Let this special kind of
-request be called <code>Rsimple</code>.<wbr/></p>
-<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
-with a single capture of a new request <code>Rstall</code> (which has at least
-one in-use stream with a non-zero stall time).<wbr/> If <code>Rstall</code> has the
-same minimum frame duration,<wbr/> this will not cause a frame rate loss as long as
-all buffers from the previous <code>Rstall</code> have already been
-delivered.<wbr/></p>
-<p>For more details about stalling,<wbr/> see
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>.<wbr/></p>
-<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
-OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
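-<p>Putting the pieces together as a minimal sketch (illustrative only;
-<code>sensorReadoutOverheadNs</code> is a hypothetical device-specific value standing in
-for the "overhead" term in the <code>max(duration,<wbr/> exposureTime + overhead)</code> cap
-mentioned in the Range field above):</p>
-<pre><code>// The applied frame duration can never be shorter than the exposure time plus
-// readout overhead or the per-request minimum frame duration, and is clipped
-// to android.sensor.info.maxFrameDuration.
-#include <algorithm>
-#include <cstdint>
-
-int64_t appliedFrameDurationNs(int64_t requestedNs, int64_t exposureTimeNs,
-                               int64_t requestMinFrameDurationNs,
-                               int64_t maxFrameDurationNs,
-                               int64_t sensorReadoutOverheadNs) {
-    int64_t duration = std::max(requestedNs, exposureTimeNs + sensorReadoutOverheadNs);
-    duration = std::max(duration, requestMinFrameDurationNs);
-    return std::min(duration, maxFrameDurationNs);
-}
-</code></pre>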
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For more details about stalling,<wbr/> see
-<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.sensor.sensitivity">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>sensitivity
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The amount of gain applied to sensor data
-before processing.<wbr/></p>
- </td>
-
- <td class="entry_units">
- ISO arithmetic units
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The sensitivity is the standard ISO sensitivity value,<wbr/>
-as defined in ISO 12232:2006.<wbr/></p>
-<p>The sensitivity must be within <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a>,<wbr/> and
-if it is less than <a href="#static_android.sensor.maxAnalogSensitivity">android.<wbr/>sensor.<wbr/>max<wbr/>Analog<wbr/>Sensitivity</a>,<wbr/> the camera device
-is guaranteed to use only analog amplification for applying the gain.<wbr/></p>
-<p>If the camera device cannot apply the exact sensitivity
-requested,<wbr/> it will reduce the gain to the nearest supported
-value.<wbr/> The final sensitivity used will be available in the
-output capture result.<wbr/></p>
-<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
-OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
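-<p>A minimal sketch of this behavior (illustrative only; the names below are
-made up,<wbr/> and a real device may pick the nearest supported value rather than a
-simple clamp):</p>
-<pre><code>// Clamp the requested sensitivity into android.sensor.info.sensitivityRange and
-// report whether the applied value is low enough that only analog gain is used.
-#include <algorithm>
-#include <cstdint>
-
-struct AppliedSensitivity {
-    int32_t value;      // reported back in the capture result
-    bool analogOnly;    // true if value < android.sensor.maxAnalogSensitivity
-};
-
-AppliedSensitivity applySensitivity(int32_t requested, int32_t rangeMin,
-                                    int32_t rangeMax, int32_t maxAnalogSensitivity) {
-    AppliedSensitivity result;
-    result.value = std::min(std::max(requested, rangeMin), rangeMax);
-    result.analogOnly = result.value < maxAnalogSensitivity;
-    return result;
-}
-</code></pre>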
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>ISO 12232:2006 REI method is acceptable.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.sensor.testPatternData">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A pixel <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> that supplies the test pattern
-when <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode</a> is SOLID_<wbr/>COLOR.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Each color channel is treated as an unsigned 32-bit integer.<wbr/>
-The camera device then uses the most significant X bits,<wbr/>
-where X is the number of bits in its Bayer raw sensor
-output.<wbr/></p>
-<p>For example,<wbr/> a sensor with RAW10 Bayer output would use the
-10 most significant bits from each color channel.<wbr/></p>
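-<p>A one-line sketch of this bit selection (illustrative only;
-the function name is made up for this example):</p>
-<pre><code>// Keep only the rawBitDepth most significant bits of a 32-bit channel value.
-#include <cstdint>
-
-uint32_t testPatternChannelValue(uint32_t channel, int rawBitDepth) {
-    return channel >> (32 - rawBitDepth);
-}
-
-// Example: testPatternChannelValue(0xFFFFFFFF, 10) == 0x3FF (full-scale RAW10).
-</code></pre>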
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.sensor.testPatternMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No test pattern mode is used,<wbr/> and the camera
-device returns captures from the image sensor.<wbr/></p>
-<p>This is the default if the key is not set.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SOLID_COLOR</span>
- <span class="entry_type_enum_notes"><p>Each pixel in <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> is replaced by its
-respective color channel provided in
-<a href="#controls_android.sensor.testPatternData">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data</a>.<wbr/></p>
-<p>For example:</p>
-<pre><code>android.<wbr/>testPatternData = [0,<wbr/> 0xFFFFFFFF,<wbr/> 0xFFFFFFFF,<wbr/> 0]
-</code></pre>
-<p>All green pixels are 100% green.<wbr/> All red/<wbr/>blue pixels are black.<wbr/></p>
-<pre><code>android.<wbr/>testPatternData = [0xFFFFFFFF,<wbr/> 0,<wbr/> 0xFFFFFFFF,<wbr/> 0]
-</code></pre>
-<p>All red pixels are 100% red.<wbr/> Only the odd green pixels
-are 100% green.<wbr/> All blue pixels are 100% black.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">COLOR_BARS</span>
- <span class="entry_type_enum_notes"><p>All pixel data is replaced with an 8-bar color pattern.<wbr/></p>
-<p>The vertical bars (left-to-right) are as follows:</p>
-<ul>
-<li>100% white</li>
-<li>yellow</li>
-<li>cyan</li>
-<li>green</li>
-<li>magenta</li>
-<li>red</li>
-<li>blue</li>
-<li>black</li>
-</ul>
-<p>In general the image would look like the following:</p>
-<pre><code>W Y C G M R B K
-W Y C G M R B K
-W Y C G M R B K
-W Y C G M R B K
-W Y C G M R B K
-.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
-.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
-.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
-
-(B = Blue,<wbr/> K = Black)
-</code></pre>
-<p>Each bar should take up 1/<wbr/>8 of the sensor pixel array width.<wbr/>
-When this is not possible,<wbr/> the bar size should be rounded
-down to the nearest integer and the pattern can repeat
-on the right side.<wbr/></p>
-<p>Each bar's height must always take up the full sensor
-pixel array height.<wbr/></p>
-<p>Each pixel in this test pattern must be set to either
-0% intensity or 100% intensity.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">COLOR_BARS_FADE_TO_GRAY</span>
- <span class="entry_type_enum_notes"><p>The test pattern is similar to COLOR_<wbr/>BARS,<wbr/> except that
-each bar should start at its specified color at the top,<wbr/>
-and fade to gray at the bottom.<wbr/></p>
-<p>Furthermore,<wbr/> each bar is subdivided into a left and
-right half.<wbr/> The left half should have a smooth gradient,<wbr/>
-and the right half should have a quantized gradient.<wbr/></p>
-<p>In particular,<wbr/> the right half should consist of blocks of the
-same color for 1/<wbr/>16th of the active sensor pixel array width.<wbr/></p>
-<p>The least significant bits in the quantized gradient should
-be copied from the most significant bits of the smooth gradient.<wbr/></p>
-<p>The height of each bar should always be a multiple of 128.<wbr/>
-When this is not the case,<wbr/> the pattern should repeat at the bottom
-of the image.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PN9</span>
- <span class="entry_type_enum_notes"><p>All pixel data is replaced by a pseudo-random sequence
-generated from a PN9 512-bit sequence (typically implemented
-in hardware with a linear feedback shift register).<wbr/></p>
-<p>The generator should be reset at the beginning of each frame,<wbr/>
-and thus each subsequent raw frame with this test pattern should
-be exactly the same as the last.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CUSTOM1</span>
- <span class="entry_type_enum_value">256</span>
- <span class="entry_type_enum_notes"><p>The first custom test pattern.<wbr/> All custom patterns that are
-available only on this camera device are at least this numeric
-value.<wbr/></p>
-<p>All of the custom test patterns will be static
-(that is the raw image must not vary from frame to frame).<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>When enabled,<wbr/> the sensor sends a test pattern instead of
-doing a real exposure from the camera.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.sensor.availableTestPatternModes">android.<wbr/>sensor.<wbr/>available<wbr/>Test<wbr/>Pattern<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When a test pattern is enabled,<wbr/> all manual sensor controls specified
-by android.<wbr/>sensor.<wbr/>* will be ignored.<wbr/> All other controls should
-work as normal.<wbr/></p>
-<p>For example,<wbr/> if manual flash is enabled,<wbr/> flash firing should still
-occur (and the test pattern should remain unmodified,<wbr/> since the flash
-would not actually affect it).<wbr/></p>
-<p>Defaults to OFF.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>All test patterns are specified in the Bayer domain.<wbr/></p>
-<p>The HAL may choose to substitute test patterns from the sensor
-with test patterns from on-device memory.<wbr/> In that case,<wbr/> it should be
-indistinguishable to the ISP whether the data came from the
-sensor interconnect bus (such as CSI2) or memory.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.sensor.info.activeArraySize">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public as rectangle]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">Four ints defining the active pixel rectangle</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The area of the image sensor which corresponds to active pixels after any geometric
-distortion correction has been applied.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates on the image sensor
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is the rectangle representing the size of the active region of the sensor (i.<wbr/>e.<wbr/>
-the region that actually receives light from the scene) after any geometric correction
-has been applied,<wbr/> and should be treated as the maximum size in pixels of any of the
-image output formats aside from the raw formats.<wbr/></p>
-<p>This rectangle is defined relative to the full pixel array; (0,<wbr/>0) is the top-left of
-the full pixel array,<wbr/> and the size of the full pixel array is given by
-<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
-<p>The coordinate system for most other keys that list pixel coordinates,<wbr/> including
-<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>,<wbr/> is defined relative to the active array rectangle given in
-this field,<wbr/> with <code>(0,<wbr/> 0)</code> being the top-left of this rectangle.<wbr/></p>
-<p>The active array may be smaller than the full pixel array,<wbr/> since the full array may
-include black calibration pixels or other inactive regions,<wbr/> and geometric correction
-resulting in scaling or cropping may have been applied.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This array contains <code>(xmin,<wbr/> ymin,<wbr/> width,<wbr/> height)</code>.<wbr/> The <code>(xmin,<wbr/> ymin)</code> must be
->= <code>(0,<wbr/>0)</code>.<wbr/>
-The <code>(width,<wbr/> height)</code> must be <= <code><a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a></code>.<wbr/></p>
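-<p>A minimal validation sketch of these constraints (illustrative only;
-the struct and function names are made up for this example):</p>
-<pre><code>// The active array rectangle must start at or after (0, 0) and fit inside
-// android.sensor.info.pixelArraySize.
-#include <cstdint>
-
-struct ActiveArray { int32_t xmin, ymin, width, height; };
-
-bool isActiveArrayValid(const ActiveArray& a, int32_t pixelArrayWidth,
-                        int32_t pixelArrayHeight) {
-    return a.xmin >= 0 && a.ymin >= 0 &&
-           a.width <= pixelArrayWidth && a.height <= pixelArrayHeight;
-}
-</code></pre>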
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.sensitivityRange">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as rangeInt]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">Range of supported sensitivities</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Range of sensitivities for <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> supported by this
-camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Min <= 100,<wbr/> Max >= 800</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The values are the standard ISO sensitivity values,<wbr/>
-as defined in ISO 12232:2006.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.colorFilterArrangement">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">RGGB</span>
- </li>
- <li>
- <span class="entry_type_enum_name">GRBG</span>
- </li>
- <li>
- <span class="entry_type_enum_name">GBRG</span>
- </li>
- <li>
- <span class="entry_type_enum_name">BGGR</span>
- </li>
- <li>
- <span class="entry_type_enum_name">RGB</span>
- <span class="entry_type_enum_notes"><p>Sensor is not Bayer; output has 3 16-bit
-values for each pixel,<wbr/> instead of just 1 16-bit value
-per pixel.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The arrangement of color filters on sensor;
-represents the colors in the top-left 2x2 section of
-the sensor,<wbr/> in reading order.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.exposureTimeRange">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as rangeLong]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">nanoseconds</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The range of image exposure times for <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a> supported
-by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p>The minimum exposure time will be less than 100 us.<wbr/> For FULL
-capability devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/>
-the maximum exposure time will be greater than 100ms.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For FULL capability devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/>
-the maximum of the range SHOULD be at least 1 second (1e9) and MUST be at least
-100ms.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.maxFrameDuration">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum possible frame duration (minimum frame rate) for
-<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> that is supported this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p>For FULL capability devices
-(<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/> at least 100ms.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Attempting to use frame durations beyond the maximum will result in the frame
-duration being clipped to the maximum.<wbr/> See that control for a full definition of frame
-durations.<wbr/></p>
-<p>Refer to <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
-for the minimum frame duration values.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For FULL capability devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/>
-the maximum of the range SHOULD be at least
-1 second (1e9) and MUST be at least 100ms (100e6).<wbr/></p>
-<p><a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a> must be greater than or
-equal to the <a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a> max
-value (since exposure time overrides frame duration).<wbr/></p>
-<p>Available minimum frame durations for JPEG must be no greater
-than that of the YUV_<wbr/>420_<wbr/>888/<wbr/>IMPLEMENTATION_<wbr/>DEFINED
-minimum frame durations (for that respective size).<wbr/></p>
-<p>Since JPEG processing is considered offline and can take longer than
-a single uncompressed capture,<wbr/> refer to
-<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>
-for details about encoding this scenario.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.physicalSize">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>info.<wbr/>physical<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as sizeF]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">width x height</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The physical dimensions of the full pixel
-array.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Millimeters
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is the physical size of the sensor pixel
-array defined by <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Needed for FOV calculation for old API</p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.pixelArraySize">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [public as size]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Dimensions of the full pixel array,<wbr/> possibly
-including black calibration pixels.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixels
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The pixel count of the full pixel array of the image sensor,<wbr/> which covers
-<a href="#static_android.sensor.info.physicalSize">android.<wbr/>sensor.<wbr/>info.<wbr/>physical<wbr/>Size</a> area.<wbr/> This represents the full pixel dimensions of
-the raw buffers produced by this sensor.<wbr/></p>
-<p>If a camera device supports raw sensor formats,<wbr/> either this or
-<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> is the maximum dimensions for the raw
-output formats listed in <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> (this depends on
-whether or not the image sensor returns buffers containing pixels that are not
-part of the active array region for black level calibration or other purposes).<wbr/></p>
-<p>Some parts of the full pixel array may not receive light from the scene,<wbr/>
-or be otherwise inactive.<wbr/> The <a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> key
-defines the rectangle of active pixels that will be included in processed image
-formats.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.whiteLevel">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Maximum raw value output by sensor.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>> 255 (8-bit output)</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This specifies the fully-saturated encoding level for the raw
-sample values from the sensor.<wbr/> This is typically caused by the
-sensor becoming highly non-linear or clipping.<wbr/> The minimum for
-each channel is specified by the offset in the
-<a href="#static_android.sensor.blackLevelPattern">android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern</a> key.<wbr/></p>
-<p>The white level is typically determined either by sensor bit depth
-(8-14 bits is expected),<wbr/> or by the point where the sensor response
-becomes too non-linear to be useful.<wbr/> The default value for this is the
-maximum representable value for a 16-bit raw sample (2^16 - 1).<wbr/></p>
-<p>The white level values of captured images may vary for different
-capture settings (e.<wbr/>g.,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>).<wbr/> This key
-represents a coarse approximation for such a case.<wbr/> It is recommended
-to use <a href="#dynamic_android.sensor.dynamicWhiteLevel">android.<wbr/>sensor.<wbr/>dynamic<wbr/>White<wbr/>Level</a> for captures when supported
-by the camera device,<wbr/> which provides more accurate white level values.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The full bit depth of the sensor must be available in the raw data,<wbr/>
-so the value for linear sensors should not be significantly lower
-than the maximum raw value supported,<wbr/> i.<wbr/>e.<wbr/> 2^(sensor bits per pixel).<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.timestampSource">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">UNKNOWN</span>
- <span class="entry_type_enum_notes"><p>Timestamps from <a href="#dynamic_android.sensor.timestamp">android.<wbr/>sensor.<wbr/>timestamp</a> are in nanoseconds and monotonic,<wbr/>
-but can not be compared to timestamps from other subsystems
-(e.<wbr/>g.<wbr/> accelerometer,<wbr/> gyro etc.<wbr/>),<wbr/> or other instances of the same or different
-camera devices in the same system.<wbr/> Timestamps between streams and results for
-a single camera instance are comparable,<wbr/> and the timestamps for all buffers
-and the result metadata generated by a single capture are identical.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">REALTIME</span>
- <span class="entry_type_enum_notes"><p>Timestamps from <a href="#dynamic_android.sensor.timestamp">android.<wbr/>sensor.<wbr/>timestamp</a> are in the same timebase as
-<a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,<wbr/>
-and they can be compared to other timestamps using that base.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The time base source for sensor capture start timestamps.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The timestamps provided for captures are always in nanoseconds and monotonic,<wbr/> but
-may not be based on a time source that can be compared to other system time sources.<wbr/></p>
-<p>This characteristic defines the source for the timestamps,<wbr/> and therefore whether they
-can be compared against other system time sources/<wbr/>timestamps.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>For camera devices that implement UNKNOWN,<wbr/> the camera framework expects the timestamp
-source to be SYSTEM_<wbr/>TIME_<wbr/>MONOTONIC.<wbr/> For camera devices that implement REALTIME,<wbr/> the camera
-framework expects the timestamp source to be SYSTEM_<wbr/>TIME_<wbr/>BOOTTIME.<wbr/> See
-system/<wbr/>core/<wbr/>include/<wbr/>utils/<wbr/>Timers.<wbr/>h for the definition of SYSTEM_<wbr/>TIME_<wbr/>MONOTONIC and
-SYSTEM_<wbr/>TIME_<wbr/>BOOTTIME.<wbr/> Note that the HAL must follow the above expectation; otherwise video
-recording might suffer unexpected behavior.<wbr/></p>
-<p>Also,<wbr/> camera devices that implement REALTIME must pass the ITS sensor fusion test,<wbr/> which
-tests the alignment between camera timestamps and gyro sensor timestamps.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.lensShadingApplied">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">FALSE</span>
- </li>
- <li>
- <span class="entry_type_enum_name">TRUE</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether the RAW images output from this camera device are subject to
-lens shading correction.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If TRUE,<wbr/> all images produced by the camera device in the RAW image formats will
-have lens shading correction already applied to them.<wbr/> If FALSE,<wbr/> the images will
-not be adjusted for lens shading correction.<wbr/>
-See <a href="#static_android.request.maxNumOutputRaw">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Raw</a> for a list of RAW image formats.<wbr/></p>
-<p>This key will be <code>null</code> for all devices that do not report this information.<wbr/>
-Devices with RAW capability will always report this information in this key.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.info.preCorrectionActiveArraySize">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public as rectangle]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">Four ints defining the active pixel rectangle</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The area of the image sensor which corresponds to active pixels prior to the
-application of any geometric distortion correction.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Pixel coordinates on the image sensor
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is the rectangle representing the size of the active region of the sensor (i.<wbr/>e.<wbr/>
-the region that actually receives light from the scene) before any geometric correction
-has been applied,<wbr/> and should be treated as the active region rectangle for any of the
-raw formats.<wbr/> All metadata associated with raw processing (e.<wbr/>g.<wbr/> the lens shading
-correction map,<wbr/> and radial distortion fields) treats the top,<wbr/> left of this rectangle as
-the origin,<wbr/> (0,<wbr/>0).<wbr/></p>
-<p>The size of this region determines the maximum field of view and the maximum number of
-pixels that an image from this sensor can contain,<wbr/> prior to the application of
-geometric distortion correction.<wbr/> The effective maximum pixel dimensions of a
-post-distortion-corrected image is given by the <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>
-field,<wbr/> and the effective maximum field of view for a post-distortion-corrected image
-can be calculated by applying the geometric distortion correction fields to this
-rectangle,<wbr/> and cropping to the rectangle given in <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
-<p>E.<wbr/>g.<wbr/> to calculate position of a pixel,<wbr/> (x,<wbr/>y),<wbr/> in a processed YUV output image with the
-dimensions in <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> given the position of a pixel,<wbr/>
-(x',<wbr/> y'),<wbr/> in the raw pixel array with dimensions given in
-<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>:</p>
-<ol>
-<li>Choose a pixel (x',<wbr/> y') within the active array region of the raw buffer given in
-<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a>,<wbr/> otherwise this pixel is considered
-to be outside of the FOV,<wbr/> and will not be shown in the processed output image.<wbr/></li>
-<li>Apply geometric distortion correction to get the post-distortion pixel coordinate,<wbr/>
-(x_<wbr/>i,<wbr/> y_<wbr/>i).<wbr/> When applying geometric correction metadata,<wbr/> note that metadata for raw
-buffers is defined relative to the top,<wbr/> left of the
-<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> rectangle.<wbr/></li>
-<li>If the resulting corrected pixel coordinate is within the region given in
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> then the position of this pixel in the
-processed output image buffer is <code>(x_<wbr/>i - activeArray.<wbr/>left,<wbr/> y_<wbr/>i - activeArray.<wbr/>top)</code>,<wbr/>
-when the top,<wbr/> left coordinate of that buffer is treated as (0,<wbr/> 0).<wbr/></li>
-</ol>
-<p>Thus,<wbr/> for pixel x',<wbr/>y' = (25,<wbr/> 25) on a sensor where <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>
-is (100,<wbr/>100),<wbr/> <a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> is (10,<wbr/> 10,<wbr/> 100,<wbr/> 100),<wbr/>
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> is (20,<wbr/> 20,<wbr/> 80,<wbr/> 80),<wbr/> and the geometric distortion
-correction doesn't change the pixel coordinate,<wbr/> the resulting pixel selected in
-pixel coordinates would be x,<wbr/>y = (25,<wbr/> 25) relative to the top,<wbr/>left of the raw buffer
-with dimensions given in <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>,<wbr/> and would be (5,<wbr/> 5)
-relative to the top,<wbr/>left of post-processed YUV output buffer with dimensions given in
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
-<p>The currently supported fields that correct for geometric distortion are:</p>
-<ol>
-<li><a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a>.<wbr/></li>
-</ol>
-<p>If all of the geometric distortion fields are no-ops,<wbr/> this rectangle will be the same
-as the post-distortion-corrected rectangle given in
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
-<p>This rectangle is defined relative to the full pixel array; (0,<wbr/>0) is the top-left of
-the full pixel array,<wbr/> and the size of the full pixel array is given by
-<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
-<p>The pre-correction active array may be smaller than the full pixel array,<wbr/> since the
-full array may include black calibration pixels or other inactive regions.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This array contains <code>(xmin,<wbr/> ymin,<wbr/> width,<wbr/> height)</code>.<wbr/> The <code>(xmin,<wbr/> ymin)</code> must be
->= <code>(0,<wbr/>0)</code>.<wbr/>
-The <code>(width,<wbr/> height)</code> must be <= <code><a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a></code>.<wbr/></p>
-<p>If omitted by the HAL implementation,<wbr/> the camera framework will assume that this is
-the same as the post-correction active array region given in
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
-
-
- <tr class="entry" id="static_android.sensor.referenceIlluminant1">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">DAYLIGHT</span>
- <span class="entry_type_enum_value">1</span>
- </li>
- <li>
- <span class="entry_type_enum_name">FLUORESCENT</span>
- <span class="entry_type_enum_value">2</span>
- </li>
- <li>
- <span class="entry_type_enum_name">TUNGSTEN</span>
- <span class="entry_type_enum_value">3</span>
- <span class="entry_type_enum_notes"><p>Incandescent light</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FLASH</span>
- <span class="entry_type_enum_value">4</span>
- </li>
- <li>
- <span class="entry_type_enum_name">FINE_WEATHER</span>
- <span class="entry_type_enum_value">9</span>
- </li>
- <li>
- <span class="entry_type_enum_name">CLOUDY_WEATHER</span>
- <span class="entry_type_enum_value">10</span>
- </li>
- <li>
- <span class="entry_type_enum_name">SHADE</span>
- <span class="entry_type_enum_value">11</span>
- </li>
- <li>
- <span class="entry_type_enum_name">DAYLIGHT_FLUORESCENT</span>
- <span class="entry_type_enum_value">12</span>
- <span class="entry_type_enum_notes"><p>D 5700 - 7100K</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">DAY_WHITE_FLUORESCENT</span>
- <span class="entry_type_enum_value">13</span>
- <span class="entry_type_enum_notes"><p>N 4600 - 5400K</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">COOL_WHITE_FLUORESCENT</span>
- <span class="entry_type_enum_value">14</span>
- <span class="entry_type_enum_notes"><p>W 3900 - 4500K</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">WHITE_FLUORESCENT</span>
- <span class="entry_type_enum_value">15</span>
- <span class="entry_type_enum_notes"><p>WW 3200 - 3700K</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">STANDARD_A</span>
- <span class="entry_type_enum_value">17</span>
- </li>
- <li>
- <span class="entry_type_enum_name">STANDARD_B</span>
- <span class="entry_type_enum_value">18</span>
- </li>
- <li>
- <span class="entry_type_enum_name">STANDARD_C</span>
- <span class="entry_type_enum_value">19</span>
- </li>
- <li>
- <span class="entry_type_enum_name">D55</span>
- <span class="entry_type_enum_value">20</span>
- </li>
- <li>
- <span class="entry_type_enum_name">D65</span>
- <span class="entry_type_enum_value">21</span>
- </li>
- <li>
- <span class="entry_type_enum_name">D75</span>
- <span class="entry_type_enum_value">22</span>
- </li>
- <li>
- <span class="entry_type_enum_name">D50</span>
- <span class="entry_type_enum_value">23</span>
- </li>
- <li>
- <span class="entry_type_enum_name">ISO_STUDIO_TUNGSTEN</span>
- <span class="entry_type_enum_value">24</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The standard reference illuminant used as the scene light source when
-calculating the <a href="#static_android.sensor.colorTransform1">android.<wbr/>sensor.<wbr/>color<wbr/>Transform1</a>,<wbr/>
-<a href="#static_android.sensor.calibrationTransform1">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform1</a>,<wbr/> and
-<a href="#static_android.sensor.forwardMatrix1">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix1</a> matrices.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The values in this key correspond to the values defined for the
-EXIF LightSource tag.<wbr/> These illuminants are standard light sources
-that are often used when calibrating camera devices.<wbr/></p>
-<p>If this key is present,<wbr/> then <a href="#static_android.sensor.colorTransform1">android.<wbr/>sensor.<wbr/>color<wbr/>Transform1</a>,<wbr/>
-<a href="#static_android.sensor.calibrationTransform1">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform1</a>,<wbr/> and
-<a href="#static_android.sensor.forwardMatrix1">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix1</a> will also be present.<wbr/></p>
-<p>Some devices may choose to provide a second set of calibration
-information for improved quality,<wbr/> including
-<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a> and its corresponding matrices.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The first reference illuminant (<a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a>)
-and corresponding matrices must be present to support the RAW capability
-and DNG output.<wbr/></p>
-<p>When producing raw images with a color profile that has only been
-calibrated against a single light source,<wbr/> it is valid to omit
-<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a> along with the
-<a href="#static_android.sensor.colorTransform2">android.<wbr/>sensor.<wbr/>color<wbr/>Transform2</a>,<wbr/> <a href="#static_android.sensor.calibrationTransform2">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2</a>,<wbr/>
-and <a href="#static_android.sensor.forwardMatrix2">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix2</a> matrices.<wbr/></p>
-<p>If only <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a> is included,<wbr/> it should be
-chosen so that it is representative of typical scene lighting.<wbr/> In
-general,<wbr/> D50 or DAYLIGHT will be chosen for this case.<wbr/></p>
-<p>If both <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a> and
-<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a> are included,<wbr/> they should be
-chosen to represent the typical range of scene lighting conditions.<wbr/>
-In general,<wbr/> a low color temperature illuminant such as Standard-A will
-be chosen for the first reference illuminant and a higher color
-temperature illuminant such as D65 will be chosen for the second
-reference illuminant.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.referenceIlluminant2">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The standard reference illuminant used as the scene light source when
-calculating the <a href="#static_android.sensor.colorTransform2">android.<wbr/>sensor.<wbr/>color<wbr/>Transform2</a>,<wbr/>
-<a href="#static_android.sensor.calibrationTransform2">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2</a>,<wbr/> and
-<a href="#static_android.sensor.forwardMatrix2">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix2</a> matrices.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>See <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a> for more details.<wbr/></p>
-<p>If this key is present,<wbr/> then <a href="#static_android.sensor.colorTransform2">android.<wbr/>sensor.<wbr/>color<wbr/>Transform2</a>,<wbr/>
-<a href="#static_android.sensor.calibrationTransform2">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2</a>,<wbr/> and
-<a href="#static_android.sensor.forwardMatrix2">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix2</a> will also be present.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.calibrationTransform1">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform1
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x 3
- </span>
- <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
-
-
-
-
- <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A per-device calibration transform matrix that maps from the
-reference sensor colorspace to the actual device sensor colorspace.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This matrix is used to correct for per-device variations in the
-sensor colorspace,<wbr/> and is used for processing raw buffer data.<wbr/></p>
-<p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
-contains a per-device calibration transform that maps colors
-from reference sensor color space (i.<wbr/>e.<wbr/> the "golden module"
-colorspace) into this camera device's native sensor color
-space under the first reference illuminant
-(<a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a>).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.calibrationTransform2">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x 3
- </span>
- <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
-
-
-
-
- <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A per-device calibration transform matrix that maps from the
-reference sensor colorspace to the actual device sensor colorspace
-(this is the colorspace of the raw buffer data).<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This matrix is used to correct for per-device variations in the
-sensor colorspace,<wbr/> and is used for processing raw buffer data.<wbr/></p>
-<p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
-contains a per-device calibration transform that maps colors
-from reference sensor color space (i.<wbr/>e.<wbr/> the "golden module"
-colorspace) into this camera device's native sensor color
-space under the second reference illuminant
-(<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a>).<wbr/></p>
-<p>This matrix will only be present if the second reference
-illuminant is present.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.colorTransform1">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>color<wbr/>Transform1
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x 3
- </span>
- <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
-
-
-
-
- <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A matrix that transforms color values from CIE XYZ color space to
-reference sensor color space.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This matrix is used to convert from the standard CIE XYZ color
-space to the reference sensor colorspace,<wbr/> and is used when processing
-raw buffer data.<wbr/></p>
-<p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
-contains a color transform matrix that maps colors from the CIE
-XYZ color space to the reference sensor color space (i.<wbr/>e.<wbr/> the
-"golden module" colorspace) under the first reference illuminant
-(<a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a>).<wbr/></p>
-<p>The white points chosen in both the reference sensor color space
-and the CIE XYZ colorspace when calculating this transform will
-match the standard white point for the first reference illuminant
-(i.<wbr/>e.<wbr/> no chromatic adaptation will be applied by this transform).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.colorTransform2">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>color<wbr/>Transform2
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x 3
- </span>
- <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
-
-
-
-
- <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A matrix that transforms color values from CIE XYZ color space to
-reference sensor color space.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This matrix is used to convert from the standard CIE XYZ color
-space to the reference sensor colorspace,<wbr/> and is used when processing
-raw buffer data.<wbr/></p>
-<p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
-contains a color transform matrix that maps colors from the CIE
-XYZ color space to the reference sensor color space (i.<wbr/>e.<wbr/> the
-"golden module" colorspace) under the second reference illuminant
-(<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a>).<wbr/></p>
-<p>The white points chosen in both the reference sensor color space
-and the CIE XYZ colorspace when calculating this transform will
-match the standard white point for the second reference illuminant
-(i.<wbr/>e.<wbr/> no chromatic adaptation will be applied by this transform).<wbr/></p>
-<p>This matrix will only be present if the second reference
-illuminant is present.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.forwardMatrix1">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix1
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x 3
- </span>
- <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
-
-
-
-
- <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A matrix that transforms white balanced camera colors from the reference
-sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This matrix is used to convert to the standard CIE XYZ colorspace,<wbr/> and
-is used when processing raw buffer data.<wbr/></p>
-<p>This matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and contains
-a color transform matrix that maps white balanced colors from the
-reference sensor color space to the CIE XYZ color space with a D50 white
-point.<wbr/></p>
-<p>Under the first reference illuminant (<a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a>)
-this matrix is chosen so that the standard white point for this reference
-illuminant in the reference sensor colorspace is mapped to D50 in the
-CIE XYZ colorspace.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.forwardMatrix2">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix2
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x 3
- </span>
- <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
-
-
-
-
- <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A matrix that transforms white balanced camera colors from the reference
-sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This matrix is used to convert to the standard CIE XYZ colorspace,<wbr/> and
-is used when processing raw buffer data.<wbr/></p>
-<p>This matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and contains
-a color transform matrix that maps white balanced colors from the
-reference sensor color space to the CIE XYZ color space with a D50 white
-point.<wbr/></p>
-<p>Under the second reference illuminant (<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a>)
-this matrix is chosen so that the standard white point for this reference
-illuminant in the reference sensor colorspace is mapped to D50 in the
-CIE XYZ colorspace.<wbr/></p>
-<p>This matrix will only be present if the second reference
-illuminant is present.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.baseGainFactor">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>sensor.<wbr/>base<wbr/>Gain<wbr/>Factor
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Gain factor from electrons to raw units when
-ISO=100</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.blackLevelPattern">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public as blackLevelPattern]</span>
-
-
-
-
- <div class="entry_type_notes">2x2 raw count block</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A fixed black level offset for each of the color filter arrangement
-(CFA) mosaic channels.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 0 for each.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This key specifies the zero light value for each of the CFA mosaic
-channels in the camera sensor.<wbr/> The maximal value output by the
-sensor is represented by the value in <a href="#static_android.sensor.info.whiteLevel">android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level</a>.<wbr/></p>
-<p>The values are given in the same order as channels listed for the CFA
-layout key (see <a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a>),<wbr/> i.<wbr/>e.<wbr/> the
-nth value given corresponds to the black level offset for the nth
-color channel listed in the CFA.<wbr/></p>
-<p>The black level values of captured images may vary for different
-capture settings (e.<wbr/>g.,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>).<wbr/> This key
-represents a coarse approximation for such a case.<wbr/> It is recommended to
-use <a href="#dynamic_android.sensor.dynamicBlackLevel">android.<wbr/>sensor.<wbr/>dynamic<wbr/>Black<wbr/>Level</a> or use pixels from
-<a href="#static_android.sensor.opticalBlackRegions">android.<wbr/>sensor.<wbr/>optical<wbr/>Black<wbr/>Regions</a> directly for captures when
-supported by the camera device,<wbr/> which provides more accurate black
-level values.<wbr/> For raw capture in particular,<wbr/> it is recommended to use
-pixels from <a href="#static_android.sensor.opticalBlackRegions">android.<wbr/>sensor.<wbr/>optical<wbr/>Black<wbr/>Regions</a> to calculate black
-level values for each frame.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The values are given in row-column scan order,<wbr/> with the first value
-corresponding to the element of the CFA in row=0,<wbr/> column=0.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.maxAnalogSensitivity">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>max<wbr/>Analog<wbr/>Sensitivity
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Maximum sensitivity that is implemented
-purely through analog gain.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_FULL">FULL</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> values less than or
-equal to this,<wbr/> all applied gain must be analog.<wbr/> For
-values above this,<wbr/> the gain applied can be a mix of analog and
-digital.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.orientation">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>orientation
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Clockwise angle through which the output image needs to be rotated to be
-upright on the device screen in its native orientation.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Degrees of clockwise rotation; always a multiple of
- 90
- </td>
-
- <td class="entry_range">
- <p>0,<wbr/> 90,<wbr/> 180,<wbr/> 270</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Also defines the direction of rolling shutter readout,<wbr/> which is from top to bottom in
-the sensor's coordinate system.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.profileHueSatMapDimensions">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map<wbr/>Dimensions
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
- <div class="entry_type_notes">Number of samples for hue,<wbr/> saturation,<wbr/> and value</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The number of input samples for each dimension of
-<a href="#dynamic_android.sensor.profileHueSatMap">android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map</a>.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Hue >= 1,<wbr/>
-Saturation >= 2,<wbr/>
-Value >= 1</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The number of input samples for the hue,<wbr/> saturation,<wbr/> and value
-dimension of <a href="#dynamic_android.sensor.profileHueSatMap">android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map</a>.<wbr/> The order of the
-dimensions given is hue,<wbr/> saturation,<wbr/> value; where hue is the 0th
-element.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.availableTestPatternModes">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>available<wbr/>Test<wbr/>Pattern<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of sensor test pattern modes for <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode</a>
-supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Defaults to OFF,<wbr/> and always includes OFF if defined.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>All custom modes must be >= CUSTOM1.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.opticalBlackRegions">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>optical<wbr/>Black<wbr/>Regions
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4 x num_regions
- </span>
- <span class="entry_type_visibility"> [public as rectangle]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of disjoint rectangles indicating the sensor
-optically shielded black pixel regions.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>In most camera sensors,<wbr/> the active array is surrounded by some
-optically shielded pixel areas.<wbr/> By blocking light,<wbr/> these pixels
-provide a reliable black reference for black level compensation
-in the active array region.<wbr/></p>
-<p>This key provides a list of disjoint rectangles specifying the
-regions of optically shielded (with metal shield) black pixel
-regions if the camera device is capable of reading out these black
-pixels in the output raw images.<wbr/> In comparison to the fixed black
-level values reported by <a href="#static_android.sensor.blackLevelPattern">android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern</a>,<wbr/> this key
-may provide a more accurate way for the application to calculate
-the black level of each captured raw image.<wbr/></p>
-<p>When this key is reported,<wbr/> the <a href="#dynamic_android.sensor.dynamicBlackLevel">android.<wbr/>sensor.<wbr/>dynamic<wbr/>Black<wbr/>Level</a> and
-<a href="#dynamic_android.sensor.dynamicWhiteLevel">android.<wbr/>sensor.<wbr/>dynamic<wbr/>White<wbr/>Level</a> will also be reported.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This array contains (xmin,<wbr/> ymin,<wbr/> width,<wbr/> height).<wbr/> The (xmin,<wbr/> ymin)
-must be >= (0,<wbr/>0) and <=
-<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/> The (width,<wbr/> height) must be
-<= <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/> Each region must be
-outside the region reported by
-<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
-<p>The HAL must report the minimal number of disjoint regions for the
-optically shielded black pixel regions.<wbr/> For example,<wbr/> if a region can
-be covered by one rectangle,<wbr/> the HAL must not split this region into
-multiple rectangles.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.sensor.opaqueRawSize">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>opaque<wbr/>Raw<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 3
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Size in bytes for all the listed opaque RAW buffer sizes</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Must be large enough to fit the opaque RAW of corresponding size produced by
-the camera</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>These configurations are listed as <code>(width,<wbr/> height,<wbr/> size_<wbr/>in_<wbr/>bytes)</code> tuples.<wbr/>
-This is used for sizing the gralloc buffers for opaque RAW buffers.<wbr/>
-All RAW_<wbr/>OPAQUE output stream configurations listed in
-<a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> will have a corresponding tuple in
-this key.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This key is added in HAL3.<wbr/>4.<wbr/>
-For HAL3.<wbr/>4 or above: devices advertising RAW_<wbr/>OPAQUE format output must list this key.<wbr/>
-For HAL3.<wbr/>3 or earlier devices: if RAW_<wbr/>OPAQUE output is advertised,<wbr/> the camera framework
-will derive this key by assuming each pixel takes two bytes and no padding bytes
-between rows.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.sensor.exposureTime">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>exposure<wbr/>Time
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Duration each pixel is exposed to
-light.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If the sensor can't expose this exact duration,<wbr/> it will shorten the
-duration exposed to the nearest possible value (rather than expose longer).<wbr/>
-The final exposure time used will be available in the output capture result.<wbr/></p>
-<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
-OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.frameDuration">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>frame<wbr/>Duration
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Duration from start of frame exposure to
-start of next frame exposure.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/>
-<a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/> The duration
-is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The maximum frame rate that can be supported by a camera subsystem is
-a function of many factors:</p>
-<ul>
-<li>Requested resolutions of output image streams</li>
-<li>Availability of binning /<wbr/> skipping modes on the imager</li>
-<li>The bandwidth of the imager interface</li>
-<li>The bandwidth of the various ISP processing blocks</li>
-</ul>
-<p>Since these factors can vary greatly between different ISPs and
-sensors,<wbr/> the camera abstraction tries to represent the bandwidth
-restrictions with as simple a model as possible.<wbr/></p>
-<p>The model presented has the following characteristics:</p>
-<ul>
-<li>The image sensor is always configured to output the smallest
-resolution possible given the application's requested output stream
-sizes.<wbr/> The smallest resolution is defined as being at least as large
-as the largest requested output stream size; the camera pipeline must
-never digitally upsample sensor data when the crop region covers the
-whole sensor.<wbr/> In general,<wbr/> this means that if only small output stream
-resolutions are configured,<wbr/> the sensor can provide a higher frame
-rate.<wbr/></li>
-<li>Since any request may use any or all the currently configured
-output streams,<wbr/> the sensor and ISP must be configured to support
-scaling a single capture to all the streams at the same time.<wbr/> This
-means the camera pipeline must be ready to produce the largest
-requested output size without any delay.<wbr/> Therefore,<wbr/> the overall
-frame rate of a given configured stream set is governed only by the
-largest requested stream resolution.<wbr/></li>
-<li>Using more than one output stream in a request does not affect the
-frame duration.<wbr/></li>
-<li>Certain format-streams may need to do additional background processing
-before data is consumed/<wbr/>produced by that stream.<wbr/> These processors
-can run concurrently to the rest of the camera pipeline,<wbr/> but
-cannot process more than 1 capture at a time.<wbr/></li>
-</ul>
-<p>The necessary information for the application,<wbr/> given the model above,<wbr/>
-is provided via the <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> field using
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>.<wbr/>
-These are used to determine the maximum frame rate /<wbr/> minimum frame
-duration that is possible for a given stream configuration.<wbr/></p>
-<p>Specifically,<wbr/> the application can use the following rules to
-determine the minimum frame duration it can request from the camera
-device:</p>
-<ol>
-<li>Let the set of currently configured input/<wbr/>output streams
-be called <code>S</code>.<wbr/></li>
-<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by looking
-it up in <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> using <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
-(with its respective size/<wbr/>format).<wbr/> Let this set of frame durations be
-called <code>F</code>.<wbr/></li>
-<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed
-for <code>R</code> is the maximum out of all values in <code>F</code>.<wbr/> Let the streams
-used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
-</ol>
-<p>If none of the streams in <code>S_<wbr/>r</code> have a stall time (listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>
-using its respective size/<wbr/>format),<wbr/> then the frame duration in <code>F</code>
-determines the steady state frame rate that the application will get
-if it uses <code>R</code> as a repeating request.<wbr/> Let this special kind of
-request be called <code>Rsimple</code>.<wbr/></p>
-<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
-by a single capture of a new request <code>Rstall</code> (which has at least
-one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
-same minimum frame duration this will not cause a frame rate loss
-if all buffers from the previous <code>Rstall</code> have already been
-delivered.<wbr/></p>
-<p>For more details about stalling,<wbr/> see
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>.<wbr/></p>
-<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
-OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For more details about stalling,<wbr/> see
-<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
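
For illustration only, here is a minimal C++ sketch of the frame-duration rules described in the entry above: the minimum frame duration for a request is the maximum of the per-stream minimums, and the duration actually used is further capped by `max(duration, exposureTime + overhead)`. The stream durations, exposure time, and overhead below are hypothetical values, not anything queried from a real device or HAL API.

```cpp
// Hedged sketch of the frame-duration rule; all numbers are made up.
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
    // Hypothetical per-stream minimum frame durations (nanoseconds) for the
    // streams S_r used in a request R, e.g. a preview stream and a YUV stream.
    std::vector<int64_t> minFrameDurationsNs = {33333333, 50000000};

    // Rule 3 above: the minimum frame duration allowed for R is the maximum
    // of the per-stream minimum durations.
    int64_t requestMinNs =
        *std::max_element(minFrameDurationsNs.begin(), minFrameDurationsNs.end());

    // The duration used is additionally capped to
    // max(duration, exposureTime + overhead), as noted in the Range column.
    int64_t exposureTimeNs = 40000000;  // hypothetical exposure time
    int64_t overheadNs = 1000000;       // hypothetical per-frame overhead
    int64_t effectiveNs = std::max(requestMinNs, exposureTimeNs + overheadNs);

    std::printf("request minimum: %lld ns, effective: %lld ns\n",
                (long long)requestMinNs, (long long)effectiveNs);
    return 0;
}
```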
-
-
- <tr class="entry" id="dynamic_android.sensor.sensitivity">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>sensitivity
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The amount of gain applied to sensor data
-before processing.<wbr/></p>
- </td>
-
- <td class="entry_units">
- ISO arithmetic units
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The sensitivity is the standard ISO sensitivity value,<wbr/>
-as defined in ISO 12232:2006.<wbr/></p>
-<p>The sensitivity must be within <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a>,<wbr/> and
-if it is less than <a href="#static_android.sensor.maxAnalogSensitivity">android.<wbr/>sensor.<wbr/>max<wbr/>Analog<wbr/>Sensitivity</a>,<wbr/> the camera device
-is guaranteed to use only analog amplification for applying the gain.<wbr/></p>
-<p>If the camera device cannot apply the exact sensitivity
-requested,<wbr/> it will reduce the gain to the nearest supported
-value.<wbr/> The final sensitivity used will be available in the
-output capture result.<wbr/></p>
-<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
-OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>ISO 12232:2006 REI method is acceptable.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.timestamp">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>timestamp
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Time at start of exposure of first
-row of the image sensor active array,<wbr/> in nanoseconds.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p>> 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The timestamps are also included in all image
-buffers produced for the same capture,<wbr/> and will be identical
-on all the outputs.<wbr/></p>
-<p>When <a href="#static_android.sensor.info.timestampSource">android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source</a> <code>==</code> UNKNOWN,<wbr/>
-the timestamps measure time since an unspecified starting point,<wbr/>
-and are monotonically increasing.<wbr/> They can be compared with the
-timestamps for other captures from the same camera device,<wbr/> but are
-not guaranteed to be comparable to any other time source.<wbr/></p>
-<p>When <a href="#static_android.sensor.info.timestampSource">android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source</a> <code>==</code> REALTIME,<wbr/> the
-timestamps measure time in the same timebase as <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,<wbr/> and they can
-be compared to other timestamps from other subsystems that
-are using that base.<wbr/></p>
-<p>For reprocessing,<wbr/> the timestamp will match the start of exposure of
-the input image,<wbr/> i.<wbr/>e.<wbr/> <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html#SENSOR_TIMESTAMP">the
-timestamp</a> in the TotalCaptureResult that was used to create the
-reprocess capture request.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>All timestamps must be in reference to the kernel's
-CLOCK_<wbr/>BOOTTIME monotonic clock,<wbr/> which properly accounts for
-time spent asleep.<wbr/> This allows for synchronization with
-sensors that continue to operate while the system is
-otherwise asleep.<wbr/></p>
-<p>If <a href="#static_android.sensor.info.timestampSource">android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source</a> <code>==</code> REALTIME,<wbr/>
-the timestamp must be synchronized with the timestamps from other
-sensor subsystems that are using the same timebase.<wbr/></p>
-<p>For reprocessing,<wbr/> the input image's start of exposure can be looked up
-with <a href="#dynamic_android.sensor.timestamp">android.<wbr/>sensor.<wbr/>timestamp</a> from the metadata included in the
-capture request.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.temperature">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>sensor.<wbr/>temperature
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The temperature of the sensor,<wbr/> sampled at the time
-exposure began for this frame.<wbr/></p>
-<p>The thermal diode being queried should be inside the sensor PCB,<wbr/> or
-somewhere close to it.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Celsius
- </td>
-
- <td class="entry_range">
- <p>Optional.<wbr/> This value is missing if no temperature is available.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.neutralColorPoint">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>neutral<wbr/>Color<wbr/>Point
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The estimated camera neutral color in the native sensor colorspace at
-the time of capture.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This value gives the neutral color point encoded as an RGB value in the
-native sensor color space.<wbr/> The neutral color point indicates the
-currently estimated white point of the scene illumination.<wbr/> It can be
-used to interpolate between the provided color transforms when
-processing raw sensor data.<wbr/></p>
-<p>The order of the values is R,<wbr/> G,<wbr/> B; where R is in the lowest index.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.noiseProfile">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>noise<wbr/>Profile
- </td>
- <td class="entry_type">
- <span class="entry_type_name">double</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2 x CFA Channels
- </span>
- <span class="entry_type_visibility"> [public as pairDoubleDouble]</span>
-
-
-
-
- <div class="entry_type_notes">Pairs of noise model coefficients</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Noise model coefficients for each CFA mosaic channel.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This key contains two noise model coefficients for each CFA channel
-corresponding to the sensor amplification (S) and sensor readout
-noise (O).<wbr/> These are given as pairs of coefficients for each channel
-in the same order as channels listed for the CFA layout key
-(see <a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a>).<wbr/> This is
-represented as an array of Pair<Double,<wbr/> Double>,<wbr/> where
-the first member of the Pair at index n is the S coefficient and the
-second member is the O coefficient for the nth color channel in the CFA.<wbr/></p>
-<p>These coefficients are used in a two parameter noise model to describe
-the amount of noise present in the image for each CFA channel.<wbr/> The
-noise model used here is:</p>
-<p>N(x) = sqrt(Sx + O)</p>
-<p>Where x represents the recorded signal of a CFA channel normalized to
-the range [0,<wbr/> 1],<wbr/> and S and O are the noise model coefficients for
-that channel.<wbr/></p>
-<p>A more detailed description of the noise model can be found in the
-Adobe DNG specification for the NoiseProfile tag.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For a CFA layout of RGGB,<wbr/> the list of coefficients would be given as
-an array of doubles S0,<wbr/>O0,<wbr/>S1,<wbr/>O1,...,<wbr/> where S0 and O0 are the coefficients
-for the red channel,<wbr/> S1 and O1 are the coefficients for the first green
-channel,<wbr/> etc.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
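
As a quick illustration of the two-parameter model N(x) = sqrt(Sx + O) described in the entry above, the following sketch evaluates it per CFA channel in the RGGB ordering mentioned in the HAL details. The S and O coefficient values are hypothetical; real coefficients come from the noiseProfile result itself.

```cpp
// Hedged sketch: evaluate N(x) = sqrt(S*x + O) per CFA channel (RGGB order).
#include <cmath>
#include <cstdio>

int main() {
    // Hypothetical {S, O} pairs in CFA order: R, Gr, Gb, B.
    const double coeffs[4][2] = {
        {1.2e-4, 3.0e-8},
        {1.0e-4, 2.5e-8},
        {1.0e-4, 2.5e-8},
        {1.3e-4, 3.5e-8},
    };
    const char* names[4] = {"R", "Gr", "Gb", "B"};

    const double x = 0.5;  // recorded signal, normalized to [0, 1]
    for (int c = 0; c < 4; ++c) {
        double n = std::sqrt(coeffs[c][0] * x + coeffs[c][1]);
        std::printf("channel %-2s: N(%.2f) = %.6g\n", names[c], x, n);
    }
    return 0;
}
```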
-
-
- <tr class="entry" id="dynamic_android.sensor.profileHueSatMap">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- hue_samples x saturation_samples x value_samples x 3
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
- <div class="entry_type_notes">Mapping for hue,<wbr/> saturation,<wbr/> and value</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A mapping containing a hue shift,<wbr/> saturation scale,<wbr/> and value scale
-for each pixel.<wbr/></p>
- </td>
-
- <td class="entry_units">
-
- The hue shift is given in degrees; saturation and value scale factors are
- unitless and are between 0 and 1 inclusive
-
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>hue_<wbr/>samples,<wbr/> saturation_<wbr/>samples,<wbr/> and value_<wbr/>samples are given in
-<a href="#static_android.sensor.profileHueSatMapDimensions">android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map<wbr/>Dimensions</a>.<wbr/></p>
-<p>Each entry of this map contains three floats corresponding to the
-hue shift,<wbr/> saturation scale,<wbr/> and value scale,<wbr/> respectively; where the
-hue shift has the lowest index.<wbr/> The map entries are stored in the key
-in nested loop order,<wbr/> with the value divisions in the outer loop,<wbr/> the
-hue divisions in the middle loop,<wbr/> and the saturation divisions in the
-inner loop.<wbr/> All zero input saturation entries are required to have a
-value scale factor of 1.<wbr/>0.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.profileToneCurve">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>sensor.<wbr/>profile<wbr/>Tone<wbr/>Curve
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- samples x 2
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
- <div class="entry_type_notes">Samples defining a spline for a tone-mapping curve</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A list of x,<wbr/>y samples defining a tone-mapping curve for gamma adjustment.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Each sample has an input range of <code>[0,<wbr/> 1]</code> and an output range of
-<code>[0,<wbr/> 1]</code>.<wbr/> The first sample is required to be <code>(0,<wbr/> 0)</code>,<wbr/> and the last
-sample is required to be <code>(1,<wbr/> 1)</code>.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This key contains a default tone curve that can be applied while
-processing the image as a starting point for user adjustments.<wbr/>
-The curve is specified as a list of value pairs in linear gamma.<wbr/>
-The curve is interpolated using a cubic spline.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.greenSplit">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>green<wbr/>Split
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The worst-case divergence between Bayer green channels.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This value is an estimate of the worst case split between the
-Bayer green channels in the red and blue rows in the sensor color
-filter array.<wbr/></p>
-<p>The green split is calculated as follows:</p>
-<ol>
-<li>A 5x5 pixel (or larger) window W within the active sensor array is
-chosen.<wbr/> The term 'pixel' here is taken to mean a group of 4 Bayer
-mosaic channels (R,<wbr/> Gr,<wbr/> Gb,<wbr/> B).<wbr/> The location and size of the window
-chosen is implementation defined,<wbr/> and should be chosen to provide a
-green split estimate that is both representative of the entire image
-for this camera sensor,<wbr/> and can be calculated quickly.<wbr/></li>
-<li>The arithmetic mean of the green channels from the red
-rows (mean_<wbr/>Gr) within W is computed.<wbr/></li>
-<li>The arithmetic mean of the green channels from the blue
-rows (mean_<wbr/>Gb) within W is computed.<wbr/></li>
-<li>The maximum ratio R of the two means is computed as follows:
-<code>R = max((mean_<wbr/>Gr + 1)/<wbr/>(mean_<wbr/>Gb + 1),<wbr/> (mean_<wbr/>Gb + 1)/<wbr/>(mean_<wbr/>Gr + 1))</code></li>
-</ol>
-<p>The ratio R is the green split divergence reported for this property,<wbr/>
-which represents how much the green channels differ in the mosaic
-pattern.<wbr/> This value is typically used to determine the treatment of
-the green mosaic channels when demosaicing.<wbr/></p>
-<p>The green split value can be roughly interpreted as follows:</p>
-<ul>
-<li>R < 1.<wbr/>03 is a negligible split (<3% divergence).<wbr/></li>
-<li>1.<wbr/>03 <= R <= 1.<wbr/>20 will require some software
-correction to avoid demosaic errors (3-20% divergence).<wbr/></li>
-<li>R > 1.<wbr/>20 will require strong software correction to produce
-a usable image (>20% divergence).<wbr/></li>
-</ul>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The green split given may be a static value based on prior
-characterization of the camera sensor using the green split
-calculation method given here over a large,<wbr/> representative,<wbr/> sample
-set of images.<wbr/> Other methods of calculation that produce equivalent
-results,<wbr/> and can be interpreted in the same manner,<wbr/> may be used.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
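
The following is a small sketch of the ratio computation in steps 2-4 of the greenSplit entry above. The Gr/Gb samples of the window W are hypothetical; a real implementation (or offline characterization) would take them from the sensor's active array.

```cpp
// Hedged sketch of R = max((mean_Gr+1)/(mean_Gb+1), (mean_Gb+1)/(mean_Gr+1)).
#include <algorithm>
#include <cstdio>
#include <vector>

static double mean(const std::vector<double>& v) {
    double sum = 0.0;
    for (double s : v) sum += s;
    return v.empty() ? 0.0 : sum / v.size();
}

int main() {
    // Hypothetical green samples from red rows (Gr) and blue rows (Gb)
    // inside a 5x5-pixel window W.
    std::vector<double> gr = {812, 805, 820, 799, 808};
    std::vector<double> gb = {790, 784, 801, 779, 788};

    double meanGr = mean(gr);
    double meanGb = mean(gb);
    double r = std::max((meanGr + 1.0) / (meanGb + 1.0),
                        (meanGb + 1.0) / (meanGr + 1.0));

    // ~1.026 for these samples, i.e. below the 1.03 "negligible" threshold.
    std::printf("green split R = %.4f\n", r);
    return 0;
}
```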
-
-
- <tr class="entry" id="dynamic_android.sensor.testPatternData">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A pixel <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> that supplies the test pattern
-when <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode</a> is SOLID_<wbr/>COLOR.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Each color channel is treated as an unsigned 32-bit integer.<wbr/>
-The camera device then uses the most significant X bits
-that correspond to how many bits are in its Bayer raw sensor
-output.<wbr/></p>
-<p>For example,<wbr/> a sensor with RAW10 Bayer output would use the
-10 most significant bits from each color channel.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
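
A small sketch of the bit selection described in the testPatternData details above: each 32-bit channel value is truncated to the sensor's raw bit depth by keeping the most significant bits. The RAW10 bit depth and the pattern values below are assumptions for illustration.

```cpp
// Hedged sketch: keep the top `bits` MSBs of a 32-bit test-pattern channel,
// as a RAW10 Bayer sensor would for a SOLID_COLOR pattern.
#include <cstdint>
#include <cstdio>

static uint32_t topBits(uint32_t channel, int bits) {
    return channel >> (32 - bits);
}

int main() {
    const int rawBits = 10;  // assumed Bayer raw bit depth (RAW10)
    // Hypothetical [R, G_even, G_odd, B] pattern: green fully on, red/blue off.
    const uint32_t pattern[4] = {0u, 0xFFFFFFFFu, 0xFFFFFFFFu, 0u};
    const char* names[4] = {"R", "G_even", "G_odd", "B"};

    for (int i = 0; i < 4; ++i) {
        std::printf("%-6s -> %u (of max %u)\n", names[i],
                    (unsigned)topBits(pattern[i], rawBits),
                    (unsigned)((1u << rawBits) - 1u));
    }
    return 0;
}
```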
-
-
- <tr class="entry" id="dynamic_android.sensor.testPatternMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No test pattern mode is used,<wbr/> and the camera
-device returns captures from the image sensor.<wbr/></p>
-<p>This is the default if the key is not set.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SOLID_COLOR</span>
- <span class="entry_type_enum_notes"><p>Each pixel in <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> is replaced by its
-respective color channel provided in
-<a href="#controls_android.sensor.testPatternData">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data</a>.<wbr/></p>
-<p>For example:</p>
-<pre><code>android.<wbr/>testPatternData = [0,<wbr/> 0xFFFFFFFF,<wbr/> 0xFFFFFFFF,<wbr/> 0]
-</code></pre>
-<p>All green pixels are 100% green.<wbr/> All red/<wbr/>blue pixels are black.<wbr/></p>
-<pre><code>android.<wbr/>testPatternData = [0xFFFFFFFF,<wbr/> 0,<wbr/> 0xFFFFFFFF,<wbr/> 0]
-</code></pre>
-<p>All red pixels are 100% red.<wbr/> Only the odd green pixels
-are 100% green.<wbr/> All blue pixels are 100% black.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">COLOR_BARS</span>
- <span class="entry_type_enum_notes"><p>All pixel data is replaced with an 8-bar color pattern.<wbr/></p>
-<p>The vertical bars (left-to-right) are as follows:</p>
-<ul>
-<li>100% white</li>
-<li>yellow</li>
-<li>cyan</li>
-<li>green</li>
-<li>magenta</li>
-<li>red</li>
-<li>blue</li>
-<li>black</li>
-</ul>
-<p>In general the image would look like the following:</p>
-<pre><code>W Y C G M R B K
-W Y C G M R B K
-W Y C G M R B K
-W Y C G M R B K
-W Y C G M R B K
-.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
-.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
-.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
-
-(B = Blue,<wbr/> K = Black)
-</code></pre>
-<p>Each bar should take up 1/<wbr/>8 of the sensor pixel array width.<wbr/>
-When this is not possible,<wbr/> the bar size should be rounded
-down to the nearest integer and the pattern can repeat
-on the right side.<wbr/></p>
-<p>Each bar's height must always take up the full sensor
-pixel array height.<wbr/></p>
-<p>Each pixel in this test pattern must be set to either
-0% intensity or 100% intensity.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">COLOR_BARS_FADE_TO_GRAY</span>
- <span class="entry_type_enum_notes"><p>The test pattern is similar to COLOR_<wbr/>BARS,<wbr/> except that
-each bar should start at its specified color at the top,<wbr/>
-and fade to gray at the bottom.<wbr/></p>
-<p>Furthermore each bar is further subdivided into a left and
-right half.<wbr/> The left half should have a smooth gradient,<wbr/>
-and the right half should have a quantized gradient.<wbr/></p>
-<p>In particular,<wbr/> the right half should consist of blocks of the
-same color for 1/<wbr/>16th of the active sensor pixel array width.<wbr/></p>
-<p>The least significant bits in the quantized gradient should
-be copied from the most significant bits of the smooth gradient.<wbr/></p>
-<p>The height of each bar should always be a multiple of 128.<wbr/>
-When this is not the case,<wbr/> the pattern should repeat at the bottom
-of the image.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PN9</span>
- <span class="entry_type_enum_notes"><p>All pixel data is replaced by a pseudo-random sequence
-generated from a PN9 512-bit sequence (typically implemented
-in hardware with a linear feedback shift register).<wbr/></p>
-<p>The generator should be reset at the beginning of each frame,<wbr/>
-and thus each subsequent raw frame with this test pattern should
-be exactly the same as the last.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">CUSTOM1</span>
- <span class="entry_type_enum_value">256</span>
- <span class="entry_type_enum_notes"><p>The first custom test pattern.<wbr/> All custom patterns that are
-available only on this camera device are at least this numeric
-value.<wbr/></p>
-<p>All of the custom test patterns will be static
-(that is the raw image must not vary from frame to frame).<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>When enabled,<wbr/> the sensor sends a test pattern instead of
-doing a real exposure from the camera.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.sensor.availableTestPatternModes">android.<wbr/>sensor.<wbr/>available<wbr/>Test<wbr/>Pattern<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When a test pattern is enabled,<wbr/> all manual sensor controls specified
-by android.<wbr/>sensor.<wbr/>* will be ignored.<wbr/> All other controls should
-work as normal.<wbr/></p>
-<p>For example,<wbr/> if manual flash is enabled,<wbr/> flash firing should still
-occur (and the test pattern should remain unmodified,<wbr/> since the flash
-would not actually affect it).<wbr/></p>
-<p>Defaults to OFF.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>All test patterns are specified in the Bayer domain.<wbr/></p>
-<p>The HAL may choose to substitute test patterns from the sensor
-with test patterns from on-device memory.<wbr/> In that case,<wbr/> it should be
-indistinguishable to the ISP whether the data came from the
-sensor interconnect bus (such as CSI2) or memory.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.rollingShutterSkew">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>rolling<wbr/>Shutter<wbr/>Skew
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Duration between the start of first row exposure
-and the start of last row exposure.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Nanoseconds
- </td>
-
- <td class="entry_range">
- <p>>= 0 and <
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is the exposure time skew between the first and last
-row exposure start times.<wbr/> The first row and the last row are
-the first and last rows inside of the
-<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
-<p>For typical camera sensors that use rolling shutters,<wbr/> this is also equivalent
-to the frame readout time.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The HAL must report <code>0</code> if the sensor is using global shutter,<wbr/> where all pixels begin
-exposure at the same time.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.dynamicBlackLevel">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>dynamic<wbr/>Black<wbr/>Level
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
- <div class="entry_type_notes">2x2 raw count block</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A per-frame dynamic black level offset for each of the color filter
-arrangement (CFA) mosaic channels.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 0 for each.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Camera sensor black levels may vary dramatically for different
-capture settings (e.<wbr/>g.<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>).<wbr/> The fixed black
-level reported by <a href="#static_android.sensor.blackLevelPattern">android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern</a> may be too
-inaccurate to represent the actual value on a per-frame basis.<wbr/> The
-camera device internal pipeline relies on reliable black level values
-to process the raw images appropriately.<wbr/> To get the best image
-quality,<wbr/> the camera device may choose to estimate the per frame black
-level values either based on optically shielded black regions
-(<a href="#static_android.sensor.opticalBlackRegions">android.<wbr/>sensor.<wbr/>optical<wbr/>Black<wbr/>Regions</a>) or its internal model.<wbr/></p>
-<p>This key reports the camera device estimated per-frame zero light
-value for each of the CFA mosaic channels in the camera sensor.<wbr/> The
-<a href="#static_android.sensor.blackLevelPattern">android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern</a> may only represent a coarse
-approximation of the actual black level values.<wbr/> This value is the
-black level used in the camera device's internal image processing pipeline
-and is generally more accurate than the fixed black level values.<wbr/>
-However,<wbr/> since they are estimated values by the camera device,<wbr/> they
-may not be as accurate as the black level values calculated from the
-optical black pixels reported by <a href="#static_android.sensor.opticalBlackRegions">android.<wbr/>sensor.<wbr/>optical<wbr/>Black<wbr/>Regions</a>.<wbr/></p>
-<p>The values are given in the same order as channels listed for the CFA
-layout key (see <a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a>),<wbr/> i.<wbr/>e.<wbr/> the
-nth value given corresponds to the black level offset for the nth
-color channel listed in the CFA.<wbr/></p>
-<p>This key will be available if <a href="#static_android.sensor.opticalBlackRegions">android.<wbr/>sensor.<wbr/>optical<wbr/>Black<wbr/>Regions</a> is
-available or the camera device advertises this key via
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureResultKeys">CameraCharacteristics#getAvailableCaptureResultKeys</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The values are given in row-column scan order,<wbr/> with the first value
-corresponding to the element of the CFA in row=0,<wbr/> column=0.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.sensor.dynamicWhiteLevel">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sensor.<wbr/>dynamic<wbr/>White<wbr/>Level
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Maximum raw value output by sensor for this frame.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Since the <a href="#static_android.sensor.blackLevelPattern">android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern</a> may change for different
-capture settings (e.<wbr/>g.,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>),<wbr/> the white
-level will change accordingly.<wbr/> This key is similar to
-<a href="#static_android.sensor.info.whiteLevel">android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level</a>,<wbr/> but specifies the camera device
-estimated white level for each frame.<wbr/></p>
-<p>This key will be available if <a href="#static_android.sensor.opticalBlackRegions">android.<wbr/>sensor.<wbr/>optical<wbr/>Black<wbr/>Regions</a> is
-available or the camera device advertises this key via
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureResultKeys">CameraCharacteristics#getAvailableCaptureResultKeys</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The full bit depth of the sensor must be available in the raw data,<wbr/>
-so the value for linear sensors should not be significantly lower
-than the maximum raw value supported,<wbr/> i.<wbr/>e.<wbr/> 2^(sensor bits per pixel).<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_shading" class="section">shading</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.shading.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>shading.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No lens shading correction is applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Apply lens shading corrections,<wbr/> without slowing
-frame rate relative to sensor raw output</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>Apply high-quality lens shading correction,<wbr/> at the
-cost of possibly reduced frame rate.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Quality of lens shading correction applied
-to the image data.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.shading.availableModes">android.<wbr/>shading.<wbr/>available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to OFF mode,<wbr/> no lens shading correction will be applied by the
-camera device,<wbr/> and an identity lens shading map data will be provided
-if <code><a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> == ON</code>.<wbr/> For example,<wbr/> for lens
-shading map with size of <code>[ 4,<wbr/> 3 ]</code>,<wbr/>
-the output <a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map</a> for this case will be an identity
-map shown below:</p>
-<pre><code>[ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0 ]
-</code></pre>
-<p>When set to other modes,<wbr/> lens shading correction will be applied by the camera
-device.<wbr/> Applications can request lens shading map data by setting
-<a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> to ON,<wbr/> and then the camera device will provide lens
-shading map data in <a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map</a>; the returned shading map
-data will be the one applied by the camera device for this capture request.<wbr/></p>
-<p>The shading map data may depend on the auto-exposure (AE) and AWB statistics,<wbr/> therefore
-the reliability of the map data may be affected by the AE and AWB algorithms.<wbr/> When AE and
-AWB are in AUTO modes (<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF and <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> <code>!=</code>
-OFF),<wbr/> to get the best results,<wbr/> it is recommended that applications wait for AE and AWB
-to converge before using the returned shading map data.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
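
For reference, a short sketch that builds the identity lens shading map shown in the shading.mode details above, assuming a hypothetical 4x3 map size with four gain factors (one per Bayer channel) at each grid point, all equal to 1.0.

```cpp
// Hedged sketch: an identity shading map for a 4x3 grid, 4 gains per cell.
#include <cstdio>
#include <vector>

int main() {
    const int columns = 4, rows = 3, channels = 4;
    std::vector<float> shadingMap(columns * rows * channels, 1.0f);

    // Print row by row, matching the row-major layout of the example above.
    for (int r = 0; r < rows; ++r) {
        for (int c = 0; c < columns * channels; ++c) {
            std::printf("%.1f ", shadingMap[r * columns * channels + c]);
        }
        std::printf("\n");
    }
    return 0;
}
```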
-
-
- <tr class="entry" id="controls_android.shading.strength">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>shading.<wbr/>strength
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Control the amount of shading correction
-applied to the images</p>
- </td>
-
- <td class="entry_units">
- unitless: 1-10; 10 is full shading
- compensation
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.shading.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>shading.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>No lens shading correction is applied.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Apply lens shading corrections,<wbr/> without slowing
-frame rate relative to sensor raw output</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>Apply high-quality lens shading correction,<wbr/> at the
-cost of possibly reduced frame rate.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Quality of lens shading correction applied
-to the image data.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.shading.availableModes">android.<wbr/>shading.<wbr/>available<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to OFF mode,<wbr/> no lens shading correction will be applied by the
-camera device,<wbr/> and an identity lens shading map data will be provided
-if <code><a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> == ON</code>.<wbr/> For example,<wbr/> for lens
-shading map with size of <code>[ 4,<wbr/> 3 ]</code>,<wbr/>
-the output <a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map</a> for this case will be an identity
-map shown below:</p>
-<pre><code>[ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0 ]
-</code></pre>
-<p>When set to other modes,<wbr/> lens shading correction will be applied by the camera
-device.<wbr/> Applications can request lens shading map data by setting
-<a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> to ON,<wbr/> and then the camera device will provide lens
-shading map data in <a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map</a>; the returned shading map
-data will be the one applied by the camera device for this capture request.<wbr/></p>
-<p>The shading map data may depend on the auto-exposure (AE) and AWB statistics,<wbr/> therefore
-the reliability of the map data may be affected by the AE and AWB algorithms.<wbr/> When AE and
-AWB are in AUTO modes (<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF and <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> <code>!=</code>
-OFF),<wbr/> to get the best results,<wbr/> it is recommended that applications wait for AE and AWB
-to converge before using the returned shading map data.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.shading.availableModes">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>shading.<wbr/>available<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">List of enums (android.<wbr/>shading.<wbr/>mode).<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of lens shading modes for <a href="#controls_android.shading.mode">android.<wbr/>shading.<wbr/>mode</a> that are supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.shading.mode">android.<wbr/>shading.<wbr/>mode</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This list contains lens shading modes that can be set for the camera device.<wbr/>
-Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always
-list OFF and FAST mode.<wbr/> This includes all FULL level devices.<wbr/>
-LEGACY devices will always only support FAST mode.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if lens shading correction control is
-available on the camera device,<wbr/> but the underlying implementation can be the same for
-both modes.<wbr/> That is,<wbr/> if the highest quality implementation on the camera device does not
-slow down capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_statistics" class="section">statistics</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.statistics.faceDetectMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Do not include face detection statistics in capture
-results.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SIMPLE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Return face rectangle and confidence values only.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FULL</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Return all face
-metadata.<wbr/></p>
-<p>In this mode,<wbr/> face rectangles,<wbr/> scores,<wbr/> landmarks,<wbr/> and face IDs are all valid.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operating mode for the face detector
-unit.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.statistics.info.availableFaceDetectModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Whether face detection is enabled,<wbr/> and whether it
-should output just the basic fields or the full set of
-fields.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>SIMPLE mode must fill in <a href="#dynamic_android.statistics.faceRectangles">android.<wbr/>statistics.<wbr/>face<wbr/>Rectangles</a> and
-<a href="#dynamic_android.statistics.faceScores">android.<wbr/>statistics.<wbr/>face<wbr/>Scores</a>.<wbr/>
-FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">android.<wbr/>statistics.<wbr/>face<wbr/>Ids</a>,<wbr/> and
-<a href="#dynamic_android.statistics.faceLandmarks">android.<wbr/>statistics.<wbr/>face<wbr/>Landmarks</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.statistics.histogramMode">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>statistics.<wbr/>histogram<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [system as boolean]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operating mode for histogram
-generation</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.statistics.sharpnessMapMode">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>statistics.<wbr/>sharpness<wbr/>Map<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [system as boolean]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operating mode for sharpness map
-generation</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.statistics.hotPixelMapMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Hot pixel map production is disabled.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Hot pixel map production is enabled.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operating mode for hot pixel map generation.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.statistics.info.availableHotPixelMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Map<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If set to <code>true</code>,<wbr/> a hot pixel map is returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/>
-If set to <code>false</code>,<wbr/> no hot pixel map will be returned.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.statistics.lensShadingMapMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Do not include a lens shading map in the capture result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Include a lens shading map in the capture result.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether the camera device will output the lens
-shading map in output result metadata.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.statistics.info.availableLensShadingMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Lens<wbr/>Shading<wbr/>Map<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to ON,<wbr/>
-<a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> will be provided in
-the output result metadata.<wbr/></p>
-<p>ON is always supported on devices with the RAW capability.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.statistics.info.availableFaceDetectModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">List of enums from android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of face detection modes for <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> that are
-supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>OFF is always supported.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.statistics.info.histogramBucketCount">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>statistics.<wbr/>info.<wbr/>histogram<wbr/>Bucket<wbr/>Count
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Number of histogram buckets
-supported</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>>= 64</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.statistics.info.maxFaceCount">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>statistics.<wbr/>info.<wbr/>max<wbr/>Face<wbr/>Count
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum number of simultaneously detectable
-faces.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>0 for cameras without available face detection; otherwise:
-<code>>=4</code> for LIMITED or FULL hwlevel devices or
-<code>>0</code> for LEGACY devices.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.statistics.info.maxHistogramCount">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>statistics.<wbr/>info.<wbr/>max<wbr/>Histogram<wbr/>Count
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Maximum value possible for a histogram
-bucket</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.statistics.info.maxSharpnessMapValue">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>statistics.<wbr/>info.<wbr/>max<wbr/>Sharpness<wbr/>Map<wbr/>Value
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Maximum value possible for a sharpness map
-region.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.statistics.info.sharpnessMapSize">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>statistics.<wbr/>info.<wbr/>sharpness<wbr/>Map<wbr/>Size
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2
- </span>
- <span class="entry_type_visibility"> [system as size]</span>
-
-
-
-
- <div class="entry_type_notes">width x height</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Dimensions of the sharpness
-map</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Must be at least 32 x 32</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.statistics.info.availableHotPixelMapModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Map<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of hot pixel map output modes for <a href="#controls_android.statistics.hotPixelMapMode">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map<wbr/>Mode</a> that are
-supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.statistics.hotPixelMapMode">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If no hotpixel map output is available for this camera device,<wbr/> this will contain only
-<code>false</code>.<wbr/></p>
-<p>ON is always supported on devices with the RAW capability.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.statistics.info.availableLensShadingMapModes">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Lens<wbr/>Shading<wbr/>Map<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of lens shading map output modes for <a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> that
-are supported by this camera device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If no lens shading map output is available for this camera device,<wbr/> this key will
-contain only OFF.<wbr/></p>
-<p>ON is always supported on devices with the RAW capability.<wbr/>
-LEGACY mode devices will always only support OFF.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.statistics.faceDetectMode">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Do not include face detection statistics in capture
-results.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">SIMPLE</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Return face rectangle and confidence values only.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FULL</span>
- <span class="entry_type_enum_optional">[optional]</span>
- <span class="entry_type_enum_notes"><p>Return all face
-metadata.<wbr/></p>
-<p>In this mode,<wbr/> face rectangles,<wbr/> scores,<wbr/> landmarks,<wbr/> and face IDs are all valid.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operating mode for the face detector
-unit.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.statistics.info.availableFaceDetectModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Whether face detection is enabled,<wbr/> and whether it
-should output just the basic fields or the full set of
-fields.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>SIMPLE mode must fill in <a href="#dynamic_android.statistics.faceRectangles">android.<wbr/>statistics.<wbr/>face<wbr/>Rectangles</a> and
-<a href="#dynamic_android.statistics.faceScores">android.<wbr/>statistics.<wbr/>face<wbr/>Scores</a>.<wbr/>
-FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">android.<wbr/>statistics.<wbr/>face<wbr/>Ids</a>,<wbr/> and
-<a href="#dynamic_android.statistics.faceLandmarks">android.<wbr/>statistics.<wbr/>face<wbr/>Landmarks</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.faceIds">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>face<wbr/>Ids
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of unique IDs for detected faces.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Each detected face is given a unique ID that is valid for as long as the face is visible
-to the camera device.<wbr/> A face that leaves the field of view and later returns may be
-assigned a new ID.<wbr/></p>
-<p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> == FULL</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.faceLandmarks">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>face<wbr/>Landmarks
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 6
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">(leftEyeX,<wbr/> leftEyeY,<wbr/> rightEyeX,<wbr/> rightEyeY,<wbr/> mouthX,<wbr/> mouthY)</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of landmarks for detected
-faces.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The coordinate system is that of <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with
-<code>(0,<wbr/> 0)</code> being the top-left pixel of the active array.<wbr/></p>
-<p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> == FULL</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.faceRectangles">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>face<wbr/>Rectangles
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 4
- </span>
- <span class="entry_type_visibility"> [ndk_public as rectangle]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
- <div class="entry_type_notes">(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax).<wbr/> (0,<wbr/>0) is top-left of active pixel area</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of the bounding rectangles for detected
-faces.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The coordinate system is that of <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with
-<code>(0,<wbr/> 0)</code> being the top-left pixel of the active array.<wbr/></p>
-<p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> != OFF</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.faceScores">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>statistics.<wbr/>face<wbr/>Scores
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of the face confidence scores for
-detected faces</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>1-100</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_BC">BC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> != OFF.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The value should be meaningful (for example,<wbr/> setting 100 at
-all times is illegal).<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.faces">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>faces
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [java_public as face]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of the faces detected through camera face detection
-in this capture.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> <code>!=</code> OFF.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
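-
-  <tr class="entry_cont">
-   <td class="entry_details" colspan="6">
-     <p>The android.<wbr/>statistics.<wbr/>faces entry above is a synthetic key assembled from the separate
-android.<wbr/>statistics.<wbr/>face<wbr/>Rectangles (n x 4),<wbr/> face<wbr/>Scores (n),<wbr/> face<wbr/>Ids (n) and
-face<wbr/>Landmarks (n x 6) arrays.<wbr/> The sketch below regroups those parallel arrays into one record per
-detected face; every name in it is hypothetical rather than part of a published API,<wbr/> and ids/<wbr/>landmarks
-are assumed to be present only when face<wbr/>Detect<wbr/>Mode == FULL.<wbr/></p>
-<pre><code>// Illustrative regrouping of the parallel face-statistics arrays into one
-// record per detected face. All names here are hypothetical, not HAL API.
-struct Face {
-    int rect[4];       // (xmin, ymin, xmax, ymax), activeArraySize coordinates
-    int score;         // 1-100 confidence (android.statistics.faceScores)
-    int id;            // FULL mode only, otherwise -1
-    int landmarks[6];  // (leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)
-    bool hasFullData;  // true when ids/landmarks were reported (FULL mode)
-};
-
-// rects is n x 4, scores is n, ids is n, landmarks is n x 6; the last two may
-// be null when android.statistics.faceDetectMode != FULL.
-void buildFaces(int n, const int* rects, const unsigned char* scores,
-                const int* ids, const int* landmarks, Face* out) {
-    for (int i = 0; i < n; ++i) {
-        for (int k = 0; k < 4; ++k) out[i].rect[k] = rects[i * 4 + k];
-        out[i].score = scores[i];
-        bool full = (ids != nullptr);
-        if (landmarks == nullptr) full = false;
-        out[i].hasFullData = full;
-        out[i].id = full ? ids[i] : -1;
-        for (int k = 0; k < 6; ++k)
-            out[i].landmarks[k] = full ? landmarks[i * 6 + k] : 0;
-    }
-}
-</code></pre>
-    </td>
-  </tr>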
-
-
- <tr class="entry" id="dynamic_android.statistics.histogram">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>histogram
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 3
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
- <div class="entry_type_notes">count of pixels for each color channel that fall into each histogram bucket,<wbr/> scaled to be between 0 and maxHistogramCount</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A 3-channel histogram based on the raw
-sensor data</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The k'th bucket (0-based) covers the input range
-(with w = <a href="#static_android.sensor.info.whiteLevel">android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level</a>) of [ k * w/<wbr/>N,<wbr/>
-(k + 1) * w /<wbr/> N ).<wbr/> If only a monochrome histogram is
-supported,<wbr/> all channels should have the same data.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
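-
-  <tr class="entry_cont">
-   <td class="entry_details" colspan="6">
-     <p>As a worked example of the bucket ranges above,<wbr/> a raw value v falls into bucket
-k = floor(v * N /<wbr/> w),<wbr/> where w is <a href="#static_android.sensor.info.whiteLevel">android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level</a> and N is assumed here to be the
-bucket count (<a href="#static_android.statistics.info.histogramBucketCount">android.<wbr/>statistics.<wbr/>info.<wbr/>histogram<wbr/>Bucket<wbr/>Count</a>).<wbr/> The helper below is a
-hypothetical sketch of that mapping,<wbr/> not framework or HAL code.<wbr/></p>
-<pre><code>// Map a raw sensor value to its 0-based histogram bucket, per the range
-// [ k * w/N, (k + 1) * w/N ) described above. Name is illustrative only.
-int histogramBucket(int value, int whiteLevel /* w */, int numBuckets /* N */) {
-    if (value < 0) value = 0;
-    if (value >= whiteLevel) return numBuckets - 1;  // clamp into the last bucket
-    // Integer arithmetic; equivalent to floor(value * N / w).
-    return (int)((long long)value * numBuckets / whiteLevel);
-}
-</code></pre>
-    </td>
-  </tr>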
-
-
- <tr class="entry" id="dynamic_android.statistics.histogramMode">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>statistics.<wbr/>histogram<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [system as boolean]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operating mode for histogram
-generation</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.sharpnessMap">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>sharpness<wbr/>Map
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x m x 3
- </span>
- <span class="entry_type_visibility"> [system]</span>
-
-
-
-
- <div class="entry_type_notes">estimated sharpness for each region of the input image.<wbr/> Normalized to be between 0 and maxSharpnessMapValue.<wbr/> Higher values mean sharper (better focused)</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A 3-channel sharpness map,<wbr/> based on the raw
-sensor data</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If only a monochrome sharpness map is supported,<wbr/>
-all channels should have the same data.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
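-
-  <tr class="entry_cont">
-   <td class="entry_details" colspan="6">
-     <p>The map above stores three channel values per region,<wbr/> each normalized to
-[0,<wbr/> maxSharpnessMapValue].<wbr/> The accessors below are a hypothetical sketch of indexing the flat
-n x m x 3 layout; they assume the channel index varies fastest and the first dimension indexes rows,<wbr/>
-which this table does not specify.<wbr/></p>
-<pre><code>// Hypothetical accessors for a flat n x m x 3 sharpness map; assumes the
-// channel index varies fastest and the first dimension indexes rows.
-int sharpnessAt(const int* map, int numCols, int row, int col, int channel) {
-    return map[(row * numCols + col) * 3 + channel];
-}
-
-// When only a monochrome map is supported, all three channels carry the same
-// data, so reading any single channel (here channel 0) is sufficient.
-int monoSharpnessAt(const int* map, int numCols, int row, int col) {
-    return sharpnessAt(map, numCols, row, col, 0);
-}
-</code></pre>
-    </td>
-  </tr>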
-
-
- <tr class="entry" id="dynamic_android.statistics.sharpnessMapMode">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>statistics.<wbr/>sharpness<wbr/>Map<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [system as boolean]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operating mode for sharpness map
-generation</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_FUTURE">FUTURE</a></li>
- </ul>
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.lensShadingCorrectionMap">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
-
- <span class="entry_type_visibility"> [java_public as lensShadingMap]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The shading map is a low-resolution floating-point map
-that lists the coefficients used to correct for vignetting,<wbr/> for each
-Bayer color channel.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Each gain factor is >= 1</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The map provided here is the same map that is used by the camera device to
-correct both color shading and vignetting for output non-RAW images.<wbr/></p>
-<p>When there is no lens shading correction applied to RAW
-output images (<a href="#static_android.sensor.info.lensShadingApplied">android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied</a> <code>==</code>
-false),<wbr/> this map is the complete lens shading correction
-map; when there is some lens shading correction applied to
-the RAW output image (<a href="#static_android.sensor.info.lensShadingApplied">android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied</a><code>==</code> true),<wbr/> this map reports the remaining lens shading
-correction map that needs to be applied to get shading
-corrected images that match the camera device's output for
-non-RAW formats.<wbr/></p>
-<p>For a complete shading correction map,<wbr/> the least shaded
-section of the image will have a gain factor of 1; all
-other sections will have gains above 1.<wbr/></p>
-<p>When <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> = TRANSFORM_<wbr/>MATRIX,<wbr/> the map
-will take into account the colorCorrection settings.<wbr/></p>
-<p>The shading map is for the entire active pixel array,<wbr/> and is not
-affected by the crop region specified in the request.<wbr/> Each shading map
-entry is the value of the shading compensation map over a specific
-pixel on the sensor.<wbr/> Specifically,<wbr/> with a (N x M) resolution shading
-map,<wbr/> and an active pixel array size (W x H),<wbr/> shading map entry
-(x,<wbr/>y) ∈ (0 ...<wbr/> N-1,<wbr/> 0 ...<wbr/> M-1) is the value of the shading map at
-pixel ( ((W-1)/<wbr/>(N-1)) * x,<wbr/> ((H-1)/<wbr/>(M-1)) * y) for the four color channels.<wbr/>
-The map is assumed to be bilinearly interpolated between the sample points.<wbr/></p>
-<p>The channel order is [R,<wbr/> Geven,<wbr/> Godd,<wbr/> B],<wbr/> where Geven is the green
-channel for the even rows of a Bayer pattern,<wbr/> and Godd is the odd rows.<wbr/>
-The shading map is stored in a fully interleaved format.<wbr/></p>
-<p>The shading map will generally have on the order of 30-40 rows and columns,<wbr/>
-and will be smaller than 64x64.<wbr/></p>
-<p>As an example,<wbr/> given a very small map defined as:</p>
-<pre><code>width,<wbr/>height = [ 4,<wbr/> 3 ]
-values =
-[ 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/>
- 1.<wbr/>1,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>3,<wbr/>
- 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>25,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>25,<wbr/> 1.<wbr/>2,<wbr/>
- 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>2,<wbr/>
- 1.<wbr/>2,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3 ]
-</code></pre>
-<p>The low-resolution scaling map images for each channel are
-(displayed using nearest-neighbor interpolation):</p>
-<p><img alt="Red lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png"/>
-<img alt="Green (even rows) lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png"/>
-<img alt="Green (odd rows) lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png"/>
-<img alt="Blue lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png"/></p>
-<p>As a visualization only,<wbr/> inverting the full-color map to recover an
-image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
-<p><img alt="Image of a uniform white wall (inverse shading map)" src="images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png"/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.lensShadingMap">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4 x n x m
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">2D array of float gain factors per channel to correct lens shading</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The shading map is a low-resolution floating-point map
-that lists the coefficients used to correct for vignetting and color shading,<wbr/>
-for each Bayer color channel of RAW image data.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Each gain factor is >= 1</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The map provided here is the same map that is used by the camera device to
-correct both color shading and vignetting for output non-RAW images.<wbr/></p>
-<p>When there is no lens shading correction applied to RAW
-output images (<a href="#static_android.sensor.info.lensShadingApplied">android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied</a> <code>==</code>
-false),<wbr/> this map is the complete lens shading correction
-map; when there is some lens shading correction applied to
-the RAW output image (<a href="#static_android.sensor.info.lensShadingApplied">android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied</a><code>==</code> true),<wbr/> this map reports the remaining lens shading
-correction map that needs to be applied to get shading
-corrected images that match the camera device's output for
-non-RAW formats.<wbr/></p>
-<p>For a complete shading correction map,<wbr/> the least shaded
-section of the image will have a gain factor of 1; all
-other sections will have gains above 1.<wbr/></p>
-<p>When <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> = TRANSFORM_<wbr/>MATRIX,<wbr/> the map
-will take into account the colorCorrection settings.<wbr/></p>
-<p>The shading map is for the entire active pixel array,<wbr/> and is not
-affected by the crop region specified in the request.<wbr/> Each shading map
-entry is the value of the shading compensation map over a specific
-pixel on the sensor.<wbr/> Specifically,<wbr/> with a (N x M) resolution shading
-map,<wbr/> and an active pixel array size (W x H),<wbr/> shading map entry
-(x,<wbr/>y) ∈ (0 ...<wbr/> N-1,<wbr/> 0 ...<wbr/> M-1) is the value of the shading map at
-pixel ( ((W-1)/<wbr/>(N-1)) * x,<wbr/> ((H-1)/<wbr/>(M-1)) * y) for the four color channels.<wbr/>
-The map is assumed to be bilinearly interpolated between the sample points.<wbr/></p>
-<p>The channel order is [R,<wbr/> Geven,<wbr/> Godd,<wbr/> B],<wbr/> where Geven is the green
-channel for the even rows of a Bayer pattern,<wbr/> and Godd is the odd rows.<wbr/>
-The shading map is stored in a fully interleaved format,<wbr/> and its size
-is provided in the camera static metadata by <a href="#static_android.lens.info.shadingMapSize">android.<wbr/>lens.<wbr/>info.<wbr/>shading<wbr/>Map<wbr/>Size</a>.<wbr/></p>
-<p>The shading map will generally have on the order of 30-40 rows and columns,<wbr/>
-and will be smaller than 64x64.<wbr/></p>
-<p>As an example,<wbr/> given a very small map defined as:</p>
-<pre><code><a href="#static_android.lens.info.shadingMapSize">android.<wbr/>lens.<wbr/>info.<wbr/>shading<wbr/>Map<wbr/>Size</a> = [ 4,<wbr/> 3 ]
-<a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> =
-[ 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/>
- 1.<wbr/>1,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>3,<wbr/>
- 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>25,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>0,<wbr/>
- 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>25,<wbr/> 1.<wbr/>2,<wbr/>
- 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>2,<wbr/>
- 1.<wbr/>2,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3 ]
-</code></pre>
-<p>The low-resolution scaling map images for each channel are
-(displayed using nearest-neighbor interpolation):</p>
-<p><img alt="Red lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png"/>
-<img alt="Green (even rows) lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png"/>
-<img alt="Green (odd rows) lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png"/>
-<img alt="Blue lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png"/></p>
-<p>As a visualization only,<wbr/> inverting the full-color map to recover an
-image of a gray wall (using bicubic interpolation for visual quality)
-as captured by the sensor gives:</p>
-<p><img alt="Image of a uniform white wall (inverse shading map)" src="images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png"/></p>
-<p>Note that the RAW image data might be subject to lens shading
-correction not reported on this map.<wbr/> Query
-<a href="#static_android.sensor.info.lensShadingApplied">android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied</a> to see if RAW image data has been subject
-to lens shading correction.<wbr/> If <a href="#static_android.sensor.info.lensShadingApplied">android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied</a>
-is TRUE,<wbr/> the RAW image data is subject to partial or full lens shading
-correction.<wbr/> In the case full lens shading correction is applied to RAW
-images,<wbr/> the gain factor map reported in this key will contain all 1.<wbr/>0 gains.<wbr/>
-In other words,<wbr/> the map reported in this key is the remaining lens shading
-that needs to be applied on the RAW image to get images without lens shading
-artifacts.<wbr/> See <a href="#static_android.request.maxNumOutputRaw">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Raw</a> for a list of RAW image
-formats.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The lens shading map calculation may depend on exposure and white balance statistics.<wbr/>
-When AE and AWB are in AUTO modes
-(<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF and <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> <code>!=</code> OFF),<wbr/> the HAL
-may have all the information it needs to generate the most accurate lens shading map.<wbr/> When
-AE or AWB is in manual mode
-(<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>==</code> OFF or <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> <code>==</code> OFF),<wbr/> the shading map
-may be adversely impacted by manual exposure or white balance parameters.<wbr/> To avoid
-generating unreliable shading map data,<wbr/> the HAL may choose to lock the shading map with
-the latest known good map generated when the AE and AWB are in AUTO modes.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
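-
-  <tr class="entry_cont">
-   <td class="entry_details" colspan="6">
-     <p>The details above define the layout precisely: for each (x,<wbr/> y) sample the four gains are
-stored adjacently in [R,<wbr/> Geven,<wbr/> Godd,<wbr/> B] order,<wbr/> sample (x,<wbr/> y) corresponds to pixel
-( ((W-1)/<wbr/>(N-1)) * x,<wbr/> ((H-1)/<wbr/>(M-1)) * y ),<wbr/> and values between samples are bilinearly
-interpolated.<wbr/> The sketch below estimates the gain for one channel at an arbitrary active-array pixel
-under exactly those assumptions; it is illustrative only and is not HAL or framework code.<wbr/></p>
-<pre><code>// Hypothetical sampler: bilinearly interpolate one channel of a fully
-// interleaved 4-channel shading map (N columns x M rows, [R, Geven, Godd, B])
-// at active-array pixel (px, py), where the active array is W x H pixels.
-float shadingGainAt(const float* map, int N, int M, int W, int H,
-                    int px, int py, int channel /* 0..3 */) {
-    // Convert the pixel position to fractional map coordinates.
-    float fx = (float)px * (N - 1) / (W - 1);
-    float fy = (float)py * (M - 1) / (H - 1);
-    int x0 = (int)fx, y0 = (int)fy;
-    int x1 = (x0 + 1 < N) ? x0 + 1 : x0;   // clamp at the right edge
-    int y1 = (y0 + 1 < M) ? y0 + 1 : y0;   // clamp at the bottom edge
-    float tx = fx - x0, ty = fy - y0;
-
-    // Fetch the four surrounding samples for the requested channel.
-    const int stride = N * 4;              // floats per map row
-    float g00 = map[y0 * stride + x0 * 4 + channel];
-    float g10 = map[y0 * stride + x1 * 4 + channel];
-    float g01 = map[y1 * stride + x0 * 4 + channel];
-    float g11 = map[y1 * stride + x1 * 4 + channel];
-
-    // Standard bilinear blend.
-    float top    = g00 + (g10 - g00) * tx;
-    float bottom = g01 + (g11 - g01) * tx;
-    return top + (bottom - top) * ty;
-}
-</code></pre>
-    </td>
-  </tr>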
-
-
- <tr class="entry" id="dynamic_android.statistics.predictedColorGains">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>predicted<wbr/>Color<wbr/>Gains
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4
- </span>
- <span class="entry_type_visibility"> [hidden]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
- <div class="entry_type_notes">A 1D array of floats for 4 color channel gains</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The best-fit color channel gains calculated
-by the camera device's statistics units for the current output frame.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This may be different than the gains used for this frame,<wbr/>
-since statistics processing on data from a new frame
-typically completes after the transform has already been
-applied to that frame.<wbr/></p>
-<p>The 4 channel gains are defined in Bayer domain,<wbr/>
-see <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> for details.<wbr/></p>
-<p>This value should always be calculated by the auto-white balance (AWB) block,<wbr/>
-regardless of the android.<wbr/>control.<wbr/>* current values.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.predictedColorTransform">
- <td class="entry_name
- entry_name_deprecated
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>predicted<wbr/>Color<wbr/>Transform
- </td>
- <td class="entry_type">
- <span class="entry_type_name">rational</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 3 x 3
- </span>
- <span class="entry_type_visibility"> [hidden]</span>
-
-
-
- <span class="entry_type_deprecated">[deprecated] </span>
-
- <div class="entry_type_notes">3x3 rational matrix in row-major order</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The best-fit color transform matrix estimate
-calculated by the camera device's statistics units for the current
-output frame.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The camera device will provide the estimate from its
-statistics unit on the white balance transforms to use
-for the next frame.<wbr/> These are the values the camera device believes
-are the best fit for the current output frame.<wbr/> This may
-be different than the transform used for this frame,<wbr/> since
-statistics processing on data from a new frame typically
-completes after the transform has already been applied to
-that frame.<wbr/></p>
-<p>These estimates must be provided for all frames,<wbr/> even if
-capture settings and color transforms are set by the application.<wbr/></p>
-<p>This value should always be calculated by the auto-white balance (AWB) block,<wbr/>
-regardless of the android.<wbr/>control.<wbr/>* current values.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.sceneFlicker">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>scene<wbr/>Flicker
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">NONE</span>
- <span class="entry_type_enum_notes"><p>The camera device does not detect any flickering illumination
-in the current scene.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">50HZ</span>
- <span class="entry_type_enum_notes"><p>The camera device detects illumination flickering at 50Hz
-in the current scene.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">60HZ</span>
- <span class="entry_type_enum_notes"><p>The camera device detects illumination flickering at 60Hz
-in the current scene.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The camera device estimated scene illumination lighting
-frequency.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Many light sources,<wbr/> such as most fluorescent lights,<wbr/> flicker at a rate
-that depends on the local utility power standards.<wbr/> This flicker must be
-accounted for by auto-exposure routines to avoid artifacts in captured images.<wbr/>
-The camera device uses this entry to tell the application what the scene
-illuminant frequency is.<wbr/></p>
-<p>When manual exposure control is enabled
-(<code><a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> == OFF</code> or <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> ==
-OFF</code>),<wbr/> the <a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a> doesn't perform
-antibanding,<wbr/> and the application can ensure it selects
-exposure times that do not cause banding issues by looking
-into this metadata field.<wbr/> See
-<a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a> for more details.<wbr/></p>
-<p>Reports NONE if there doesn't appear to be flickering illumination.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
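-
-  <tr class="entry_cont">
-   <td class="entry_details" colspan="6">
-     <p>Under manual exposure,<wbr/> one common way for an application to use this field,<wbr/> as described
-above,<wbr/> is to round its exposure time to a whole number of flicker periods: mains-powered lights
-flicker at twice the mains frequency,<wbr/> so the period is 10 ms for 50HZ and roughly 8.<wbr/>33 ms for
-60HZ.<wbr/> The sketch below illustrates that rounding; it is an example strategy,<wbr/> not a requirement,<wbr/>
-and the names are hypothetical.<wbr/></p>
-<pre><code>// Illustrative only: snap a requested exposure time (in nanoseconds) to a
-// whole number of flicker periods so that banding is avoided under manual
-// exposure. Period = 1 / (2 * mains frequency): 10 ms at 50 Hz, ~8.33 ms at 60 Hz.
-enum SceneFlicker { FLICKER_NONE, FLICKER_50HZ, FLICKER_60HZ };
-
-long long bandingSafeExposureNs(long long requestedNs, SceneFlicker flicker) {
-    long long periodNs;
-    switch (flicker) {
-        case FLICKER_50HZ: periodNs = 10000000LL; break;  // 1/100 s
-        case FLICKER_60HZ: periodNs = 8333333LL;  break;  // ~1/120 s
-        default:           return requestedNs;            // no flicker detected
-    }
-    if (requestedNs < periodNs) return requestedNs;  // shorter than one period
-    long long periods = (requestedNs + periodNs / 2) / periodNs;  // round to nearest
-    return periods * periodNs;
-}
-</code></pre>
-    </td>
-  </tr>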
-
-
- <tr class="entry" id="dynamic_android.statistics.hotPixelMapMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Hot pixel map production is disabled.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Hot pixel map production is enabled.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Operating mode for hot pixel map generation.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.statistics.info.availableHotPixelMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Map<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If set to <code>true</code>,<wbr/> a hot pixel map is returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/>
-If set to <code>false</code>,<wbr/> no hot pixel map will be returned.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.statistics.hotPixelMap">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 2 x n
- </span>
- <span class="entry_type_visibility"> [public as point]</span>
-
-
-
-
- <div class="entry_type_notes">list of coordinates based on android.<wbr/>sensor.<wbr/>pixel<wbr/>Array<wbr/>Size</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of <code>(x,<wbr/> y)</code> coordinates of hot/<wbr/>defective pixels on the sensor.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>n <= number of pixels on the sensor.<wbr/>
-The <code>(x,<wbr/> y)</code> coordinates must be bounded by
-<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>A coordinate <code>(x,<wbr/> y)</code> must lie between <code>(0,<wbr/> 0)</code>,<wbr/> and
-<code>(width - 1,<wbr/> height - 1)</code> (inclusive),<wbr/> which are the top-left and
-bottom-right of the pixel array,<wbr/> respectively.<wbr/> The width and
-height dimensions are given in <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/>
-This may include hot pixels that lie outside of the active array
-bounds given by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>A hotpixel map contains the coordinates of pixels on the camera
-sensor that do not report valid values (usually due to defects in
-the camera sensor).<wbr/> This includes pixels that are stuck at certain
-values,<wbr/> or have a response that does not accurately encode the
-incoming light from the scene.<wbr/></p>
-<p>To avoid performance issues,<wbr/> there should be significantly fewer hot
-pixels than actual pixels on the camera sensor.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
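-
-  <tr class="entry_cont">
-   <td class="entry_details" colspan="6">
-     <p>The constraints above (every coordinate bounded by <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>,<wbr/>
-and far fewer hot pixels than total pixels) are easy to check mechanically.<wbr/> The validator below is a
-hypothetical sketch that assumes each entry is stored as an adjacent (x,<wbr/> y) pair of int32 values.<wbr/></p>
-<pre><code>// Hypothetical sanity check for a hot pixel list of n (x, y) pairs, stored as
-// adjacent values, against the sensor's pixelArraySize (width x height).
-bool validateHotPixelMap(const int* coords, int n, int width, int height) {
-    // Expect far fewer hot pixels than actual pixels on the sensor.
-    if ((long long)n >= (long long)width * height) return false;
-    for (int i = 0; i < n; ++i) {
-        int x = coords[2 * i];
-        int y = coords[2 * i + 1];
-        // Each coordinate must lie within the full pixel array (inclusive bounds).
-        if (x < 0 || y < 0 || x > width - 1 || y > height - 1) return false;
-    }
-    return true;
-}
-</code></pre>
-    </td>
-  </tr>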
-
-
- <tr class="entry" id="dynamic_android.statistics.lensShadingMapMode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- <span class="entry_type_enum_notes"><p>Do not include a lens shading map in the capture result.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- <span class="entry_type_enum_notes"><p>Include a lens shading map in the capture result.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether the camera device will output the lens
-shading map in output result metadata.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.statistics.info.availableLensShadingMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Lens<wbr/>Shading<wbr/>Map<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_RAW">RAW</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to ON,<wbr/>
-<a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> will be provided in
-the output result metadata.<wbr/></p>
-<p>ON is always supported on devices with the RAW capability.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_tonemap" class="section">tonemap</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.tonemap.curveBlue">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>curve<wbr/>Blue
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 2
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the blue
-channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.tonemap.curveGreen">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>curve<wbr/>Green
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 2
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the green
-channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.tonemap.curveRed">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>tonemap.<wbr/>curve<wbr/>Red
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 2
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the red
-channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>0-1 on both input and output coordinates,<wbr/> normalized
-as a floating-point value such that 0 == black and 1 == white.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Each channel's curve is defined by an array of control points:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> =
- [ P0in,<wbr/> P0out,<wbr/> P1in,<wbr/> P1out,<wbr/> P2in,<wbr/> P2out,<wbr/> P3in,<wbr/> P3out,<wbr/> ...,<wbr/> PNin,<wbr/> PNout ]
-2 <= N <= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
-<p>These are sorted in order of increasing <code>Pin</code>; it is
-required that input values 0.<wbr/>0 and 1.<wbr/>0 are included in the list to
-define a complete mapping.<wbr/> For input values between control points,<wbr/>
-the camera device must linearly interpolate between the control
-points.<wbr/></p>
-<p>Each curve can have an independent number of points,<wbr/> and the number
-of points can be less than max (that is,<wbr/> the request doesn't have to
-always provide a curve with number of points equivalent to
-<a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
-<p>A few examples,<wbr/> and their corresponding graphical mappings; these
-only specify the red channel and the precision is limited to 4
-digits,<wbr/> for conciseness.<wbr/></p>
-<p>Linear mapping:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [ 0,<wbr/> 0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0 ]
-</code></pre>
-<p><img alt="Linear mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png"/></p>
-<p>Invert mapping:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [ 0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 0 ]
-</code></pre>
-<p><img alt="Inverting mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png"/></p>
-<p>Gamma 1/<wbr/>2.<wbr/>2 mapping,<wbr/> with 16 control points:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [
- 0.<wbr/>0000,<wbr/> 0.<wbr/>0000,<wbr/> 0.<wbr/>0667,<wbr/> 0.<wbr/>2920,<wbr/> 0.<wbr/>1333,<wbr/> 0.<wbr/>4002,<wbr/> 0.<wbr/>2000,<wbr/> 0.<wbr/>4812,<wbr/>
- 0.<wbr/>2667,<wbr/> 0.<wbr/>5484,<wbr/> 0.<wbr/>3333,<wbr/> 0.<wbr/>6069,<wbr/> 0.<wbr/>4000,<wbr/> 0.<wbr/>6594,<wbr/> 0.<wbr/>4667,<wbr/> 0.<wbr/>7072,<wbr/>
- 0.<wbr/>5333,<wbr/> 0.<wbr/>7515,<wbr/> 0.<wbr/>6000,<wbr/> 0.<wbr/>7928,<wbr/> 0.<wbr/>6667,<wbr/> 0.<wbr/>8317,<wbr/> 0.<wbr/>7333,<wbr/> 0.<wbr/>8685,<wbr/>
- 0.<wbr/>8000,<wbr/> 0.<wbr/>9035,<wbr/> 0.<wbr/>8667,<wbr/> 0.<wbr/>9370,<wbr/> 0.<wbr/>9333,<wbr/> 0.<wbr/>9691,<wbr/> 1.<wbr/>0000,<wbr/> 1.<wbr/>0000 ]
-</code></pre>
-<p><img alt="Gamma = 1/2.2 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png"/></p>
-<p>Standard sRGB gamma mapping,<wbr/> per IEC 61966-2-1:1999,<wbr/> with 16 control points:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [
- 0.<wbr/>0000,<wbr/> 0.<wbr/>0000,<wbr/> 0.<wbr/>0667,<wbr/> 0.<wbr/>2864,<wbr/> 0.<wbr/>1333,<wbr/> 0.<wbr/>4007,<wbr/> 0.<wbr/>2000,<wbr/> 0.<wbr/>4845,<wbr/>
- 0.<wbr/>2667,<wbr/> 0.<wbr/>5532,<wbr/> 0.<wbr/>3333,<wbr/> 0.<wbr/>6125,<wbr/> 0.<wbr/>4000,<wbr/> 0.<wbr/>6652,<wbr/> 0.<wbr/>4667,<wbr/> 0.<wbr/>7130,<wbr/>
- 0.<wbr/>5333,<wbr/> 0.<wbr/>7569,<wbr/> 0.<wbr/>6000,<wbr/> 0.<wbr/>7977,<wbr/> 0.<wbr/>6667,<wbr/> 0.<wbr/>8360,<wbr/> 0.<wbr/>7333,<wbr/> 0.<wbr/>8721,<wbr/>
- 0.<wbr/>8000,<wbr/> 0.<wbr/>9063,<wbr/> 0.<wbr/>8667,<wbr/> 0.<wbr/>9389,<wbr/> 0.<wbr/>9333,<wbr/> 0.<wbr/>9701,<wbr/> 1.<wbr/>0000,<wbr/> 1.<wbr/>0000 ]
-</code></pre>
-<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For good quality of mapping,<wbr/> at least 128 control points are
-preferred.<wbr/></p>
-<p>A typical use case of this would be a gamma-1/<wbr/>2.<wbr/>2 curve,<wbr/> with as many
-control points used as are available.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
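-
-        <tr class="entry_cont">
-          <td class="entry_details" colspan="5">
-          <p>Illustrative sketch (editorial addition, not part of the metadata definition):
-a minimal C++ routine that generates a gamma curve in the flat
-[P0in, P0out, P1in, P1out, ...] layout described above. The helper name
-<code>makeGammaCurve</code> is hypothetical; the point count would normally be bounded by
-android.tonemap.maxCurvePoints.</p>
-<pre><code>// Illustrative only: build a tonemap curve for OUT = IN^(1/gamma) as a
-// flat list of (Pin, Pout) pairs sorted by increasing Pin.
-#include <cmath>
-#include <vector>
-
-// numPoints must be at least 2 so that inputs 0.0 and 1.0 are both present.
-std::vector<float> makeGammaCurve(int numPoints, float gamma) {
-    std::vector<float> curve;
-    curve.reserve(2 * numPoints);
-    for (int i = 0; i < numPoints; ++i) {
-        // Evenly spaced inputs, including both 0.0 and 1.0, as required
-        // for a complete mapping.
-        float in = static_cast<float>(i) / (numPoints - 1);
-        curve.push_back(in);
-        curve.push_back(std::pow(in, 1.0f / gamma));
-    }
-    return curve;
-}
-
-// Example: makeGammaCurve(16, 2.2f) reproduces the 16-point gamma 1/2.2
-// table shown above (to 4 digits).
-</code></pre>
-          </td>
-        </tr>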
-
-
- <tr class="entry" id="controls_android.tonemap.curve">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>tonemap.<wbr/>curve
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [java_public as tonemapCurve]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a>
-is CONTRAST_<wbr/>CURVE.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>The tonemapCurve consists of three curves,<wbr/> one each for the red,<wbr/> green,<wbr/> and blue
-channels.<wbr/> The following example uses the red channel as an
-example;<wbr/> the same logic applies to the green and blue channels.<wbr/>
-Each channel's curve is defined by an array of control points:</p>
-<pre><code>curveRed =
- [ P0(in,<wbr/> out),<wbr/> P1(in,<wbr/> out),<wbr/> P2(in,<wbr/> out),<wbr/> P3(in,<wbr/> out),<wbr/> ...,<wbr/> PN(in,<wbr/> out) ]
-2 <= N <= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
-<p>These are sorted in order of increasing <code>Pin</code>; it is always
-guaranteed that input values 0.<wbr/>0 and 1.<wbr/>0 are included in the list to
-define a complete mapping.<wbr/> For input values between control points,<wbr/>
-the camera device must linearly interpolate between the control
-points.<wbr/></p>
-<p>Each curve can have an independent number of points,<wbr/> and the number
-of points can be less than max (that is,<wbr/> the request doesn't have to
-always provide a curve with number of points equivalent to
-<a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
-<p>A few examples,<wbr/> and their corresponding graphical mappings; these
-only specify the red channel and the precision is limited to 4
-digits,<wbr/> for conciseness.<wbr/></p>
-<p>Linear mapping:</p>
-<pre><code>curveRed = [ (0,<wbr/> 0),<wbr/> (1.<wbr/>0,<wbr/> 1.<wbr/>0) ]
-</code></pre>
-<p><img alt="Linear mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png"/></p>
-<p>Invert mapping:</p>
-<pre><code>curveRed = [ (0,<wbr/> 1.<wbr/>0),<wbr/> (1.<wbr/>0,<wbr/> 0) ]
-</code></pre>
-<p><img alt="Inverting mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png"/></p>
-<p>Gamma 1/<wbr/>2.<wbr/>2 mapping,<wbr/> with 16 control points:</p>
-<pre><code>curveRed = [
- (0.<wbr/>0000,<wbr/> 0.<wbr/>0000),<wbr/> (0.<wbr/>0667,<wbr/> 0.<wbr/>2920),<wbr/> (0.<wbr/>1333,<wbr/> 0.<wbr/>4002),<wbr/> (0.<wbr/>2000,<wbr/> 0.<wbr/>4812),<wbr/>
- (0.<wbr/>2667,<wbr/> 0.<wbr/>5484),<wbr/> (0.<wbr/>3333,<wbr/> 0.<wbr/>6069),<wbr/> (0.<wbr/>4000,<wbr/> 0.<wbr/>6594),<wbr/> (0.<wbr/>4667,<wbr/> 0.<wbr/>7072),<wbr/>
- (0.<wbr/>5333,<wbr/> 0.<wbr/>7515),<wbr/> (0.<wbr/>6000,<wbr/> 0.<wbr/>7928),<wbr/> (0.<wbr/>6667,<wbr/> 0.<wbr/>8317),<wbr/> (0.<wbr/>7333,<wbr/> 0.<wbr/>8685),<wbr/>
- (0.<wbr/>8000,<wbr/> 0.<wbr/>9035),<wbr/> (0.<wbr/>8667,<wbr/> 0.<wbr/>9370),<wbr/> (0.<wbr/>9333,<wbr/> 0.<wbr/>9691),<wbr/> (1.<wbr/>0000,<wbr/> 1.<wbr/>0000) ]
-</code></pre>
-<p><img alt="Gamma = 1/2.2 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png"/></p>
-<p>Standard sRGB gamma mapping,<wbr/> per IEC 61966-2-1:1999,<wbr/> with 16 control points:</p>
-<pre><code>curveRed = [
- (0.<wbr/>0000,<wbr/> 0.<wbr/>0000),<wbr/> (0.<wbr/>0667,<wbr/> 0.<wbr/>2864),<wbr/> (0.<wbr/>1333,<wbr/> 0.<wbr/>4007),<wbr/> (0.<wbr/>2000,<wbr/> 0.<wbr/>4845),<wbr/>
- (0.<wbr/>2667,<wbr/> 0.<wbr/>5532),<wbr/> (0.<wbr/>3333,<wbr/> 0.<wbr/>6125),<wbr/> (0.<wbr/>4000,<wbr/> 0.<wbr/>6652),<wbr/> (0.<wbr/>4667,<wbr/> 0.<wbr/>7130),<wbr/>
- (0.<wbr/>5333,<wbr/> 0.<wbr/>7569),<wbr/> (0.<wbr/>6000,<wbr/> 0.<wbr/>7977),<wbr/> (0.<wbr/>6667,<wbr/> 0.<wbr/>8360),<wbr/> (0.<wbr/>7333,<wbr/> 0.<wbr/>8721),<wbr/>
- (0.<wbr/>8000,<wbr/> 0.<wbr/>9063),<wbr/> (0.<wbr/>8667,<wbr/> 0.<wbr/>9389),<wbr/> (0.<wbr/>9333,<wbr/> 0.<wbr/>9701),<wbr/> (1.<wbr/>0000,<wbr/> 1.<wbr/>0000) ]
-</code></pre>
-<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry is created by the framework from the curveRed,<wbr/> curveGreen and
-curveBlue entries.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.tonemap.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">CONTRAST_CURVE</span>
- <span class="entry_type_enum_notes"><p>Use the tone mapping curve specified in
-the <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>* entries.<wbr/></p>
-<p>All color enhancement and tonemapping must be disabled,<wbr/> except
-for applying the tonemapping curve specified by
-<a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/></p>
-<p>Must not slow down frame rate relative to raw
-sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Advanced gamma mapping and color enhancement may be applied,<wbr/> without
-reducing frame rate compared to raw sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>High-quality gamma mapping and color enhancement will be applied,<wbr/> at
-the cost of possibly reduced frame rate compared to raw sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">GAMMA_VALUE</span>
-                    <span class="entry_type_enum_notes"><p>Use the gamma value specified in <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a> to perform
-tonemapping.<wbr/></p>
-<p>All color enhancement and tonemapping must be disabled,<wbr/> except
-for applying the tonemapping curve specified by <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a>.<wbr/></p>
-<p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PRESET_CURVE</span>
- <span class="entry_type_enum_notes"><p>Use the preset tonemapping curve specified in
-<a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a> to perform tonemapping.<wbr/></p>
-<p>All color enhancement and tonemapping must be disabled,<wbr/> except
-for applying the tonemapping curve specified by
-<a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a>.<wbr/></p>
-<p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>High-level global contrast/<wbr/>gamma/<wbr/>tonemapping control.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.tonemap.availableToneMapModes">android.<wbr/>tonemap.<wbr/>available<wbr/>Tone<wbr/>Map<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When switching to an application-defined contrast curve by setting
-<a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> to CONTRAST_<wbr/>CURVE,<wbr/> the curve is defined
-per-channel with a set of <code>(in,<wbr/> out)</code> points that specify the
-mapping from input high-bit-depth pixel value to the output
-low-bit-depth value.<wbr/> Since the actual pixel ranges of both input
-and output may change depending on the camera pipeline,<wbr/> the values
-are specified by normalized floating-point numbers.<wbr/></p>
-<p>More-complex color mapping operations such as 3D color look-up
-tables,<wbr/> selective chroma enhancement,<wbr/> or other non-linear color
-transforms will be disabled when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
-<p>When using either FAST or HIGH_<wbr/>QUALITY,<wbr/> the camera device will
-emit its own tonemap curve in <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/>
-These values are always available,<wbr/> and as close as possible to the
-actually used nonlinear/<wbr/>nonglobal transforms.<wbr/></p>
-<p>If a request is sent with CONTRAST_<wbr/>CURVE with the camera device's
-provided curve in FAST or HIGH_<wbr/>QUALITY,<wbr/> the image's tonemap will be
-roughly the same.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="controls_android.tonemap.gamma">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>gamma
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-GAMMA_<wbr/>VALUE</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>The tonemap curve will be defined by the following formula:
-* OUT = pow(IN,<wbr/> 1.<wbr/>0 /<wbr/> gamma)
-where IN and OUT are the input and output pixel values scaled to the range [0.<wbr/>0,<wbr/> 1.<wbr/>0],<wbr/>
-pow is the power function,<wbr/> and gamma is the gamma value specified by this
-key.<wbr/></p>
-<p>The same curve will be applied to all color channels.<wbr/> The camera device
-may clip the input gamma value to its supported range.<wbr/> The actual applied
-value will be returned in the capture result.<wbr/></p>
-<p>The valid range of the gamma value varies across devices,<wbr/> but values
-within [1.<wbr/>0,<wbr/> 5.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
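-
-        <tr class="entry_cont">
-          <td class="entry_details" colspan="5">
-          <p>Illustrative sketch (editorial addition, not part of the metadata definition):
-a minimal C++ version of the GAMMA_VALUE formula above. The function name
-<code>applyGamma</code> is hypothetical.</p>
-<pre><code>// Illustrative only: OUT = pow(IN, 1.0 / gamma) on a pixel value that has
-// been normalized to [0.0, 1.0].
-#include <algorithm>
-#include <cmath>
-
-float applyGamma(float in, float gamma) {
-    // Clamp to the normalized range before applying the power function.
-    in = std::min(std::max(in, 0.0f), 1.0f);
-    return std::pow(in, 1.0f / gamma);
-}
-
-// Example: with gamma = 2.2, an input of 0.5 maps to roughly 0.73.
-</code></pre>
-          </td>
-        </tr>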
-
-
- <tr class="entry" id="controls_android.tonemap.presetCurve">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">SRGB</span>
- <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by sRGB</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">REC709</span>
- <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by ITU-R BT.<wbr/>709</p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-PRESET_<wbr/>CURVE</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>The tonemap curve will be defined by the specified standard.<wbr/></p>
-<p>sRGB (approximated by 16 control points):</p>
-<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
-<p>Rec.<wbr/> 709 (approximated by 16 control points):</p>
-<p><img alt="Rec. 709 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png"/></p>
-<p>Note that the above figures show a 16-control-point approximation of the preset
-curves.<wbr/> Camera devices may apply a different approximation to the curve.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.tonemap.maxCurvePoints">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Maximum number of supported points in the
-tonemap curve that can be used for <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If the actual number of points provided by the application (in <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>*) is
-less than this maximum,<wbr/> the camera device will resample the curve to its internal
-representation,<wbr/> using linear interpolation.<wbr/></p>
-<p>The output curves in the result metadata may have a different number
-of points than the input curves,<wbr/> and will represent the actual
-hardware curves used as closely as possible when linearly interpolated.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>This value must be at least 64.<wbr/> A value of at least 128 is recommended.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
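-
-        <tr class="entry_cont">
-          <td class="entry_details" colspan="5">
-          <p>Illustrative sketch (editorial addition, not part of the metadata definition):
-a minimal C++ version of the linear interpolation between control points mentioned above,
-similar to what a device might use when resampling a curve to its internal representation.
-The type and function names are hypothetical.</p>
-<pre><code>// Illustrative only: evaluate a tonemap curve at input x by linearly
-// interpolating between its (Pin, Pout) control points.
-#include <cstddef>
-#include <vector>
-
-struct Point { float in; float out; };
-
-// 'curve' is assumed to be sorted by increasing 'in' and to contain
-// control points for in == 0.0 and in == 1.0.
-float evalCurve(const std::vector<Point>& curve, float x) {
-    for (std::size_t i = 1; i < curve.size(); ++i) {
-        if (x <= curve[i].in) {
-            float span = curve[i].in - curve[i - 1].in;
-            float t = (span > 0.0f) ? (x - curve[i - 1].in) / span : 0.0f;
-            return curve[i - 1].out + t * (curve[i].out - curve[i - 1].out);
-        }
-    }
-    return curve.back().out;
-}
-</code></pre>
-          </td>
-        </tr>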
-
-
- <tr class="entry" id="static_android.tonemap.availableToneMapModes">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>tonemap.<wbr/>available<wbr/>Tone<wbr/>Map<wbr/>Modes
- </td>
- <td class="entry_type">
- <span class="entry_type_name">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [public as enumList]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">list of enums</div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>List of tonemapping modes for <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> that are supported by this camera
-device.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Any value listed in <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always contain
-at least one of the below mode combinations:</p>
-<ul>
-<li>CONTRAST_<wbr/>CURVE,<wbr/> FAST and HIGH_<wbr/>QUALITY</li>
-<li>GAMMA_<wbr/>VALUE,<wbr/> PRESET_<wbr/>CURVE,<wbr/> FAST and HIGH_<wbr/>QUALITY</li>
-</ul>
-<p>This includes all FULL level devices.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if automatic tonemap control is available
-on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
-That is,<wbr/> if the highest quality implementation on the camera device does not slow down
-capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.tonemap.curveBlue">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>curve<wbr/>Blue
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 2
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the blue
-channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.tonemap.curveGreen">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>curve<wbr/>Green
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 2
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the green
-channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.tonemap.curveRed">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>tonemap.<wbr/>curve<wbr/>Red
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 2
- </span>
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
- <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the red
-channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>0-1 on both input and output coordinates,<wbr/> normalized
-as a floating-point value such that 0 == black and 1 == white.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Each channel's curve is defined by an array of control points:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> =
- [ P0in,<wbr/> P0out,<wbr/> P1in,<wbr/> P1out,<wbr/> P2in,<wbr/> P2out,<wbr/> P3in,<wbr/> P3out,<wbr/> ...,<wbr/> PNin,<wbr/> PNout ]
-2 <= N <= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
-<p>These are sorted in order of increasing <code>Pin</code>; it is
-required that input values 0.<wbr/>0 and 1.<wbr/>0 are included in the list to
-define a complete mapping.<wbr/> For input values between control points,<wbr/>
-the camera device must linearly interpolate between the control
-points.<wbr/></p>
-<p>Each curve can have an independent number of points,<wbr/> and the number
-of points can be less than max (that is,<wbr/> the request doesn't have to
-always provide a curve with number of points equivalent to
-<a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
-<p>A few examples,<wbr/> and their corresponding graphical mappings; these
-only specify the red channel and the precision is limited to 4
-digits,<wbr/> for conciseness.<wbr/></p>
-<p>Linear mapping:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [ 0,<wbr/> 0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0 ]
-</code></pre>
-<p><img alt="Linear mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png"/></p>
-<p>Invert mapping:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [ 0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 0 ]
-</code></pre>
-<p><img alt="Inverting mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png"/></p>
-<p>Gamma 1/<wbr/>2.<wbr/>2 mapping,<wbr/> with 16 control points:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [
- 0.<wbr/>0000,<wbr/> 0.<wbr/>0000,<wbr/> 0.<wbr/>0667,<wbr/> 0.<wbr/>2920,<wbr/> 0.<wbr/>1333,<wbr/> 0.<wbr/>4002,<wbr/> 0.<wbr/>2000,<wbr/> 0.<wbr/>4812,<wbr/>
- 0.<wbr/>2667,<wbr/> 0.<wbr/>5484,<wbr/> 0.<wbr/>3333,<wbr/> 0.<wbr/>6069,<wbr/> 0.<wbr/>4000,<wbr/> 0.<wbr/>6594,<wbr/> 0.<wbr/>4667,<wbr/> 0.<wbr/>7072,<wbr/>
- 0.<wbr/>5333,<wbr/> 0.<wbr/>7515,<wbr/> 0.<wbr/>6000,<wbr/> 0.<wbr/>7928,<wbr/> 0.<wbr/>6667,<wbr/> 0.<wbr/>8317,<wbr/> 0.<wbr/>7333,<wbr/> 0.<wbr/>8685,<wbr/>
- 0.<wbr/>8000,<wbr/> 0.<wbr/>9035,<wbr/> 0.<wbr/>8667,<wbr/> 0.<wbr/>9370,<wbr/> 0.<wbr/>9333,<wbr/> 0.<wbr/>9691,<wbr/> 1.<wbr/>0000,<wbr/> 1.<wbr/>0000 ]
-</code></pre>
-<p><img alt="Gamma = 1/2.2 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png"/></p>
-<p>Standard sRGB gamma mapping,<wbr/> per IEC 61966-2-1:1999,<wbr/> with 16 control points:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [
- 0.<wbr/>0000,<wbr/> 0.<wbr/>0000,<wbr/> 0.<wbr/>0667,<wbr/> 0.<wbr/>2864,<wbr/> 0.<wbr/>1333,<wbr/> 0.<wbr/>4007,<wbr/> 0.<wbr/>2000,<wbr/> 0.<wbr/>4845,<wbr/>
- 0.<wbr/>2667,<wbr/> 0.<wbr/>5532,<wbr/> 0.<wbr/>3333,<wbr/> 0.<wbr/>6125,<wbr/> 0.<wbr/>4000,<wbr/> 0.<wbr/>6652,<wbr/> 0.<wbr/>4667,<wbr/> 0.<wbr/>7130,<wbr/>
- 0.<wbr/>5333,<wbr/> 0.<wbr/>7569,<wbr/> 0.<wbr/>6000,<wbr/> 0.<wbr/>7977,<wbr/> 0.<wbr/>6667,<wbr/> 0.<wbr/>8360,<wbr/> 0.<wbr/>7333,<wbr/> 0.<wbr/>8721,<wbr/>
- 0.<wbr/>8000,<wbr/> 0.<wbr/>9063,<wbr/> 0.<wbr/>8667,<wbr/> 0.<wbr/>9389,<wbr/> 0.<wbr/>9333,<wbr/> 0.<wbr/>9701,<wbr/> 1.<wbr/>0000,<wbr/> 1.<wbr/>0000 ]
-</code></pre>
-<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For good quality of mapping,<wbr/> at least 128 control points are
-preferred.<wbr/></p>
-<p>A typical use case of this would be a gamma-1/<wbr/>2.<wbr/>2 curve,<wbr/> with as many
-control points used as are available.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.tonemap.curve">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>tonemap.<wbr/>curve
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [java_public as tonemapCurve]</span>
-
- <span class="entry_type_synthetic">[synthetic] </span>
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a>
-is CONTRAST_<wbr/>CURVE.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>The tonemapCurve consists of three curves,<wbr/> one each for the red,<wbr/> green,<wbr/> and blue
-channels.<wbr/> The following example uses the red channel as an
-example;<wbr/> the same logic applies to the green and blue channels.<wbr/>
-Each channel's curve is defined by an array of control points:</p>
-<pre><code>curveRed =
- [ P0(in,<wbr/> out),<wbr/> P1(in,<wbr/> out),<wbr/> P2(in,<wbr/> out),<wbr/> P3(in,<wbr/> out),<wbr/> ...,<wbr/> PN(in,<wbr/> out) ]
-2 <= N <= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
-<p>These are sorted in order of increasing <code>Pin</code>; it is always
-guaranteed that input values 0.<wbr/>0 and 1.<wbr/>0 are included in the list to
-define a complete mapping.<wbr/> For input values between control points,<wbr/>
-the camera device must linearly interpolate between the control
-points.<wbr/></p>
-<p>Each curve can have an independent number of points,<wbr/> and the number
-of points can be less than max (that is,<wbr/> the request doesn't have to
-always provide a curve with number of points equivalent to
-<a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
-<p>A few examples,<wbr/> and their corresponding graphical mappings; these
-only specify the red channel and the precision is limited to 4
-digits,<wbr/> for conciseness.<wbr/></p>
-<p>Linear mapping:</p>
-<pre><code>curveRed = [ (0,<wbr/> 0),<wbr/> (1.<wbr/>0,<wbr/> 1.<wbr/>0) ]
-</code></pre>
-<p><img alt="Linear mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png"/></p>
-<p>Invert mapping:</p>
-<pre><code>curveRed = [ (0,<wbr/> 1.<wbr/>0),<wbr/> (1.<wbr/>0,<wbr/> 0) ]
-</code></pre>
-<p><img alt="Inverting mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png"/></p>
-<p>Gamma 1/<wbr/>2.<wbr/>2 mapping,<wbr/> with 16 control points:</p>
-<pre><code>curveRed = [
- (0.<wbr/>0000,<wbr/> 0.<wbr/>0000),<wbr/> (0.<wbr/>0667,<wbr/> 0.<wbr/>2920),<wbr/> (0.<wbr/>1333,<wbr/> 0.<wbr/>4002),<wbr/> (0.<wbr/>2000,<wbr/> 0.<wbr/>4812),<wbr/>
- (0.<wbr/>2667,<wbr/> 0.<wbr/>5484),<wbr/> (0.<wbr/>3333,<wbr/> 0.<wbr/>6069),<wbr/> (0.<wbr/>4000,<wbr/> 0.<wbr/>6594),<wbr/> (0.<wbr/>4667,<wbr/> 0.<wbr/>7072),<wbr/>
- (0.<wbr/>5333,<wbr/> 0.<wbr/>7515),<wbr/> (0.<wbr/>6000,<wbr/> 0.<wbr/>7928),<wbr/> (0.<wbr/>6667,<wbr/> 0.<wbr/>8317),<wbr/> (0.<wbr/>7333,<wbr/> 0.<wbr/>8685),<wbr/>
- (0.<wbr/>8000,<wbr/> 0.<wbr/>9035),<wbr/> (0.<wbr/>8667,<wbr/> 0.<wbr/>9370),<wbr/> (0.<wbr/>9333,<wbr/> 0.<wbr/>9691),<wbr/> (1.<wbr/>0000,<wbr/> 1.<wbr/>0000) ]
-</code></pre>
-<p><img alt="Gamma = 1/2.2 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png"/></p>
-<p>Standard sRGB gamma mapping,<wbr/> per IEC 61966-2-1:1999,<wbr/> with 16 control points:</p>
-<pre><code>curveRed = [
- (0.<wbr/>0000,<wbr/> 0.<wbr/>0000),<wbr/> (0.<wbr/>0667,<wbr/> 0.<wbr/>2864),<wbr/> (0.<wbr/>1333,<wbr/> 0.<wbr/>4007),<wbr/> (0.<wbr/>2000,<wbr/> 0.<wbr/>4845),<wbr/>
- (0.<wbr/>2667,<wbr/> 0.<wbr/>5532),<wbr/> (0.<wbr/>3333,<wbr/> 0.<wbr/>6125),<wbr/> (0.<wbr/>4000,<wbr/> 0.<wbr/>6652),<wbr/> (0.<wbr/>4667,<wbr/> 0.<wbr/>7130),<wbr/>
- (0.<wbr/>5333,<wbr/> 0.<wbr/>7569),<wbr/> (0.<wbr/>6000,<wbr/> 0.<wbr/>7977),<wbr/> (0.<wbr/>6667,<wbr/> 0.<wbr/>8360),<wbr/> (0.<wbr/>7333,<wbr/> 0.<wbr/>8721),<wbr/>
- (0.<wbr/>8000,<wbr/> 0.<wbr/>9063),<wbr/> (0.<wbr/>8667,<wbr/> 0.<wbr/>9389),<wbr/> (0.<wbr/>9333,<wbr/> 0.<wbr/>9701),<wbr/> (1.<wbr/>0000,<wbr/> 1.<wbr/>0000) ]
-</code></pre>
-<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This entry is created by the framework from the curveRed,<wbr/> curveGreen and
-curveBlue entries.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.tonemap.mode">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>mode
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">CONTRAST_CURVE</span>
- <span class="entry_type_enum_notes"><p>Use the tone mapping curve specified in
-the <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>* entries.<wbr/></p>
-<p>All color enhancement and tonemapping must be disabled,<wbr/> except
-for applying the tonemapping curve specified by
-<a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/></p>
-<p>Must not slow down frame rate relative to raw
-sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FAST</span>
- <span class="entry_type_enum_notes"><p>Advanced gamma mapping and color enhancement may be applied,<wbr/> without
-reducing frame rate compared to raw sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">HIGH_QUALITY</span>
- <span class="entry_type_enum_notes"><p>High-quality gamma mapping and color enhancement will be applied,<wbr/> at
-the cost of possibly reduced frame rate compared to raw sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">GAMMA_VALUE</span>
-                    <span class="entry_type_enum_notes"><p>Use the gamma value specified in <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a> to perform
-tonemapping.<wbr/></p>
-<p>All color enhancement and tonemapping must be disabled,<wbr/> except
-for applying the tonemapping curve specified by <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a>.<wbr/></p>
-<p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">PRESET_CURVE</span>
- <span class="entry_type_enum_notes"><p>Use the preset tonemapping curve specified in
-<a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a> to perform tonemapping.<wbr/></p>
-<p>All color enhancement and tonemapping must be disabled,<wbr/> except
-for applying the tonemapping curve specified by
-<a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a>.<wbr/></p>
-<p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>High-level global contrast/<wbr/>gamma/<wbr/>tonemapping control.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p><a href="#static_android.tonemap.availableToneMapModes">android.<wbr/>tonemap.<wbr/>available<wbr/>Tone<wbr/>Map<wbr/>Modes</a></p>
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When switching to an application-defined contrast curve by setting
-<a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> to CONTRAST_<wbr/>CURVE,<wbr/> the curve is defined
-per-channel with a set of <code>(in,<wbr/> out)</code> points that specify the
-mapping from input high-bit-depth pixel value to the output
-low-bit-depth value.<wbr/> Since the actual pixel ranges of both input
-and output may change depending on the camera pipeline,<wbr/> the values
-are specified by normalized floating-point numbers.<wbr/></p>
-<p>More-complex color mapping operations such as 3D color look-up
-tables,<wbr/> selective chroma enhancement,<wbr/> or other non-linear color
-transforms will be disabled when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
-<p>When using either FAST or HIGH_<wbr/>QUALITY,<wbr/> the camera device will
-emit its own tonemap curve in <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/>
-These values are always available,<wbr/> and as close as possible to the
-actually used nonlinear/<wbr/>nonglobal transforms.<wbr/></p>
-<p>If a request is sent with CONTRAST_<wbr/>CURVE with the camera device's
-provided curve in FAST or HIGH_<wbr/>QUALITY,<wbr/> the image's tonemap will be
-roughly the same.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.tonemap.gamma">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>gamma
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-GAMMA_<wbr/>VALUE</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>The tonemap curve will be defined by the following formula:
-* OUT = pow(IN,<wbr/> 1.<wbr/>0 /<wbr/> gamma)
-where IN and OUT are the input and output pixel values scaled to the range [0.<wbr/>0,<wbr/> 1.<wbr/>0],<wbr/>
-pow is the power function,<wbr/> and gamma is the gamma value specified by this
-key.<wbr/></p>
-<p>The same curve will be applied to all color channels.<wbr/> The camera device
-may clip the input gamma value to its supported range.<wbr/> The actual applied
-value will be returned in the capture result.<wbr/></p>
-<p>The valid range of the gamma value varies across devices,<wbr/> but values
-within [1.<wbr/>0,<wbr/> 5.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="dynamic_android.tonemap.presetCurve">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">SRGB</span>
- <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by sRGB</p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">REC709</span>
- <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by ITU-R BT.<wbr/>709</p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Tonemapping curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-PRESET_<wbr/>CURVE</p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-          <p>The tonemap curve will be defined by the specified standard.<wbr/></p>
-<p>sRGB (approximated by 16 control points):</p>
-<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
-<p>Rec.<wbr/> 709 (approximated by 16 control points):</p>
-<p><img alt="Rec. 709 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png"/></p>
-<p>Note that the above figures show a 16-control-point approximation of the preset
-curves.<wbr/> Camera devices may apply a different approximation to the curve.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_led" class="section">led</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.led.transmit">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>led.<wbr/>transmit
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [hidden as boolean]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>This LED is nominally used to indicate to the user
-that the camera is powered on and may be streaming images back to the
-Application Processor.<wbr/> In certain rare circumstances,<wbr/> the OS may
-disable this when video is processed locally and not transmitted to
-any untrusted applications.<wbr/></p>
-<p>In particular,<wbr/> the LED <em>must</em> always be on when the data could be
-transmitted off the device.<wbr/> The LED <em>should</em> always be on whenever
-data is stored locally on the device.<wbr/></p>
-<p>The LED <em>may</em> be off if a trusted application is using the data in a way that
-doesn't violate the above rules.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.led.transmit">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>led.<wbr/>transmit
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [hidden as boolean]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>This LED is nominally used to indicate to the user
-that the camera is powered on and may be streaming images back to the
-Application Processor.<wbr/> In certain rare circumstances,<wbr/> the OS may
-disable this when video is processed locally and not transmitted to
-any untrusted applications.<wbr/></p>
-<p>In particular,<wbr/> the LED <em>must</em> always be on when the data could be
-transmitted off the device.<wbr/> The LED <em>should</em> always be on whenever
-data is stored locally on the device.<wbr/></p>
-<p>The LED <em>may</em> be off if a trusted application is using the data in a way that
-doesn't violate the above rules.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.led.availableLeds">
- <td class="entry_name
- " rowspan="1">
- android.<wbr/>led.<wbr/>available<wbr/>Leds
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n
- </span>
- <span class="entry_type_visibility"> [hidden]</span>
-
-
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">TRANSMIT</span>
- <span class="entry_type_enum_notes"><p><a href="#controls_android.led.transmit">android.<wbr/>led.<wbr/>transmit</a> control is used.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>A list of camera LEDs that are available on this system.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_info" class="section">info</td></tr>
-
-
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.info.supportedHardwareLevel">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">LIMITED</span>
- <span class="entry_type_enum_notes"><p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
-better.<wbr/></p>
-<p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a> documentation are guaranteed to be supported.<wbr/></p>
-<p>All <code>LIMITED</code> devices support the <code>BACKWARDS_<wbr/>COMPATIBLE</code> capability,<wbr/> indicating basic
-support for color image capture.<wbr/> The only exception is that the device may
-alternatively support only the <code>DEPTH_<wbr/>OUTPUT</code> capability,<wbr/> if it can only output depth
-measurements and not color images.<wbr/></p>
-<p><code>LIMITED</code> devices and above require the use of <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>
-to lock exposure metering (and calculate flash power,<wbr/> for cameras with flash) before
-capturing a high-quality still image.<wbr/></p>
-<p>A <code>LIMITED</code> device that only lists the <code>BACKWARDS_<wbr/>COMPATIBLE</code> capability is only
-required to support full-automatic operation and post-processing (<code>OFF</code> is not
-supported for <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>,<wbr/> or
-<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>)</p>
-<p>Additional capabilities may optionally be supported by a <code>LIMITED</code>-level device,<wbr/> and
-can be checked for in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">FULL</span>
- <span class="entry_type_enum_notes"><p>This camera device is capable of supporting advanced imaging applications.<wbr/></p>
-<p>The stream configurations listed in the <code>FULL</code>,<wbr/> <code>LEGACY</code> and <code>LIMITED</code> tables in the
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a> documentation are guaranteed to be supported.<wbr/></p>
-<p>A <code>FULL</code> device will support below capabilities:</p>
-<ul>
-<li><code>BURST_<wbr/>CAPTURE</code> capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
- <code>BURST_<wbr/>CAPTURE</code>)</li>
-<li>Per frame control (<a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a> <code>==</code> PER_<wbr/>FRAME_<wbr/>CONTROL)</li>
-<li>Manual sensor control (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains <code>MANUAL_<wbr/>SENSOR</code>)</li>
-<li>Manual post-processing control (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
- <code>MANUAL_<wbr/>POST_<wbr/>PROCESSING</code>)</li>
-<li>The required exposure time range defined in <a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></li>
-<li>The required maxFrameDuration defined in <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a></li>
-</ul>
-<p>Note:
-Pre-API level 23,<wbr/> FULL devices also supported arbitrary cropping region
-(<a href="#static_android.scaler.croppingType">android.<wbr/>scaler.<wbr/>cropping<wbr/>Type</a> <code>== FREEFORM</code>); this requirement was relaxed in API level
-23,<wbr/> and <code>FULL</code> devices may only support <code>CENTERED</code> cropping.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">LEGACY</span>
- <span class="entry_type_enum_notes"><p>This camera device is running in backward compatibility mode.<wbr/></p>
-<p>Only the stream configurations listed in the <code>LEGACY</code> table in the <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a>
-documentation are supported.<wbr/></p>
-<p>A <code>LEGACY</code> device does not support per-frame control,<wbr/> manual sensor control,<wbr/> manual
-post-processing,<wbr/> arbitrary cropping regions,<wbr/> and has relaxed performance constraints.<wbr/>
-No additional capabilities beyond <code>BACKWARD_<wbr/>COMPATIBLE</code> will ever be listed by a
-<code>LEGACY</code> device in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
-<p>In addition,<wbr/> the <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is not functional on <code>LEGACY</code>
-devices.<wbr/> Instead,<wbr/> every request that includes a JPEG-format output target is treated
-as triggering a still capture,<wbr/> internally executing a precapture trigger.<wbr/> This may
-fire the flash for flash power metering during precapture,<wbr/> and then fire the flash
-for the final capture,<wbr/> if a flash is available on the device and the AE mode is set to
-enable the flash.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">3</span>
- <span class="entry_type_enum_notes"><p>This camera device is capable of YUV reprocessing and RAW data capture,<wbr/> in addition to
-FULL-level capabilities.<wbr/></p>
-<p>The stream configurations listed in the <code>LEVEL_<wbr/>3</code>,<wbr/> <code>RAW</code>,<wbr/> <code>FULL</code>,<wbr/> <code>LEGACY</code> and
-<code>LIMITED</code> tables in the <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a>
-documentation are guaranteed to be supported.<wbr/></p>
-<p>The following additional capabilities are guaranteed to be supported:</p>
-<ul>
-<li><code>YUV_<wbr/>REPROCESSING</code> capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
- <code>YUV_<wbr/>REPROCESSING</code>)</li>
-<li><code>RAW</code> capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
- <code>RAW</code>)</li>
-</ul></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Generally classifies the overall set of the camera device functionality.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The supported hardware level is a high-level description of the camera device's
-capabilities,<wbr/> summarizing several capabilities into one field.<wbr/> Each level adds additional
-features to the previous one,<wbr/> and is always a strict superset of the previous level.<wbr/>
-The ordering is <code>LEGACY < LIMITED < FULL < LEVEL_<wbr/>3</code>.<wbr/></p>
-<p>Starting from <code>LEVEL_<wbr/>3</code>,<wbr/> the level enumerations are guaranteed to be in increasing
-numerical value as well.<wbr/> To check if a given device is at least at a given hardware level,<wbr/>
-the following code snippet can be used:</p>
-<pre><code>//<wbr/> Returns true if the device supports the required hardware level,<wbr/> or better.<wbr/>
-boolean isHardwareLevelSupported(CameraCharacteristics c,<wbr/> int requiredLevel) {
- int deviceLevel = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>INFO_<wbr/>SUPPORTED_<wbr/>HARDWARE_<wbr/>LEVEL);
- if (deviceLevel == Camera<wbr/>Characteristics.<wbr/>INFO_<wbr/>SUPPORTED_<wbr/>HARDWARE_<wbr/>LEVEL_<wbr/>LEGACY) {
- return requiredLevel == deviceLevel;
- }
- //<wbr/> deviceLevel is not LEGACY,<wbr/> can use numerical sort
- return requiredLevel <= deviceLevel;
-}
-</code></pre>
-<p>At a high level,<wbr/> the levels are:</p>
-<ul>
-<li><code>LEGACY</code> devices operate in a backwards-compatibility mode for older
- Android devices,<wbr/> and have very limited capabilities.<wbr/></li>
-<li><code>LIMITED</code> devices represent the
- baseline feature set,<wbr/> and may also include additional capabilities that are
- subsets of <code>FULL</code>.<wbr/></li>
-<li><code>FULL</code> devices additionally support per-frame manual control of sensor,<wbr/> flash,<wbr/> lens and
- post-processing settings,<wbr/> and image capture at a high rate.<wbr/></li>
-<li><code>LEVEL_<wbr/>3</code> devices additionally support YUV reprocessing and RAW image capture,<wbr/> along
- with additional output stream configurations.<wbr/></li>
-</ul>
-<p>See the individual level enums for full descriptions of the supported capabilities.<wbr/> The
-<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> entry describes the device's capabilities at a
-finer-grain level,<wbr/> if needed.<wbr/> In addition,<wbr/> many controls have their available settings or
-ranges defined in individual <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a> entries.<wbr/></p>
-<p>Some features are not part of any particular hardware level or capability and must be
-queried separately.<wbr/> These include:</p>
-<ul>
-<li>Calibrated timestamps (<a href="#static_android.sensor.info.timestampSource">android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source</a> <code>==</code> REALTIME)</li>
-<li>Precision lens control (<a href="#static_android.lens.info.focusDistanceCalibration">android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration</a> <code>==</code> CALIBRATED)</li>
-<li>Face detection (<a href="#static_android.statistics.info.availableFaceDetectModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes</a>)</li>
-<li>Optical or electrical image stabilization
- (<a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a>,<wbr/>
- <a href="#static_android.control.availableVideoStabilizationModes">android.<wbr/>control.<wbr/>available<wbr/>Video<wbr/>Stabilization<wbr/>Modes</a>)</li>
-</ul>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
-  <p>The camera 3 HAL device can implement one of three possible operational modes: LIMITED,<wbr/>
-FULL,<wbr/> and LEVEL_<wbr/>3.<wbr/></p>
-<p>FULL support or better is expected from new higher-end devices.<wbr/> Limited
-mode has hardware requirements roughly in line with those for a camera HAL device v1
-implementation,<wbr/> and is expected from older or inexpensive devices.<wbr/> Each level is a strict
-superset of the previous level,<wbr/> and they share the same essential operational flow.<wbr/></p>
-<p>For full details refer to "S3.<wbr/> Operational Modes" in camera3.<wbr/>h</p>
-<p>Camera HAL3+ must not implement LEGACY mode.<wbr/> It is there for backwards compatibility in
-the <code>android.<wbr/>hardware.<wbr/>camera2</code> user-facing API only on HALv1 devices,<wbr/> and is implemented
-by the camera framework code.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
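As an illustrative sketch only (error handling omitted; a CameraManager instance <code>manager</code> and a camera id <code>cameraId</code> are assumed to be in scope), an application could combine the <code>isHardwareLevelSupported</code> helper shown above with the public <code>INFO_SUPPORTED_HARDWARE_LEVEL_*</code> constants to gate FULL-only code paths:
<pre><code>// Sketch: may throw CameraAccessException; handle or declare it in real code.
CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
if (isHardwareLevelSupported(chars,
        CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL)) {
    // Per-frame control, manual sensor control and manual post-processing
    // are guaranteed; FULL-only features may be enabled here.
} else {
    // Fall back to the LIMITED/LEGACY baseline feature set.
}
</code></pre>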
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_blackLevel" class="section">blackLevel</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.blackLevel.lock">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>black<wbr/>Level.<wbr/>lock
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether black-level compensation is locked
-to its current values,<wbr/> or is free to vary.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_HAL2">HAL2</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When set to <code>true</code> (ON),<wbr/> the values used for black-level
-compensation will not change until the lock is set to
-<code>false</code> (OFF).<wbr/></p>
-<p>Since changes to certain capture parameters (such as
-exposure time) may require resetting of black level
-compensation,<wbr/> the camera device must report whether setting
-the black level lock was successful in the output result
-metadata.<wbr/></p>
-<p>For example,<wbr/> if a sequence of requests is as follows:</p>
-<ul>
-<li>Request 1: Exposure = 10ms,<wbr/> Black level lock = OFF</li>
-<li>Request 2: Exposure = 10ms,<wbr/> Black level lock = ON</li>
-<li>Request 3: Exposure = 10ms,<wbr/> Black level lock = ON</li>
-<li>Request 4: Exposure = 20ms,<wbr/> Black level lock = ON</li>
-<li>Request 5: Exposure = 20ms,<wbr/> Black level lock = ON</li>
-<li>Request 6: Exposure = 20ms,<wbr/> Black level lock = ON</li>
-</ul>
-<p>And the exposure change in Request 4 requires the camera
-device to reset the black level offsets,<wbr/> then the output
-result metadata is expected to be:</p>
-<ul>
-<li>Result 1: Exposure = 10ms,<wbr/> Black level lock = OFF</li>
-<li>Result 2: Exposure = 10ms,<wbr/> Black level lock = ON</li>
-<li>Result 3: Exposure = 10ms,<wbr/> Black level lock = ON</li>
-<li>Result 4: Exposure = 20ms,<wbr/> Black level lock = OFF</li>
-<li>Result 5: Exposure = 20ms,<wbr/> Black level lock = ON</li>
-<li>Result 6: Exposure = 20ms,<wbr/> Black level lock = ON</li>
-</ul>
-<p>This indicates to the application that on frame 4,<wbr/> black
-levels were reset due to exposure value changes,<wbr/> and pixel
-values may not be consistent across captures.<wbr/></p>
-<p>The camera device will maintain the lock to the extent
-possible,<wbr/> only overriding the lock to OFF when changes to
-other request parameters require a black level recalculation
-or reset.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If for some reason black level locking is no longer possible
-(for example,<wbr/> the analog gain has changed,<wbr/> which forces
-black level offsets to be recalculated),<wbr/> then the HAL must
-override this request (and it must report 'OFF' when this
-does happen) until the next capture for which locking is
-possible again.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
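A minimal sketch of the behaviour described above, assuming a CaptureRequest.Builder <code>builder</code>, an open CameraCaptureSession <code>session</code> and a Handler <code>handler</code> already exist (exception handling omitted): the lock is requested, and the result is checked to see whether the device had to reset black levels.
<pre><code>builder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
session.capture(builder.build(), new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(CameraCaptureSession s,
            CaptureRequest request, TotalCaptureResult result) {
        Boolean locked = result.get(CaptureResult.BLACK_LEVEL_LOCK);
        if (Boolean.FALSE.equals(locked)) {
            // The device reset black levels (e.g. after an exposure change),
            // so pixel values may not be consistent with earlier captures.
        }
    }
}, handler);
</code></pre>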
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.blackLevel.lock">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>black<wbr/>Level.<wbr/>lock
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[full] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OFF</span>
- </li>
- <li>
- <span class="entry_type_enum_name">ON</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Whether black-level compensation is locked
-to its current values,<wbr/> or is free to vary.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_HAL2">HAL2</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Whether the black level offset was locked for this frame.<wbr/> Should be
-ON if <a href="#controls_android.blackLevel.lock">android.<wbr/>black<wbr/>Level.<wbr/>lock</a> was ON in the capture request,<wbr/> unless
-a change in other capture settings forced the camera device to
-perform a black level reset.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If for some reason black level locking is no longer possible
-(for example,<wbr/> the analog gain has changed,<wbr/> which forces
-black level offsets to be recalculated),<wbr/> then the HAL must
-override this request (and it must report 'OFF' when this
-does happen) until the next capture for which locking is
-possible again.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_sync" class="section">sync</td></tr>
-
-
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.sync.frameNumber">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sync.<wbr/>frame<wbr/>Number
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">int64</span>
-
- <span class="entry_type_visibility"> [ndk_public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">CONVERGING</span>
- <span class="entry_type_enum_value">-1</span>
- <span class="entry_type_enum_notes"><p>The current result is not yet fully synchronized to any request.<wbr/></p>
-<p>Synchronization is in progress,<wbr/> and reading metadata from this
-result may include a mix of data that have taken effect since the
-last synchronization time.<wbr/></p>
-<p>In some future result,<wbr/> within <a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a> frames,<wbr/>
-this value will update to the actual frame number that
-the result is guaranteed to be synchronized to (as long as the
-request settings remain constant).<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">UNKNOWN</span>
- <span class="entry_type_enum_value">-2</span>
- <span class="entry_type_enum_notes"><p>The current result's synchronization status is unknown.<wbr/></p>
-<p>The result may have already converged,<wbr/> or it may be in
-progress.<wbr/> Reading from this result may include some mix
-of settings from past requests.<wbr/></p>
-<p>After a settings change,<wbr/> the new settings will eventually all
-take effect for the output buffers and results.<wbr/> However,<wbr/> this
-value will not change when that happens.<wbr/> Altering settings
-rapidly may provide outcomes using mixes of settings from recent
-requests.<wbr/></p>
-<p>This value is intended primarily for backwards compatibility with
-the older camera implementations (for android.<wbr/>hardware.<wbr/>Camera).<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The frame number corresponding to the last request
-with which the output result (metadata + buffers) has been fully
-synchronized.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- <p>Either a non-negative value corresponding to a
-<code>frame_<wbr/>number</code>,<wbr/> or one of the two enums (CONVERGING /<wbr/> UNKNOWN).<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>When a request is submitted to the camera device,<wbr/> there is usually a
-delay of several frames before the controls get applied.<wbr/> A camera
-device may either choose to account for this delay by implementing a
-pipeline and carefully submit well-timed atomic control updates,<wbr/> or
-it may start streaming control changes that span over several frame
-boundaries.<wbr/></p>
-<p>In the latter case,<wbr/> whenever a request's settings change relative to
-the previous submitted request,<wbr/> the full set of changes may take
-multiple frame durations to fully take effect.<wbr/> Some settings may
-take effect sooner (in less frame durations) than others.<wbr/></p>
-<p>While a set of control changes are being propagated,<wbr/> this value
-will be CONVERGING.<wbr/></p>
-<p>Once it is fully known that a set of control changes have been
-finished propagating,<wbr/> and the resulting updated control settings
-have been read back by the camera device,<wbr/> this value will be set
-to a non-negative frame number (corresponding to the request to
-which the results have synchronized to).<wbr/></p>
-<p>Older camera device implementations may not have a way to detect
-when all camera controls have been applied,<wbr/> and will always set this
-value to UNKNOWN.<wbr/></p>
-<p>FULL capability devices will always have this value set to the
-frame number of the request corresponding to this result.<wbr/></p>
-<p><em>Further details</em>:</p>
-<ul>
-<li>Whenever a request differs from the last request,<wbr/> any future
-results not yet returned may have this value set to CONVERGING (this
-could include any in-progress captures not yet returned by the camera
-device,<wbr/> for more details see pipeline considerations below).<wbr/></li>
-<li>Submitting a series of multiple requests that differ from the
-previous request (e.<wbr/>g.<wbr/> r1,<wbr/> r2,<wbr/> r3 s.<wbr/>t.<wbr/> r1 != r2 != r3)
-moves the new synchronization frame to the last non-repeating
-request (using the smallest frame number from the contiguous list of
-repeating requests).<wbr/></li>
-<li>Submitting the same request repeatedly will not change this value
-to CONVERGING,<wbr/> if it was already a non-negative value.<wbr/></li>
-<li>When this value changes to non-negative,<wbr/> that means that all of the
-metadata controls from the request have been applied,<wbr/> all of the
-metadata controls from the camera device have been read to the
-updated values (into the result),<wbr/> and all of the graphics buffers
-corresponding to this result are also synchronized to the request.<wbr/></li>
-</ul>
-<p><em>Pipeline considerations</em>:</p>
-<p>Submitting a request with updated controls relative to the previously
-submitted requests may also invalidate the synchronization state
-of all the results corresponding to currently in-flight requests.<wbr/></p>
-<p>In other words,<wbr/> results for this current request and up to
-<a href="#static_android.request.pipelineMaxDepth">android.<wbr/>request.<wbr/>pipeline<wbr/>Max<wbr/>Depth</a> prior requests may have their
-<a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> change to CONVERGING.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>Using UNKNOWN here is illegal unless <a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a>
-is also UNKNOWN.<wbr/></p>
-<p>FULL capability devices should simply set this value to the
-<code>frame_<wbr/>number</code> of the request this result corresponds to.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.sync.maxLatency">
- <td class="entry_name
- " rowspan="5">
- android.<wbr/>sync.<wbr/>max<wbr/>Latency
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">int32</span>
-
- <span class="entry_type_visibility"> [public]</span>
-
-
- <span class="entry_type_hwlevel">[legacy] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">PER_FRAME_CONTROL</span>
- <span class="entry_type_enum_value">0</span>
- <span class="entry_type_enum_notes"><p>Every frame has the requests immediately applied.<wbr/></p>
-<p>Changing controls over multiple requests one after another will
-produce results that have those controls applied atomically
-each frame.<wbr/></p>
-<p>All FULL capability devices will have this as their maxLatency.<wbr/></p></span>
- </li>
- <li>
- <span class="entry_type_enum_name">UNKNOWN</span>
- <span class="entry_type_enum_value">-1</span>
- <span class="entry_type_enum_notes"><p>Each new frame has some subset (potentially the entire set)
-of the past requests applied to the camera settings.<wbr/></p>
-<p>By submitting a series of identical requests,<wbr/> the camera device
-will eventually have the camera settings applied,<wbr/> but it is
-unknown when that exact point will be.<wbr/></p>
-<p>All LEGACY capability devices will have this as their maxLatency.<wbr/></p></span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The maximum number of frames that can occur after a request
-(different than the previous) has been submitted,<wbr/> and before the
-result's state becomes synchronized.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Frame counts
- </td>
-
- <td class="entry_range">
- <p>A positive value,<wbr/> PER_<wbr/>FRAME_<wbr/>CONTROL,<wbr/> or UNKNOWN.<wbr/></p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_V1">V1</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This defines the maximum distance (in number of metadata results),<wbr/>
-between the frame number of the request that has new controls to apply
-and the frame number of the result that has all the controls applied.<wbr/></p>
-<p>In other words this acts as an upper boundary for how many frames
-must occur before the camera device knows for a fact that the new
-submitted camera settings have been applied in outgoing frames.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entries_header">
- <th class="th_details" colspan="5">HAL Implementation Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>For example if maxLatency was 2,<wbr/></p>
-<pre><code>initial request = X (repeating)
-request1 = X
-request2 = Y
-request3 = Y
-request4 = Y
-
-where requestN has frameNumber N,<wbr/> and the first of the repeating
-initial requests has frameNumber F (and F < 1).<wbr/>
-
-initial result = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == F }
-result1 = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == F }
-result2 = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == CONVERGING }
-result3 = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == CONVERGING }
-result4 = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == 2 }
-
-where resultN has frameNumber N.<wbr/>
-</code></pre>
-<p>Since <code>result4</code> has a <code>frameNumber == 4</code> and
-<code><a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == 2</code>,<wbr/> the distance is clearly
-<code>4 - 2 = 2</code>.<wbr/></p>
-<p>Use <code>frame_<wbr/>count</code> from camera3_<wbr/>request_<wbr/>t instead of
-<a href="#controls_android.request.frameCount">android.<wbr/>request.<wbr/>frame<wbr/>Count</a> or
-<code><a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html#getFrameNumber">CaptureResult#getFrameNumber</a></code>.<wbr/></p>
-<p>LIMITED devices are strongly encouraged to use a non-negative
-value.<wbr/> If UNKNOWN is used here then app developers do not have a way
-to know when sensor settings have been applied.<wbr/></p>
- </td>
- </tr>
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
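For illustration only (assuming the same CameraCharacteristics object <code>chars</code> as in the earlier sketch), an application can read this key through the public SDK constant and branch on the two special values:
<pre><code>Integer maxLatency = chars.get(CameraCharacteristics.SYNC_MAX_LATENCY);
if (maxLatency == null
        || maxLatency == CameraCharacteristics.SYNC_MAX_LATENCY_UNKNOWN) {
    // No bound available: settle settings by repeating identical requests.
} else if (maxLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
    // Every result reflects exactly the request it corresponds to.
} else {
    // New settings are guaranteed to be applied within maxLatency frames.
}
</code></pre>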
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_reprocess" class="section">reprocess</td></tr>
-
-
- <tr><td colspan="6" class="kind">controls</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="controls_android.reprocess.effectiveExposureFactor">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [java_public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
-  <p>The exposure time increase factor applied to the original output
-frame by the application processing before sending for reprocessing.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Relative exposure time increase factor.<wbr/>
- </td>
-
- <td class="entry_range">
- <p>>= 1.<wbr/>0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is optional,<wbr/> and will be supported if the camera device supports YUV_<wbr/>REPROCESSING
-capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains YUV_<wbr/>REPROCESSING).<wbr/></p>
-<p>For some YUV reprocessing use cases,<wbr/> the application may choose to filter the original
-output frames to effectively reduce the noise to the same level as a frame that was
-captured with longer exposure time.<wbr/> To be more specific,<wbr/> assuming the original captured
-images were captured with a sensitivity of S and an exposure time of T,<wbr/> the model in
-the camera device is that the amount of noise in the image would be approximately what
-would be expected if the original capture parameters had been a sensitivity of
-S/<wbr/>effectiveExposureFactor and an exposure time of T*effectiveExposureFactor,<wbr/> rather
-than S and T respectively.<wbr/> If the captured images were processed by the application
-before being sent for reprocessing,<wbr/> then the application may have used image processing
-algorithms and/<wbr/>or multi-frame image fusion to reduce the noise in the
-application-processed images (input images).<wbr/> By using the effectiveExposureFactor
-control,<wbr/> the application can communicate to the camera device the actual noise level
-improvement in the application-processed image.<wbr/> With this information,<wbr/> the camera
-device can select appropriate noise reduction and edge enhancement parameters to avoid
-excessive noise reduction (<a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a>) and insufficient edge
-enhancement (<a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a>) being applied to the reprocessed frames.<wbr/></p>
-<p>For example,<wbr/> for multi-frame image fusion use case,<wbr/> the application may fuse
-multiple output frames together to a final frame for reprocessing.<wbr/> When N images are
-fused into 1 image for reprocessing,<wbr/> the exposure time increase factor could be up to
-square root of N (based on a simple photon shot noise model).<wbr/> The camera device will
-adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
-produce the best quality images.<wbr/></p>
-<p>This is a relative factor; 1.<wbr/>0 indicates the application hasn't processed the input
-buffer in a way that affects its effective exposure time.<wbr/></p>
-<p>This control is only effective for YUV reprocessing capture requests.<wbr/> For noise
-reduction reprocessing,<wbr/> it is only effective when <code><a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a> != OFF</code>.<wbr/>
-Similarly,<wbr/> for edge enhancement reprocessing,<wbr/> it is only effective when
-<code><a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a> != OFF</code>.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
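A hedged sketch of the multi-frame fusion case described above (the frame count of 4 is a hypothetical value, and <code>builder</code> is an assumed reprocess CaptureRequest.Builder):
<pre><code>int fusedFrames = 4;  // hypothetical: four output frames fused into one input image
builder.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR,
        (float) Math.sqrt(fusedFrames));  // ~2.0f under a simple shot-noise model
</code></pre>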
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">dynamic</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="dynamic_android.reprocess.effectiveExposureFactor">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor
- </td>
- <td class="entry_type">
- <span class="entry_type_name">float</span>
-
- <span class="entry_type_visibility"> [java_public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
-  <p>The exposure time increase factor applied to the original output
-frame by the application processing before sending for reprocessing.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Relative exposure time increase factor.<wbr/>
- </td>
-
- <td class="entry_range">
- <p>>= 1.<wbr/>0</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This is optional,<wbr/> and will be supported if the camera device supports YUV_<wbr/>REPROCESSING
-capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains YUV_<wbr/>REPROCESSING).<wbr/></p>
-<p>For some YUV reprocessing use cases,<wbr/> the application may choose to filter the original
-output frames to effectively reduce the noise to the same level as a frame that was
-captured with longer exposure time.<wbr/> To be more specific,<wbr/> assuming the original captured
-images were captured with a sensitivity of S and an exposure time of T,<wbr/> the model in
-the camera device is that the amount of noise in the image would be approximately what
-would be expected if the original capture parameters had been a sensitivity of
-S/<wbr/>effectiveExposureFactor and an exposure time of T*effectiveExposureFactor,<wbr/> rather
-than S and T respectively.<wbr/> If the captured images were processed by the application
-before being sent for reprocessing,<wbr/> then the application may have used image processing
-algorithms and/<wbr/>or multi-frame image fusion to reduce the noise in the
-application-processed images (input images).<wbr/> By using the effectiveExposureFactor
-control,<wbr/> the application can communicate to the camera device the actual noise level
-improvement in the application-processed image.<wbr/> With this information,<wbr/> the camera
-device can select appropriate noise reduction and edge enhancement parameters to avoid
-excessive noise reduction (<a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a>) and insufficient edge
-enhancement (<a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a>) being applied to the reprocessed frames.<wbr/></p>
-<p>For example,<wbr/> for multi-frame image fusion use case,<wbr/> the application may fuse
-multiple output frames together to a final frame for reprocessing.<wbr/> When N images are
-fused into 1 image for reprocessing,<wbr/> the exposure time increase factor could be up to
-square root of N (based on a simple photon shot noise model).<wbr/> The camera device will
-adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
-produce the best quality images.<wbr/></p>
-<p>This is a relative factor; 1.<wbr/>0 indicates the application hasn't processed the input
-buffer in a way that affects its effective exposure time.<wbr/></p>
-<p>This control is only effective for YUV reprocessing capture requests.<wbr/> For noise
-reduction reprocessing,<wbr/> it is only effective when <code><a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a> != OFF</code>.<wbr/>
-Similarly,<wbr/> for edge enhancement reprocessing,<wbr/> it is only effective when
-<code><a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a> != OFF</code>.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
-
- <!-- end of kind -->
- </tbody>
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.reprocess.maxCaptureStall">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>reprocess.<wbr/>max<wbr/>Capture<wbr/>Stall
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [java_public]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
-  <p>The maximal camera capture pipeline stall (in units of frame count) introduced by a
-reprocess capture request.<wbr/></p>
- </td>
-
- <td class="entry_units">
- Number of frames.<wbr/>
- </td>
-
- <td class="entry_range">
- <p><= 4</p>
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_REPROC">REPROC</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>The key describes the maximal interference that one reprocess (input) request
-can introduce to the camera device's simultaneous streaming of regular (output) capture
-requests,<wbr/> including repeating requests.<wbr/></p>
-<p>When a reprocessing capture request is submitted while a camera output repeating request
-(e.<wbr/>g.<wbr/> preview) is being served by the camera device,<wbr/> it may preempt the camera capture
-pipeline for at least one frame duration so that the camera device is unable to process
-the following capture request in time for the next sensor start of exposure boundary.<wbr/>
-When this happens,<wbr/> the application may observe a capture time gap (longer than one frame
-duration) between adjacent capture output frames,<wbr/> which usually exhibits as preview
-glitch if the repeating request output targets include a preview surface.<wbr/> This key gives
-the worst-case number of frame stalls introduced by one reprocess request with any kind of
-formats/<wbr/>sizes combination.<wbr/></p>
-<p>If this key reports 0,<wbr/> it means a reprocess request doesn't introduce any glitch to the
-ongoing camera repeating request outputs,<wbr/> as if the reprocess request had never been issued.<wbr/></p>
-<p>This key is supported if the camera device supports PRIVATE or YUV reprocessing (
-i.<wbr/>e.<wbr/> <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains PRIVATE_<wbr/>REPROCESSING or
-YUV_<wbr/>REPROCESSING).<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
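A short sketch of reading this key from the SDK (same assumed <code>chars</code> object; the key is absent on devices without PRIVATE or YUV reprocessing):
<pre><code>Integer maxStall = chars.get(CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL);
if (maxStall != null && maxStall == 0) {
    // A reprocess request never stalls the repeating (e.g. preview) output.
}
</code></pre>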
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
- <tr><td colspan="6" id="section_depth" class="section">depth</td></tr>
-
-
- <tr><td colspan="6" class="kind">static</td></tr>
-
- <thead class="entries_header">
- <tr>
- <th class="th_name">Property Name</th>
- <th class="th_type">Type</th>
- <th class="th_description">Description</th>
- <th class="th_units">Units</th>
- <th class="th_range">Range</th>
- <th class="th_tags">Tags</th>
- </tr>
- </thead>
-
- <tbody>
-
-
-
-
-
-
-
-
-
-
- <tr class="entry" id="static_android.depth.maxDepthSamples">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>depth.<wbr/>max<wbr/>Depth<wbr/>Samples
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int32</span>
-
- <span class="entry_type_visibility"> [system]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Maximum number of points that a depth point cloud may contain.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If a camera device supports outputting depth range data in the form of a depth point
-cloud (<a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH_POINT_CLOUD">Image<wbr/>Format#DEPTH_<wbr/>POINT_<wbr/>CLOUD</a>),<wbr/> this is the maximum
-number of points an output buffer may contain.<wbr/></p>
-<p>Any given buffer may contain between 0 and maxDepthSamples points,<wbr/> inclusive.<wbr/>
-If output in the depth point cloud format is not supported,<wbr/> this entry will
-not be defined.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.depth.availableDepthStreamConfigurations">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Stream<wbr/>Configurations
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">int32</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- n x 4
- </span>
- <span class="entry_type_visibility"> [ndk_public as streamConfiguration]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">OUTPUT</span>
- </li>
- <li>
- <span class="entry_type_enum_name">INPUT</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>The available depth dataspace stream
-configurations that this camera device supports
-(i.<wbr/>e.<wbr/> format,<wbr/> width,<wbr/> height,<wbr/> output/<wbr/>input stream).<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>These are output stream configurations for use with
-dataSpace HAL_<wbr/>DATASPACE_<wbr/>DEPTH.<wbr/> The configurations are
-listed as <code>(format,<wbr/> width,<wbr/> height,<wbr/> input?)</code> tuples.<wbr/></p>
-<p>Only devices that support depth output for at least
-the HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>Y16 dense depth map may include
-this entry.<wbr/></p>
-<p>A device that also supports the HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>BLOB
-sparse depth point cloud must report a single entry for
-the format in this list as <code>(HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>BLOB,<wbr/>
-<a href="#static_android.depth.maxDepthSamples">android.<wbr/>depth.<wbr/>max<wbr/>Depth<wbr/>Samples</a>,<wbr/> 1,<wbr/> OUTPUT)</code> in addition to
-the entries for HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>Y16.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
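On the Java side these configurations surface through the regular StreamConfigurationMap; a minimal sketch (assuming the same <code>chars</code> object) that lists the dense depth output sizes:
<pre><code>StreamConfigurationMap map =
        chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] depthSizes = map.getOutputSizes(ImageFormat.DEPTH16);
// depthSizes is non-null only when the device advertises depth output; each
// element corresponds to one (format, width, height, OUTPUT) tuple above.
</code></pre>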
-
-
- <tr class="entry" id="static_android.depth.availableDepthMinFrameDurations">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Min<wbr/>Frame<wbr/>Durations
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4 x n
- </span>
- <span class="entry_type_visibility"> [ndk_public as streamConfigurationDuration]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>This lists the minimum frame duration for each
-format/<wbr/>size combination for depth output formats.<wbr/></p>
- </td>
-
- <td class="entry_units">
- (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>This should correspond to the frame duration when only that
-stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
-set to either OFF or FAST.<wbr/></p>
-<p>When multiple streams are used in a request,<wbr/> the minimum frame
-duration will be max(individual stream min durations).<wbr/></p>
-<p>The minimum frame duration of a stream (of a particular format,<wbr/> size)
-is the same regardless of whether the stream is input or output.<wbr/></p>
-<p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> and
-<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for more details about
-calculating the max frame rate.<wbr/></p>
-<p>(Keep in sync with <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>)</p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.depth.availableDepthStallDurations">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Stall<wbr/>Durations
- </td>
- <td class="entry_type">
- <span class="entry_type_name">int64</span>
- <span class="entry_type_container">x</span>
-
- <span class="entry_type_array">
- 4 x n
- </span>
- <span class="entry_type_visibility"> [ndk_public as streamConfigurationDuration]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>This lists the maximum stall duration for each
-output format/<wbr/>size combination for depth streams.<wbr/></p>
- </td>
-
- <td class="entry_units">
- (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- <ul class="entry_tags">
- <li><a href="#tag_DEPTH">DEPTH</a></li>
- </ul>
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>A stall duration is how much extra time would get added
-to the normal minimum frame duration for a repeating request
-that has streams with non-zero stall.<wbr/></p>
-<p>This functions similarly to
-<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for depth
-streams.<wbr/></p>
-<p>All depth output stream formats may have a nonzero stall
-duration.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
-
-
- <tr class="entry" id="static_android.depth.depthIsExclusive">
- <td class="entry_name
- " rowspan="3">
- android.<wbr/>depth.<wbr/>depth<wbr/>Is<wbr/>Exclusive
- </td>
- <td class="entry_type">
- <span class="entry_type_name entry_type_name_enum">byte</span>
-
- <span class="entry_type_visibility"> [public as boolean]</span>
-
-
- <span class="entry_type_hwlevel">[limited] </span>
-
-
-
- <ul class="entry_type_enum">
- <li>
- <span class="entry_type_enum_name">FALSE</span>
- </li>
- <li>
- <span class="entry_type_enum_name">TRUE</span>
- </li>
- </ul>
-
- </td> <!-- entry_type -->
-
- <td class="entry_description">
- <p>Indicates whether a capture request may target both a
-DEPTH16 /<wbr/> DEPTH_<wbr/>POINT_<wbr/>CLOUD output,<wbr/> and normal color outputs (such as
-YUV_<wbr/>420_<wbr/>888,<wbr/> JPEG,<wbr/> or RAW) simultaneously.<wbr/></p>
- </td>
-
- <td class="entry_units">
- </td>
-
- <td class="entry_range">
- </td>
-
- <td class="entry_tags">
- </td>
-
- </tr>
- <tr class="entries_header">
- <th class="th_details" colspan="5">Details</th>
- </tr>
- <tr class="entry_cont">
- <td class="entry_details" colspan="5">
- <p>If TRUE,<wbr/> including both depth and color outputs in a single
-capture request is not supported.<wbr/> An application must interleave color
-and depth requests.<wbr/> If FALSE,<wbr/> a single request can target both types
-of output.<wbr/></p>
-<p>Typically,<wbr/> this restriction exists on camera devices that
-need to emit a specific pattern or wavelength of light to
-measure depth values,<wbr/> which causes the color image to be
-corrupted during depth measurement.<wbr/></p>
- </td>
- </tr>
-
-
- <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
- <!-- end of entry -->
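A short sketch (same assumed <code>chars</code> object) of honouring the exclusivity flag before building a session that mixes depth and color targets:
<pre><code>Boolean exclusive = chars.get(CameraCharacteristics.DEPTH_DEPTH_IS_EXCLUSIVE);
if (Boolean.TRUE.equals(exclusive)) {
    // Interleave separate depth and color capture requests.
} else {
    // One request may target both DEPTH16/DEPTH_POINT_CLOUD and color outputs.
}
</code></pre>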
-
-
-
- <!-- end of kind -->
- </tbody>
-
- <!-- end of section -->
-<!-- </namespace> -->
- </table>
-
- <div class="tags" id="tag_index">
- <h2>Tags</h2>
- <ul>
- <li id="tag_BC">BC -
- Needed for backwards compatibility with old Java API
-
- <ul class="tags_entries">
- <li><a href="#controls_android.control.aeAntibandingMode">android.control.aeAntibandingMode</a> (controls)</li>
- <li><a href="#controls_android.control.aeExposureCompensation">android.control.aeExposureCompensation</a> (controls)</li>
- <li><a href="#controls_android.control.aeLock">android.control.aeLock</a> (controls)</li>
- <li><a href="#controls_android.control.aeMode">android.control.aeMode</a> (controls)</li>
- <li><a href="#controls_android.control.aeRegions">android.control.aeRegions</a> (controls)</li>
- <li><a href="#controls_android.control.aeTargetFpsRange">android.control.aeTargetFpsRange</a> (controls)</li>
- <li><a href="#controls_android.control.aePrecaptureTrigger">android.control.aePrecaptureTrigger</a> (controls)</li>
- <li><a href="#controls_android.control.afMode">android.control.afMode</a> (controls)</li>
- <li><a href="#controls_android.control.afRegions">android.control.afRegions</a> (controls)</li>
- <li><a href="#controls_android.control.afTrigger">android.control.afTrigger</a> (controls)</li>
- <li><a href="#controls_android.control.awbLock">android.control.awbLock</a> (controls)</li>
- <li><a href="#controls_android.control.awbMode">android.control.awbMode</a> (controls)</li>
- <li><a href="#controls_android.control.awbRegions">android.control.awbRegions</a> (controls)</li>
- <li><a href="#controls_android.control.captureIntent">android.control.captureIntent</a> (controls)</li>
- <li><a href="#controls_android.control.effectMode">android.control.effectMode</a> (controls)</li>
- <li><a href="#controls_android.control.mode">android.control.mode</a> (controls)</li>
- <li><a href="#controls_android.control.sceneMode">android.control.sceneMode</a> (controls)</li>
- <li><a href="#controls_android.control.videoStabilizationMode">android.control.videoStabilizationMode</a> (controls)</li>
- <li><a href="#static_android.control.aeAvailableAntibandingModes">android.control.aeAvailableAntibandingModes</a> (static)</li>
- <li><a href="#static_android.control.aeAvailableModes">android.control.aeAvailableModes</a> (static)</li>
- <li><a href="#static_android.control.aeAvailableTargetFpsRanges">android.control.aeAvailableTargetFpsRanges</a> (static)</li>
- <li><a href="#static_android.control.aeCompensationRange">android.control.aeCompensationRange</a> (static)</li>
- <li><a href="#static_android.control.aeCompensationStep">android.control.aeCompensationStep</a> (static)</li>
- <li><a href="#static_android.control.afAvailableModes">android.control.afAvailableModes</a> (static)</li>
- <li><a href="#static_android.control.availableEffects">android.control.availableEffects</a> (static)</li>
- <li><a href="#static_android.control.availableSceneModes">android.control.availableSceneModes</a> (static)</li>
- <li><a href="#static_android.control.availableVideoStabilizationModes">android.control.availableVideoStabilizationModes</a> (static)</li>
- <li><a href="#static_android.control.awbAvailableModes">android.control.awbAvailableModes</a> (static)</li>
- <li><a href="#static_android.control.maxRegions">android.control.maxRegions</a> (static)</li>
- <li><a href="#static_android.control.sceneModeOverrides">android.control.sceneModeOverrides</a> (static)</li>
- <li><a href="#static_android.control.aeLockAvailable">android.control.aeLockAvailable</a> (static)</li>
- <li><a href="#static_android.control.awbLockAvailable">android.control.awbLockAvailable</a> (static)</li>
- <li><a href="#controls_android.flash.mode">android.flash.mode</a> (controls)</li>
- <li><a href="#static_android.flash.info.available">android.flash.info.available</a> (static)</li>
- <li><a href="#controls_android.jpeg.gpsCoordinates">android.jpeg.gpsCoordinates</a> (controls)</li>
- <li><a href="#controls_android.jpeg.gpsProcessingMethod">android.jpeg.gpsProcessingMethod</a> (controls)</li>
- <li><a href="#controls_android.jpeg.gpsTimestamp">android.jpeg.gpsTimestamp</a> (controls)</li>
- <li><a href="#controls_android.jpeg.orientation">android.jpeg.orientation</a> (controls)</li>
- <li><a href="#controls_android.jpeg.quality">android.jpeg.quality</a> (controls)</li>
- <li><a href="#controls_android.jpeg.thumbnailQuality">android.jpeg.thumbnailQuality</a> (controls)</li>
- <li><a href="#controls_android.jpeg.thumbnailSize">android.jpeg.thumbnailSize</a> (controls)</li>
- <li><a href="#static_android.jpeg.availableThumbnailSizes">android.jpeg.availableThumbnailSizes</a> (static)</li>
- <li><a href="#controls_android.lens.focusDistance">android.lens.focusDistance</a> (controls)</li>
- <li><a href="#static_android.lens.info.availableFocalLengths">android.lens.info.availableFocalLengths</a> (static)</li>
- <li><a href="#dynamic_android.lens.focusRange">android.lens.focusRange</a> (dynamic)</li>
- <li><a href="#static_android.request.maxNumOutputStreams">android.request.maxNumOutputStreams</a> (static)</li>
- <li><a href="#controls_android.scaler.cropRegion">android.scaler.cropRegion</a> (controls)</li>
- <li><a href="#static_android.scaler.availableFormats">android.scaler.availableFormats</a> (static)</li>
- <li><a href="#static_android.scaler.availableJpegMinDurations">android.scaler.availableJpegMinDurations</a> (static)</li>
- <li><a href="#static_android.scaler.availableJpegSizes">android.scaler.availableJpegSizes</a> (static)</li>
- <li><a href="#static_android.scaler.availableMaxDigitalZoom">android.scaler.availableMaxDigitalZoom</a> (static)</li>
- <li><a href="#static_android.scaler.availableProcessedMinDurations">android.scaler.availableProcessedMinDurations</a> (static)</li>
- <li><a href="#static_android.scaler.availableProcessedSizes">android.scaler.availableProcessedSizes</a> (static)</li>
- <li><a href="#static_android.scaler.availableRawMinDurations">android.scaler.availableRawMinDurations</a> (static)</li>
- <li><a href="#static_android.sensor.info.sensitivityRange">android.sensor.info.sensitivityRange</a> (static)</li>
- <li><a href="#static_android.sensor.info.physicalSize">android.sensor.info.physicalSize</a> (static)</li>
- <li><a href="#static_android.sensor.info.pixelArraySize">android.sensor.info.pixelArraySize</a> (static)</li>
- <li><a href="#static_android.sensor.orientation">android.sensor.orientation</a> (static)</li>
- <li><a href="#dynamic_android.sensor.timestamp">android.sensor.timestamp</a> (dynamic)</li>
- <li><a href="#controls_android.statistics.faceDetectMode">android.statistics.faceDetectMode</a> (controls)</li>
- <li><a href="#static_android.statistics.info.maxFaceCount">android.statistics.info.maxFaceCount</a> (static)</li>
- <li><a href="#dynamic_android.statistics.faceIds">android.statistics.faceIds</a> (dynamic)</li>
- <li><a href="#dynamic_android.statistics.faceLandmarks">android.statistics.faceLandmarks</a> (dynamic)</li>
- <li><a href="#dynamic_android.statistics.faceRectangles">android.statistics.faceRectangles</a> (dynamic)</li>
- <li><a href="#dynamic_android.statistics.faceScores">android.statistics.faceScores</a> (dynamic)</li>
- <li><a href="#dynamic_android.lens.focalLength">android.lens.focalLength</a> (dynamic)</li>
- <li><a href="#dynamic_android.lens.focusDistance">android.lens.focusDistance</a> (dynamic)</li>
- </ul>
- </li> <!-- tag_BC -->
- <li id="tag_V1">V1 -
- New features for first camera 2 release (API1)
-
- <ul class="tags_entries">
- <li><a href="#static_android.colorCorrection.availableAberrationModes">android.colorCorrection.availableAberrationModes</a> (static)</li>
- <li><a href="#static_android.control.availableHighSpeedVideoConfigurations">android.control.availableHighSpeedVideoConfigurations</a> (static)</li>
- <li><a href="#controls_android.edge.mode">android.edge.mode</a> (controls)</li>
- <li><a href="#static_android.edge.availableEdgeModes">android.edge.availableEdgeModes</a> (static)</li>
- <li><a href="#controls_android.hotPixel.mode">android.hotPixel.mode</a> (controls)</li>
- <li><a href="#static_android.hotPixel.availableHotPixelModes">android.hotPixel.availableHotPixelModes</a> (static)</li>
- <li><a href="#controls_android.lens.aperture">android.lens.aperture</a> (controls)</li>
- <li><a href="#controls_android.lens.filterDensity">android.lens.filterDensity</a> (controls)</li>
- <li><a href="#controls_android.lens.focalLength">android.lens.focalLength</a> (controls)</li>
- <li><a href="#controls_android.lens.focusDistance">android.lens.focusDistance</a> (controls)</li>
- <li><a href="#controls_android.lens.opticalStabilizationMode">android.lens.opticalStabilizationMode</a> (controls)</li>
- <li><a href="#static_android.lens.info.availableApertures">android.lens.info.availableApertures</a> (static)</li>
- <li><a href="#static_android.lens.info.availableFilterDensities">android.lens.info.availableFilterDensities</a> (static)</li>
- <li><a href="#static_android.lens.info.availableFocalLengths">android.lens.info.availableFocalLengths</a> (static)</li>
- <li><a href="#static_android.lens.info.availableOpticalStabilization">android.lens.info.availableOpticalStabilization</a> (static)</li>
- <li><a href="#static_android.lens.info.minimumFocusDistance">android.lens.info.minimumFocusDistance</a> (static)</li>
- <li><a href="#static_android.lens.info.shadingMapSize">android.lens.info.shadingMapSize</a> (static)</li>
- <li><a href="#static_android.lens.info.focusDistanceCalibration">android.lens.info.focusDistanceCalibration</a> (static)</li>
- <li><a href="#dynamic_android.lens.state">android.lens.state</a> (dynamic)</li>
- <li><a href="#controls_android.noiseReduction.mode">android.noiseReduction.mode</a> (controls)</li>
- <li><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.noiseReduction.availableNoiseReductionModes</a> (static)</li>
- <li><a href="#controls_android.request.id">android.request.id</a> (controls)</li>
- <li><a href="#static_android.scaler.availableMinFrameDurations">android.scaler.availableMinFrameDurations</a> (static)</li>
- <li><a href="#static_android.scaler.availableStallDurations">android.scaler.availableStallDurations</a> (static)</li>
- <li><a href="#controls_android.sensor.exposureTime">android.sensor.exposureTime</a> (controls)</li>
- <li><a href="#controls_android.sensor.frameDuration">android.sensor.frameDuration</a> (controls)</li>
- <li><a href="#controls_android.sensor.sensitivity">android.sensor.sensitivity</a> (controls)</li>
- <li><a href="#static_android.sensor.info.sensitivityRange">android.sensor.info.sensitivityRange</a> (static)</li>
- <li><a href="#static_android.sensor.info.exposureTimeRange">android.sensor.info.exposureTimeRange</a> (static)</li>
- <li><a href="#static_android.sensor.info.maxFrameDuration">android.sensor.info.maxFrameDuration</a> (static)</li>
- <li><a href="#static_android.sensor.info.physicalSize">android.sensor.info.physicalSize</a> (static)</li>
- <li><a href="#static_android.sensor.info.timestampSource">android.sensor.info.timestampSource</a> (static)</li>
- <li><a href="#static_android.sensor.maxAnalogSensitivity">android.sensor.maxAnalogSensitivity</a> (static)</li>
- <li><a href="#dynamic_android.sensor.rollingShutterSkew">android.sensor.rollingShutterSkew</a> (dynamic)</li>
- <li><a href="#controls_android.statistics.hotPixelMapMode">android.statistics.hotPixelMapMode</a> (controls)</li>
- <li><a href="#static_android.statistics.info.availableHotPixelMapModes">android.statistics.info.availableHotPixelMapModes</a> (static)</li>
- <li><a href="#dynamic_android.statistics.hotPixelMap">android.statistics.hotPixelMap</a> (dynamic)</li>
- <li><a href="#dynamic_android.sync.frameNumber">android.sync.frameNumber</a> (dynamic)</li>
- <li><a href="#static_android.sync.maxLatency">android.sync.maxLatency</a> (static)</li>
- <li><a href="#dynamic_android.edge.mode">android.edge.mode</a> (dynamic)</li>
- <li><a href="#dynamic_android.hotPixel.mode">android.hotPixel.mode</a> (dynamic)</li>
- <li><a href="#dynamic_android.lens.aperture">android.lens.aperture</a> (dynamic)</li>
- <li><a href="#dynamic_android.lens.filterDensity">android.lens.filterDensity</a> (dynamic)</li>
- <li><a href="#dynamic_android.lens.opticalStabilizationMode">android.lens.opticalStabilizationMode</a> (dynamic)</li>
- <li><a href="#dynamic_android.noiseReduction.mode">android.noiseReduction.mode</a> (dynamic)</li>
- </ul>
- </li> <!-- tag_V1 -->
- <li id="tag_RAW">RAW -
- Needed for useful RAW image processing and DNG file support
-
- <ul class="tags_entries">
- <li><a href="#controls_android.hotPixel.mode">android.hotPixel.mode</a> (controls)</li>
- <li><a href="#static_android.hotPixel.availableHotPixelModes">android.hotPixel.availableHotPixelModes</a> (static)</li>
- <li><a href="#static_android.sensor.info.activeArraySize">android.sensor.info.activeArraySize</a> (static)</li>
- <li><a href="#static_android.sensor.info.colorFilterArrangement">android.sensor.info.colorFilterArrangement</a> (static)</li>
- <li><a href="#static_android.sensor.info.pixelArraySize">android.sensor.info.pixelArraySize</a> (static)</li>
- <li><a href="#static_android.sensor.info.whiteLevel">android.sensor.info.whiteLevel</a> (static)</li>
- <li><a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.sensor.info.preCorrectionActiveArraySize</a> (static)</li>
- <li><a href="#static_android.sensor.referenceIlluminant1">android.sensor.referenceIlluminant1</a> (static)</li>
- <li><a href="#static_android.sensor.referenceIlluminant2">android.sensor.referenceIlluminant2</a> (static)</li>
- <li><a href="#static_android.sensor.calibrationTransform1">android.sensor.calibrationTransform1</a> (static)</li>
- <li><a href="#static_android.sensor.calibrationTransform2">android.sensor.calibrationTransform2</a> (static)</li>
- <li><a href="#static_android.sensor.colorTransform1">android.sensor.colorTransform1</a> (static)</li>
- <li><a href="#static_android.sensor.colorTransform2">android.sensor.colorTransform2</a> (static)</li>
- <li><a href="#static_android.sensor.forwardMatrix1">android.sensor.forwardMatrix1</a> (static)</li>
- <li><a href="#static_android.sensor.forwardMatrix2">android.sensor.forwardMatrix2</a> (static)</li>
- <li><a href="#static_android.sensor.blackLevelPattern">android.sensor.blackLevelPattern</a> (static)</li>
- <li><a href="#static_android.sensor.profileHueSatMapDimensions">android.sensor.profileHueSatMapDimensions</a> (static)</li>
- <li><a href="#dynamic_android.sensor.neutralColorPoint">android.sensor.neutralColorPoint</a> (dynamic)</li>
- <li><a href="#dynamic_android.sensor.noiseProfile">android.sensor.noiseProfile</a> (dynamic)</li>
- <li><a href="#dynamic_android.sensor.profileHueSatMap">android.sensor.profileHueSatMap</a> (dynamic)</li>
- <li><a href="#dynamic_android.sensor.profileToneCurve">android.sensor.profileToneCurve</a> (dynamic)</li>
- <li><a href="#dynamic_android.sensor.greenSplit">android.sensor.greenSplit</a> (dynamic)</li>
- <li><a href="#dynamic_android.sensor.dynamicBlackLevel">android.sensor.dynamicBlackLevel</a> (dynamic)</li>
- <li><a href="#dynamic_android.sensor.dynamicWhiteLevel">android.sensor.dynamicWhiteLevel</a> (dynamic)</li>
- <li><a href="#controls_android.statistics.hotPixelMapMode">android.statistics.hotPixelMapMode</a> (controls)</li>
- <li><a href="#static_android.statistics.info.availableHotPixelMapModes">android.statistics.info.availableHotPixelMapModes</a> (static)</li>
- <li><a href="#dynamic_android.statistics.hotPixelMap">android.statistics.hotPixelMap</a> (dynamic)</li>
- <li><a href="#controls_android.statistics.lensShadingMapMode">android.statistics.lensShadingMapMode</a> (controls)</li>
- <li><a href="#dynamic_android.hotPixel.mode">android.hotPixel.mode</a> (dynamic)</li>
- </ul>
- </li> <!-- tag_RAW -->
- <li id="tag_HAL2">HAL2 -
- Entry is only used by camera device HAL 2.x
-
- <ul class="tags_entries">
- <li><a href="#controls_android.request.inputStreams">android.request.inputStreams</a> (controls)</li>
- <li><a href="#controls_android.request.outputStreams">android.request.outputStreams</a> (controls)</li>
- <li><a href="#controls_android.request.type">android.request.type</a> (controls)</li>
- <li><a href="#static_android.request.maxNumReprocessStreams">android.request.maxNumReprocessStreams</a> (static)</li>
- <li><a href="#controls_android.blackLevel.lock">android.blackLevel.lock</a> (controls)</li>
- </ul>
- </li> <!-- tag_HAL2 -->
- <li id="tag_FULL">FULL -
- Entry is required for full hardware level devices, and optional for other hardware levels
-
- <ul class="tags_entries">
- <li><a href="#static_android.sensor.maxAnalogSensitivity">android.sensor.maxAnalogSensitivity</a> (static)</li>
- </ul>
- </li> <!-- tag_FULL -->
- <li id="tag_DEPTH">DEPTH -
- Entry is required for the depth capability.
-
- <ul class="tags_entries">
- <li><a href="#static_android.lens.poseRotation">android.lens.poseRotation</a> (static)</li>
- <li><a href="#static_android.lens.poseTranslation">android.lens.poseTranslation</a> (static)</li>
- <li><a href="#static_android.lens.intrinsicCalibration">android.lens.intrinsicCalibration</a> (static)</li>
- <li><a href="#static_android.lens.radialDistortion">android.lens.radialDistortion</a> (static)</li>
- <li><a href="#static_android.depth.maxDepthSamples">android.depth.maxDepthSamples</a> (static)</li>
- <li><a href="#static_android.depth.availableDepthStreamConfigurations">android.depth.availableDepthStreamConfigurations</a> (static)</li>
- <li><a href="#static_android.depth.availableDepthMinFrameDurations">android.depth.availableDepthMinFrameDurations</a> (static)</li>
- <li><a href="#static_android.depth.availableDepthStallDurations">android.depth.availableDepthStallDurations</a> (static)</li>
- </ul>
- </li> <!-- tag_DEPTH -->
- <li id="tag_REPROC">REPROC -
- Entry is required for the YUV or PRIVATE reprocessing capability.
-
- <ul class="tags_entries">
- <li><a href="#controls_android.edge.mode">android.edge.mode</a> (controls)</li>
- <li><a href="#static_android.edge.availableEdgeModes">android.edge.availableEdgeModes</a> (static)</li>
- <li><a href="#controls_android.noiseReduction.mode">android.noiseReduction.mode</a> (controls)</li>
- <li><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.noiseReduction.availableNoiseReductionModes</a> (static)</li>
- <li><a href="#static_android.request.maxNumInputStreams">android.request.maxNumInputStreams</a> (static)</li>
- <li><a href="#static_android.scaler.availableInputOutputFormatsMap">android.scaler.availableInputOutputFormatsMap</a> (static)</li>
- <li><a href="#controls_android.reprocess.effectiveExposureFactor">android.reprocess.effectiveExposureFactor</a> (controls)</li>
- <li><a href="#static_android.reprocess.maxCaptureStall">android.reprocess.maxCaptureStall</a> (static)</li>
- <li><a href="#dynamic_android.edge.mode">android.edge.mode</a> (dynamic)</li>
- <li><a href="#dynamic_android.noiseReduction.mode">android.noiseReduction.mode</a> (dynamic)</li>
- </ul>
- </li> <!-- tag_REPROC -->
- <li id="tag_FUTURE">FUTURE -
- Entry is under-specified and is not required for now. This is for book-keeping purpose,
- do not implement or use it, it may be revised for future.
-
- <ul class="tags_entries">
- <li><a href="#controls_android.demosaic.mode">android.demosaic.mode</a> (controls)</li>
- <li><a href="#controls_android.edge.strength">android.edge.strength</a> (controls)</li>
- <li><a href="#controls_android.flash.firingPower">android.flash.firingPower</a> (controls)</li>
- <li><a href="#controls_android.flash.firingTime">android.flash.firingTime</a> (controls)</li>
- <li><a href="#static_android.flash.info.chargeDuration">android.flash.info.chargeDuration</a> (static)</li>
- <li><a href="#static_android.flash.colorTemperature">android.flash.colorTemperature</a> (static)</li>
- <li><a href="#static_android.flash.maxEnergy">android.flash.maxEnergy</a> (static)</li>
- <li><a href="#dynamic_android.jpeg.size">android.jpeg.size</a> (dynamic)</li>
- <li><a href="#controls_android.noiseReduction.strength">android.noiseReduction.strength</a> (controls)</li>
- <li><a href="#controls_android.request.metadataMode">android.request.metadataMode</a> (controls)</li>
- <li><a href="#static_android.sensor.baseGainFactor">android.sensor.baseGainFactor</a> (static)</li>
- <li><a href="#dynamic_android.sensor.temperature">android.sensor.temperature</a> (dynamic)</li>
- <li><a href="#controls_android.shading.strength">android.shading.strength</a> (controls)</li>
- <li><a href="#controls_android.statistics.histogramMode">android.statistics.histogramMode</a> (controls)</li>
- <li><a href="#controls_android.statistics.sharpnessMapMode">android.statistics.sharpnessMapMode</a> (controls)</li>
- <li><a href="#static_android.statistics.info.histogramBucketCount">android.statistics.info.histogramBucketCount</a> (static)</li>
- <li><a href="#static_android.statistics.info.maxHistogramCount">android.statistics.info.maxHistogramCount</a> (static)</li>
- <li><a href="#static_android.statistics.info.maxSharpnessMapValue">android.statistics.info.maxSharpnessMapValue</a> (static)</li>
- <li><a href="#static_android.statistics.info.sharpnessMapSize">android.statistics.info.sharpnessMapSize</a> (static)</li>
- <li><a href="#dynamic_android.statistics.histogram">android.statistics.histogram</a> (dynamic)</li>
- <li><a href="#dynamic_android.statistics.sharpnessMap">android.statistics.sharpnessMap</a> (dynamic)</li>
- </ul>
- </li> <!-- tag_FUTURE -->
- </ul>
- </div>
-
- [ <a href="#">top</a> ]
-
-</body>
-</html>
diff --git a/camera/metadata/3.3/Android.bp b/camera/metadata/3.3/Android.bp
new file mode 100644
index 0000000..458f895
--- /dev/null
+++ b/camera/metadata/3.3/Android.bp
@@ -0,0 +1,21 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+ name: "android.hardware.camera.metadata@3.3",
+ root: "android.hardware",
+ vndk: {
+ enabled: true,
+ },
+ srcs: [
+ "types.hal",
+ ],
+ interfaces: [
+ "android.hardware.camera.metadata@3.2",
+ ],
+ types: [
+ "CameraMetadataEnumAndroidControlAfSceneChange",
+ "CameraMetadataTag",
+ ],
+ gen_java: true,
+}
+
diff --git a/camera/metadata/3.3/types.hal b/camera/metadata/3.3/types.hal
new file mode 100644
index 0000000..7f96d9e
--- /dev/null
+++ b/camera/metadata/3.3/types.hal
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Autogenerated from camera metadata definitions in
+ * /system/media/camera/docs/metadata_definitions.xml
+ * *** DO NOT EDIT BY HAND ***
+ */
+
+package android.hardware.camera.metadata@3.3;
+
+/* Include definitions from all prior minor HAL metadata revisions */
+import android.hardware.camera.metadata@3.2;
+
+// No new metadata sections added in this revision
+
+/**
+ * Main enumeration for defining camera metadata tags added in this revision
+ *
+ * <p>Partial documentation is included for each tag; for complete documentation, reference
+ * '/system/media/camera/docs/docs.html' in the corresponding Android source tree.</p>
+ */
+enum CameraMetadataTag : @3.2::CameraMetadataTag {
+ /** android.control.afSceneChange [dynamic, enum, public]
+ *
+ * <p>Whether a significant scene change is detected within the currently-set AF
+ * region(s).</p>
+ */
+ ANDROID_CONTROL_AF_SCENE_CHANGE = android.hardware.camera.metadata@3.2::CameraMetadataTag:ANDROID_CONTROL_END,
+
+ ANDROID_CONTROL_END_3_3,
+
+};
+
+/*
+ * Enumeration definitions for the various entries that need them
+ */
+
+/** android.control.afSceneChange enumeration values
+ * @see ANDROID_CONTROL_AF_SCENE_CHANGE
+ */
+enum CameraMetadataEnumAndroidControlAfSceneChange : uint32_t {
+ ANDROID_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED,
+ ANDROID_CONTROL_AF_SCENE_CHANGE_DETECTED,
+};
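For reference, a minimal sketch of how a capture-result consumer might read the new android.control.afSceneChange tag; this is illustrative only, not part of the patch, and assumes the standard camera_metadata C helpers and the generated C tag constants.

    // Illustrative sketch (not part of this patch): read the AF scene change
    // flag from a capture result using the camera_metadata C helpers.
    #include <system/camera_metadata.h>

    bool afSceneChanged(const camera_metadata_t* result) {
        camera_metadata_ro_entry entry;
        int ret = find_camera_metadata_ro_entry(result,
                ANDROID_CONTROL_AF_SCENE_CHANGE, &entry);
        if (ret != 0 || entry.count == 0) {
            return false;  // Tag absent; the HAL may not report it on every frame.
        }
        return entry.data.u8[0] == ANDROID_CONTROL_AF_SCENE_CHANGE_DETECTED;
    }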
diff --git a/camera/provider/2.4/default/Android.bp b/camera/provider/2.4/default/Android.bp
index c0b3591..99c3e92 100644
--- a/camera/provider/2.4/default/Android.bp
+++ b/camera/provider/2.4/default/Android.bp
@@ -12,9 +12,11 @@
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
+ "android.hardware.camera.device@3.4",
"camera.device@1.0-impl",
"camera.device@3.2-impl",
"camera.device@3.3-impl",
+ "camera.device@3.4-impl",
"android.hardware.camera.provider@2.4",
"android.hardware.camera.common@1.0",
"android.hardware.graphics.mapper@2.0",
@@ -22,11 +24,14 @@
"android.hidl.memory@1.0",
"liblog",
"libhardware",
- "libcamera_metadata"
+ "libcamera_metadata",
+ ],
+ header_libs: [
+ "camera.device@3.4-impl_headers",
],
static_libs: [
- "android.hardware.camera.common@1.0-helper"
- ]
+ "android.hardware.camera.common@1.0-helper",
+ ],
}
cc_binary {
@@ -46,6 +51,7 @@
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
+ "android.hardware.camera.device@3.4",
"android.hardware.camera.provider@2.4",
"android.hardware.camera.common@1.0",
],
diff --git a/camera/provider/2.4/default/CameraProvider.cpp b/camera/provider/2.4/default/CameraProvider.cpp
index d50168a..ed974a5 100644
--- a/camera/provider/2.4/default/CameraProvider.cpp
+++ b/camera/provider/2.4/default/CameraProvider.cpp
@@ -21,6 +21,7 @@
#include "CameraProvider.h"
#include "CameraDevice_1_0.h"
#include "CameraDevice_3_3.h"
+#include "CameraDevice_3_4.h"
#include <cutils/properties.h>
#include <string.h>
#include <utils/Trace.h>
@@ -39,6 +40,7 @@
const std::regex kDeviceNameRE("device@([0-9]+\\.[0-9]+)/legacy/(.+)");
const char *kHAL3_2 = "3.2";
const char *kHAL3_3 = "3.3";
+const char *kHAL3_4 = "3.4";
const char *kHAL1_0 = "1.0";
const int kMaxCameraDeviceNameLen = 128;
const int kMaxCameraIdLen = 16;
@@ -159,12 +161,16 @@
if (deviceVersion != CAMERA_DEVICE_API_VERSION_1_0 &&
deviceVersion != CAMERA_DEVICE_API_VERSION_3_2 &&
deviceVersion != CAMERA_DEVICE_API_VERSION_3_3 &&
- deviceVersion != CAMERA_DEVICE_API_VERSION_3_4 ) {
+ deviceVersion != CAMERA_DEVICE_API_VERSION_3_4 &&
+ deviceVersion != CAMERA_DEVICE_API_VERSION_3_5) {
return hidl_string("");
}
bool isV1 = deviceVersion == CAMERA_DEVICE_API_VERSION_1_0;
int versionMajor = isV1 ? 1 : 3;
int versionMinor = isV1 ? 0 : mPreferredHal3MinorVersion;
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_5) {
+ versionMinor = 4;
+ }
char deviceName[kMaxCameraDeviceNameLen];
snprintf(deviceName, sizeof(deviceName), "device@%d.%d/legacy/%s",
versionMajor, versionMinor, cameraId.c_str());
@@ -220,7 +226,8 @@
break;
default:
ALOGW("Unknown minor camera device HAL version %d in property "
- "'camera.wrapper.hal3TrebleMinorVersion', defaulting to 3", mPreferredHal3MinorVersion);
+ "'camera.wrapper.hal3TrebleMinorVersion', defaulting to 3",
+ mPreferredHal3MinorVersion);
mPreferredHal3MinorVersion = 3;
}
@@ -292,6 +299,7 @@
case CAMERA_DEVICE_API_VERSION_3_2:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_4:
+ case CAMERA_DEVICE_API_VERSION_3_5:
// in support
break;
case CAMERA_DEVICE_API_VERSION_2_0:
@@ -480,10 +488,27 @@
return Void();
}
+ sp<android::hardware::camera::device::V3_2::ICameraDevice> device;
+ if (deviceVersion == kHAL3_4) {
+ ALOGV("Constructing v3.4 camera device");
+ sp<android::hardware::camera::device::V3_2::implementation::CameraDevice> deviceImpl =
+ new android::hardware::camera::device::V3_4::implementation::CameraDevice(
+ mModule, cameraId, mCameraDeviceNames);
+ if (deviceImpl == nullptr || deviceImpl->isInitFailed()) {
+ ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraId.c_str());
+ device = nullptr;
+ _hidl_cb(Status::INTERNAL_ERROR, nullptr);
+ return Void();
+ }
+
+ device = deviceImpl;
+ _hidl_cb (Status::OK, device);
+ return Void();
+ }
+
// Since some Treble HAL revisions can map to the same legacy HAL version(s), we default
// to the newest possible Treble HAL revision, but allow for override if needed via
// system property.
- sp<android::hardware::camera::device::V3_2::ICameraDevice> device;
switch (mPreferredHal3MinorVersion) {
case 2: { // Map legacy camera device v3 HAL to Treble camera device HAL v3.2
ALOGV("Constructing v3.2 camera device");
diff --git a/camera/provider/2.4/vts/functional/Android.bp b/camera/provider/2.4/vts/functional/Android.bp
index 81d3de1..7bc4253 100644
--- a/camera/provider/2.4/vts/functional/Android.bp
+++ b/camera/provider/2.4/vts/functional/Android.bp
@@ -35,6 +35,7 @@
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
+ "android.hardware.camera.device@3.4",
"android.hardware.camera.provider@2.4",
"android.hardware.graphics.common@1.0",
"android.hardware.graphics.mapper@2.0",
diff --git a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
index e4cf9af..d44a54a 100644
--- a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
+++ b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
@@ -27,6 +27,7 @@
#include <android/hardware/camera/device/1.0/ICameraDevice.h>
#include <android/hardware/camera/device/3.2/ICameraDevice.h>
#include <android/hardware/camera/device/3.3/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
#include <android/hardware/camera/provider/2.4/ICameraProvider.h>
#include <android/hidl/manager/1.0/IServiceManager.h>
#include <binder/MemoryHeapBase.h>
@@ -128,9 +129,11 @@
namespace {
// "device@<version>/legacy/<id>"
const char *kDeviceNameRE = "device@([0-9]+\\.[0-9]+)/%s/(.+)";
+ const int CAMERA_DEVICE_API_VERSION_3_4 = 0x304;
const int CAMERA_DEVICE_API_VERSION_3_3 = 0x303;
const int CAMERA_DEVICE_API_VERSION_3_2 = 0x302;
const int CAMERA_DEVICE_API_VERSION_1_0 = 0x100;
+ const char *kHAL3_4 = "3.4";
const char *kHAL3_3 = "3.3";
const char *kHAL3_2 = "3.2";
const char *kHAL1_0 = "1.0";
@@ -164,7 +167,9 @@
return -1;
}
- if (version.compare(kHAL3_3) == 0) {
+ if (version.compare(kHAL3_4) == 0) {
+ return CAMERA_DEVICE_API_VERSION_3_4;
+ } else if (version.compare(kHAL3_3) == 0) {
return CAMERA_DEVICE_API_VERSION_3_3;
} else if (version.compare(kHAL3_2) == 0) {
return CAMERA_DEVICE_API_VERSION_3_2;
@@ -611,9 +616,11 @@
void openEmptyDeviceSession(const std::string &name,
sp<ICameraProvider> provider,
sp<ICameraDeviceSession> *session /*out*/,
- sp<device::V3_3::ICameraDeviceSession> *session3_3 /*out*/,
camera_metadata_t **staticMeta /*out*/);
- void configurePreviewStream(const std::string &name,
+ void castSession(const sp<ICameraDeviceSession> &session, int32_t deviceVersion,
+ sp<device::V3_3::ICameraDeviceSession> *session3_3 /*out*/,
+ sp<device::V3_4::ICameraDeviceSession> *session3_4 /*out*/);
+ void configurePreviewStream(const std::string &name, int32_t deviceVersion,
sp<ICameraProvider> provider,
const AvailableStream *previewThreshold,
sp<ICameraDeviceSession> *session /*out*/,
@@ -1100,6 +1107,7 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
switch (deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_2: {
Return<void> ret;
@@ -1140,6 +1148,7 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
switch (deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_2: {
::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x;
@@ -1879,6 +1888,7 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
switch (deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_2: {
::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x;
@@ -1905,6 +1915,19 @@
// characteristics keys we've defined.
ASSERT_GT(entryCount, 0u);
ALOGI("getCameraCharacteristics metadata entry count is %zu", entryCount);
+
+ camera_metadata_ro_entry entry;
+ int retcode = find_camera_metadata_ro_entry(metadata,
+ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry);
+ if ((0 == retcode) && (entry.count > 0)) {
+ uint8_t hardwareLevel = entry.data.u8[0];
+ ASSERT_TRUE(
+ hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED ||
+ hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL ||
+ hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3);
+ } else {
+ ADD_FAILURE() << "Get camera hardware level failed!";
+ }
});
ASSERT_TRUE(ret.isOk());
}
@@ -1943,6 +1966,7 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
switch (deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_2: {
::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x;
@@ -2067,6 +2091,7 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
switch (deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_2: {
::android::sp<ICameraDevice> device3_x;
@@ -2130,6 +2155,7 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
switch (deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_2: {
::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x;
@@ -2152,12 +2178,14 @@
session = newSession;
});
ASSERT_TRUE(ret.isOk());
- // Ensure that a device labeling itself as 3.3 can have its session interface cast
- // to the 3.3 interface, and that lower versions can't be cast to it.
- auto castResult = device::V3_3::ICameraDeviceSession::castFrom(session);
- ASSERT_TRUE(castResult.isOk());
- sp<device::V3_3::ICameraDeviceSession> sessionV3_3 = castResult;
- if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_3) {
+ // Ensure that a device labeling itself as 3.3/3.4 can have its session interface
+ // cast to the 3.3/3.4 interface, and that lower versions can't be cast to it.
+ sp<device::V3_3::ICameraDeviceSession> sessionV3_3;
+ sp<device::V3_4::ICameraDeviceSession> sessionV3_4;
+ castSession(session, deviceVersion, &sessionV3_3, &sessionV3_4);
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_4) {
+ ASSERT_TRUE(sessionV3_4.get() != nullptr);
+ } else if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_3) {
ASSERT_TRUE(sessionV3_3.get() != nullptr);
} else {
ASSERT_TRUE(sessionV3_3.get() == nullptr);
@@ -2213,6 +2241,7 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
switch (deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_2: {
::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x;
@@ -2301,66 +2330,69 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- camera_metadata_t* staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- sp<device::V3_3::ICameraDeviceSession> session3_3;
- openEmptyDeviceSession(name, mProvider,
- &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/);
-
- outputStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
- ASSERT_NE(0u, outputStreams.size());
-
- int32_t streamId = 0;
- for (auto& it : outputStreams) {
- Stream stream = {streamId,
- StreamType::OUTPUT,
- static_cast<uint32_t>(it.width),
- static_cast<uint32_t>(it.height),
- static_cast<PixelFormat>(it.format),
- GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
- 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {stream};
- StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE};
- if (session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [streamId](Status s, HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(1u, halConfig.streams.size());
- ASSERT_EQ(halConfig.streams[0].id, streamId);
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(1u, halConfig.streams.size());
- ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId);
- });
- }
- ASSERT_TRUE(ret.isOk());
- streamId++;
- }
-
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- }
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
}
+
+ camera_metadata_t* staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ sp<device::V3_3::ICameraDeviceSession> session3_3;
+ sp<device::V3_4::ICameraDeviceSession> session3_4;
+ openEmptyDeviceSession(name, mProvider,
+ &session /*out*/, &staticMeta /*out*/);
+ castSession(session, deviceVersion, &session3_3, &session3_4);
+
+ outputStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
+ ASSERT_NE(0u, outputStreams.size());
+
+ int32_t streamId = 0;
+ for (auto& it : outputStreams) {
+ Stream stream = {streamId,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(it.width),
+ static_cast<uint32_t>(it.height),
+ static_cast<PixelFormat>(it.format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+ 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {stream};
+ ::android::hardware::camera::device::V3_4::StreamConfiguration config;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId);
+ });
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId);
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [streamId](Status s, HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ ASSERT_EQ(halConfig.streams[0].id, streamId);
+ });
+ }
+ ASSERT_TRUE(ret.isOk());
+ streamId++;
+ }
+
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
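The castSession helper introduced in this change has its body outside the hunks shown here; below is a minimal sketch of what it might look like, assuming the same HIDL castFrom pattern that the replaced 3.3-only code used (illustrative only, not the actual implementation).

    // Illustrative sketch: route the base session to the highest interface the
    // device version advertises; lower versions keep null 3.3/3.4 pointers.
    void CameraHidlTest::castSession(const sp<ICameraDeviceSession>& session, int32_t deviceVersion,
            sp<device::V3_3::ICameraDeviceSession>* session3_3 /*out*/,
            sp<device::V3_4::ICameraDeviceSession>* session3_4 /*out*/) {
        switch (deviceVersion) {
            case CAMERA_DEVICE_API_VERSION_3_4: {
                auto castResult = device::V3_4::ICameraDeviceSession::castFrom(session);
                ASSERT_TRUE(castResult.isOk());
                *session3_4 = castResult;
                break;
            }
            case CAMERA_DEVICE_API_VERSION_3_3: {
                auto castResult = device::V3_3::ICameraDeviceSession::castFrom(session);
                ASSERT_TRUE(castResult.isOk());
                *session3_3 = castResult;
                break;
            }
            default:
                // 3.2 and earlier stay on the base ICameraDeviceSession.
                break;
        }
    }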
@@ -2371,132 +2403,148 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- camera_metadata_t* staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- sp<device::V3_3::ICameraDeviceSession> session3_3;
- openEmptyDeviceSession(name, mProvider,
- &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/);
-
- outputStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
- ASSERT_NE(0u, outputStreams.size());
-
- int32_t streamId = 0;
- Stream stream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(0),
- static_cast<uint32_t>(0),
- static_cast<PixelFormat>(outputStreams[0].format),
- GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
- 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {stream};
- StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE};
- if(session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [](Status s, HalStreamConfiguration) {
- ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
- (Status::INTERNAL_ERROR == s));
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [](Status s, device::V3_3::HalStreamConfiguration) {
- ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
- (Status::INTERNAL_ERROR == s));
- });
- }
- ASSERT_TRUE(ret.isOk());
-
- stream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(UINT32_MAX),
- static_cast<uint32_t>(UINT32_MAX),
- static_cast<PixelFormat>(outputStreams[0].format),
- GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
- 0,
- StreamRotation::ROTATION_0};
- streams[0] = stream;
- config = {streams, StreamConfigurationMode::NORMAL_MODE};
- if(session3_3 == nullptr) {
- ret = session->configureStreams(config, [](Status s,
- HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- } else {
- ret = session3_3->configureStreams_3_3(config, [](Status s,
- device::V3_3::HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- }
- ASSERT_TRUE(ret.isOk());
-
- for (auto& it : outputStreams) {
- stream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(it.width),
- static_cast<uint32_t>(it.height),
- static_cast<PixelFormat>(UINT32_MAX),
- GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
- 0,
- StreamRotation::ROTATION_0};
- streams[0] = stream;
- config = {streams, StreamConfigurationMode::NORMAL_MODE};
- if(session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [](Status s, HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [](Status s, device::V3_3::HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- }
- ASSERT_TRUE(ret.isOk());
-
- stream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(it.width),
- static_cast<uint32_t>(it.height),
- static_cast<PixelFormat>(it.format),
- GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
- 0,
- static_cast<StreamRotation>(UINT32_MAX)};
- streams[0] = stream;
- config = {streams, StreamConfigurationMode::NORMAL_MODE};
- if(session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [](Status s, HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [](Status s, device::V3_3::HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- }
- ASSERT_TRUE(ret.isOk());
- }
-
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- }
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
}
+
+ camera_metadata_t* staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ sp<device::V3_3::ICameraDeviceSession> session3_3;
+ sp<device::V3_4::ICameraDeviceSession> session3_4;
+ openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/);
+ castSession(session, deviceVersion, &session3_3, &session3_4);
+
+ outputStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
+ ASSERT_NE(0u, outputStreams.size());
+
+ int32_t streamId = 0;
+ Stream stream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(0),
+ static_cast<uint32_t>(0),
+ static_cast<PixelFormat>(outputStreams[0].format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+ 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {stream};
+ ::android::hardware::camera::device::V3_4::StreamConfiguration config;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ if(session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
+ (Status::INTERNAL_ERROR == s));
+ });
+ } else if(session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
+ (Status::INTERNAL_ERROR == s));
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [](Status s, HalStreamConfiguration) {
+ ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
+ (Status::INTERNAL_ERROR == s));
+ });
+ }
+ ASSERT_TRUE(ret.isOk());
+
+ stream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(UINT32_MAX),
+ static_cast<uint32_t>(UINT32_MAX),
+ static_cast<PixelFormat>(outputStreams[0].format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+ 0,
+ StreamRotation::ROTATION_0};
+ streams[0] = stream;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ if(session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config, [](Status s,
+ device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else if(session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2, [](Status s,
+ device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2, [](Status s,
+ HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ }
+ ASSERT_TRUE(ret.isOk());
+
+ for (auto& it : outputStreams) {
+ stream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(it.width),
+ static_cast<uint32_t>(it.height),
+ static_cast<PixelFormat>(UINT32_MAX),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+ 0,
+ StreamRotation::ROTATION_0};
+ streams[0] = stream;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ if(session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else if(session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [](Status s, HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ }
+ ASSERT_TRUE(ret.isOk());
+
+ stream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(it.width),
+ static_cast<uint32_t>(it.height),
+ static_cast<PixelFormat>(it.format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+ 0,
+ static_cast<StreamRotation>(UINT32_MAX)};
+ streams[0] = stream;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ if(session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else if(session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [](Status s, HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ }
+ ASSERT_TRUE(ret.isOk());
+ }
+
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
@@ -2509,107 +2557,207 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- camera_metadata_t* staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- sp<device::V3_3::ICameraDeviceSession> session3_3;
- openEmptyDeviceSession(name, mProvider,
- &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/);
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
+ }
- Status rc = isZSLModeAvailable(staticMeta);
- if (Status::METHOD_NOT_SUPPORTED == rc) {
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- continue;
+ camera_metadata_t* staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ sp<device::V3_3::ICameraDeviceSession> session3_3;
+ sp<device::V3_4::ICameraDeviceSession> session3_4;
+ openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/);
+ castSession(session, deviceVersion, &session3_3, &session3_4);
+
+ Status rc = isZSLModeAvailable(staticMeta);
+ if (Status::METHOD_NOT_SUPPORTED == rc) {
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ continue;
+ }
+ ASSERT_EQ(Status::OK, rc);
+
+ inputStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
+ ASSERT_NE(0u, inputStreams.size());
+
+ inputOutputMap.clear();
+ ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
+ ASSERT_NE(0u, inputOutputMap.size());
+
+ int32_t streamId = 0;
+ for (auto& inputIter : inputOutputMap) {
+ AvailableStream input;
+ ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat,
+ input));
+ ASSERT_NE(0u, inputStreams.size());
+
+ AvailableStream outputThreshold = {INT32_MAX, INT32_MAX,
+ inputIter.outputFormat};
+ std::vector<AvailableStream> outputStreams;
+ ASSERT_EQ(Status::OK,
+ getAvailableOutputStreams(staticMeta, outputStreams,
+ &outputThreshold));
+ for (auto& outputIter : outputStreams) {
+ Stream zslStream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(input.width),
+ static_cast<uint32_t>(input.height),
+ static_cast<PixelFormat>(input.format),
+ GRALLOC_USAGE_HW_CAMERA_ZSL,
+ 0,
+ StreamRotation::ROTATION_0};
+ Stream inputStream = {streamId++,
+ StreamType::INPUT,
+ static_cast<uint32_t>(input.width),
+ static_cast<uint32_t>(input.height),
+ static_cast<PixelFormat>(input.format),
+ 0,
+ 0,
+ StreamRotation::ROTATION_0};
+ Stream outputStream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(outputIter.width),
+ static_cast<uint32_t>(outputIter.height),
+ static_cast<PixelFormat>(outputIter.format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+ 0,
+ StreamRotation::ROTATION_0};
+
+ ::android::hardware::hidl_vec<Stream> streams = {inputStream, zslStream,
+ outputStream};
+ ::android::hardware::camera::device::V3_4::StreamConfiguration config;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(3u, halConfig.streams.size());
+ });
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(3u, halConfig.streams.size());
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [](Status s, HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(3u, halConfig.streams.size());
+ });
}
- ASSERT_EQ(Status::OK, rc);
-
- inputStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
- ASSERT_NE(0u, inputStreams.size());
-
- inputOutputMap.clear();
- ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
- ASSERT_NE(0u, inputOutputMap.size());
-
- int32_t streamId = 0;
- for (auto& inputIter : inputOutputMap) {
- AvailableStream input;
- ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat,
- input));
- ASSERT_NE(0u, inputStreams.size());
-
- AvailableStream outputThreshold = {INT32_MAX, INT32_MAX,
- inputIter.outputFormat};
- std::vector<AvailableStream> outputStreams;
- ASSERT_EQ(Status::OK,
- getAvailableOutputStreams(staticMeta, outputStreams,
- &outputThreshold));
- for (auto& outputIter : outputStreams) {
- Stream zslStream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(input.width),
- static_cast<uint32_t>(input.height),
- static_cast<PixelFormat>(input.format),
- GRALLOC_USAGE_HW_CAMERA_ZSL,
- 0,
- StreamRotation::ROTATION_0};
- Stream inputStream = {streamId++,
- StreamType::INPUT,
- static_cast<uint32_t>(input.width),
- static_cast<uint32_t>(input.height),
- static_cast<PixelFormat>(input.format),
- 0,
- 0,
- StreamRotation::ROTATION_0};
- Stream outputStream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(outputIter.width),
- static_cast<uint32_t>(outputIter.height),
- static_cast<PixelFormat>(outputIter.format),
- GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
- 0,
- StreamRotation::ROTATION_0};
-
- ::android::hardware::hidl_vec<Stream> streams = {inputStream, zslStream,
- outputStream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- if (session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [](Status s, HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(3u, halConfig.streams.size());
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(3u, halConfig.streams.size());
- });
- }
- ASSERT_TRUE(ret.isOk());
- }
- }
-
- free_camera_metadata(staticMeta);
- ret = session->close();
ASSERT_TRUE(ret.isOk());
}
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
}
+
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ }
+}
+
+// Check whether session parameters are supported. If HAL support for them
+// exists, then try to configure a preview stream using them.
+TEST_F(CameraHidlTest, configureStreamsWithSessionParameters) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(mProvider);
+ std::vector<AvailableStream> outputPreviewStreams;
+ AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
+ static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
+
+ for (const auto& name : cameraDeviceNames) {
+ int deviceVersion = getCameraDeviceVersion(name, mProviderType);
+ if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
+ } else if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_4) {
+ continue;
+ }
+
+ camera_metadata_t* staticMetaBuffer;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ sp<device::V3_3::ICameraDeviceSession> session3_3;
+ sp<device::V3_4::ICameraDeviceSession> session3_4;
+ openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMetaBuffer /*out*/);
+ castSession(session, deviceVersion, &session3_3, &session3_4);
+ ASSERT_NE(session3_4, nullptr);
+
+ const android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
+ staticMetaBuffer);
+ camera_metadata_ro_entry availableSessionKeys = staticMeta.find(
+ ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
+ if (availableSessionKeys.count == 0) {
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ continue;
+ }
+
+ android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
+ ret = session->constructDefaultRequestSettings(RequestTemplate::PREVIEW,
+ [&previewRequestSettings] (auto status, const auto& req) mutable {
+ ASSERT_EQ(Status::OK, status);
+
+ const camera_metadata_t *metadata = reinterpret_cast<const camera_metadata_t*> (
+ req.data());
+ size_t expectedSize = req.size();
+ int result = validate_camera_metadata_structure(metadata, &expectedSize);
+ ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
+
+ size_t entryCount = get_camera_metadata_entry_count(metadata);
+ ASSERT_GT(entryCount, 0u);
+ previewRequestSettings = metadata;
+ });
+ ASSERT_TRUE(ret.isOk());
+ const android::hardware::camera::common::V1_0::helper::CameraMetadata &constSettings =
+ previewRequestSettings;
+ android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams;
+ for (size_t i = 0; i < availableSessionKeys.count; i++) {
+ camera_metadata_ro_entry entry = constSettings.find(availableSessionKeys.data.i32[i]);
+ if (entry.count > 0) {
+ sessionParams.update(entry);
+ }
+ }
+ if (sessionParams.isEmpty()) {
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ continue;
+ }
+
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
+ &previewThreshold));
+ ASSERT_NE(0u, outputPreviewStreams.size());
+
+ Stream previewStream = {0,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(outputPreviewStreams[0].width),
+ static_cast<uint32_t>(outputPreviewStreams[0].height),
+ static_cast<PixelFormat>(outputPreviewStreams[0].format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+ 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {previewStream};
+ ::android::hardware::camera::device::V3_4::StreamConfiguration config;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ const camera_metadata_t *sessionParamsBuffer = sessionParams.getAndLock();
+ config.sessionParams.setToExternal(
+ reinterpret_cast<uint8_t *> (const_cast<camera_metadata_t *> (sessionParamsBuffer)),
+ get_camera_metadata_size(sessionParamsBuffer));
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ });
+ ASSERT_TRUE(ret.isOk());
+
+ sessionParams.unlock(sessionParamsBuffer);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
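For context, a hedged sketch of how a HAL-side implementation could unpack the sessionParams buffer that this test packs with setToExternal(); the handler name is hypothetical and this is not part of the patch.

    // Illustrative sketch (hypothetical handler, not part of this patch).
    void handleConfigureStreams_3_4(
            const ::android::hardware::camera::device::V3_4::StreamConfiguration& config) {
        // sessionParams is a packed camera_metadata_t buffer; empty means no session keys.
        const camera_metadata_t* sessionParams = nullptr;
        if (config.sessionParams.size() > 0) {
            sessionParams = reinterpret_cast<const camera_metadata_t*>(
                    config.sessionParams.data());
        }
        // The embedded v3_2 struct still carries the stream list and operation mode.
        const auto& streams = config.v3_2.streams;
        (void) streams;
        (void) sessionParams;
    }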
@@ -2626,82 +2774,82 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- camera_metadata_t* staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- sp<device::V3_3::ICameraDeviceSession> session3_3;
- openEmptyDeviceSession(name, mProvider,
- &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/);
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
+ }
- outputBlobStreams.clear();
- ASSERT_EQ(Status::OK,
- getAvailableOutputStreams(staticMeta, outputBlobStreams,
- &blobThreshold));
- ASSERT_NE(0u, outputBlobStreams.size());
+ camera_metadata_t* staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ sp<device::V3_3::ICameraDeviceSession> session3_3;
+ sp<device::V3_4::ICameraDeviceSession> session3_4;
+ openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/);
+ castSession(session, deviceVersion, &session3_3, &session3_4);
- outputPreviewStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputPreviewStreams,
- &previewThreshold));
- ASSERT_NE(0u, outputPreviewStreams.size());
+ outputBlobStreams.clear();
+ ASSERT_EQ(Status::OK,
+ getAvailableOutputStreams(staticMeta, outputBlobStreams,
+ &blobThreshold));
+ ASSERT_NE(0u, outputBlobStreams.size());
- int32_t streamId = 0;
- for (auto& blobIter : outputBlobStreams) {
- for (auto& previewIter : outputPreviewStreams) {
- Stream previewStream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(previewIter.width),
- static_cast<uint32_t>(previewIter.height),
- static_cast<PixelFormat>(previewIter.format),
- GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
- 0,
- StreamRotation::ROTATION_0};
- Stream blobStream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(blobIter.width),
- static_cast<uint32_t>(blobIter.height),
- static_cast<PixelFormat>(blobIter.format),
- GRALLOC1_CONSUMER_USAGE_CPU_READ,
- 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {previewStream,
- blobStream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- if (session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [](Status s, HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(2u, halConfig.streams.size());
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(2u, halConfig.streams.size());
- });
- }
- ASSERT_TRUE(ret.isOk());
- }
+ outputPreviewStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputPreviewStreams,
+ &previewThreshold));
+ ASSERT_NE(0u, outputPreviewStreams.size());
+
+ int32_t streamId = 0;
+ for (auto& blobIter : outputBlobStreams) {
+ for (auto& previewIter : outputPreviewStreams) {
+ Stream previewStream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(previewIter.width),
+ static_cast<uint32_t>(previewIter.height),
+ static_cast<PixelFormat>(previewIter.format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+ 0,
+ StreamRotation::ROTATION_0};
+ Stream blobStream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(blobIter.width),
+ static_cast<uint32_t>(blobIter.height),
+ static_cast<PixelFormat>(blobIter.format),
+ GRALLOC1_CONSUMER_USAGE_CPU_READ,
+ 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {previewStream,
+ blobStream};
+ ::android::hardware::camera::device::V3_4::StreamConfiguration config;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(2u, halConfig.streams.size());
+ });
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(2u, halConfig.streams.size());
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [](Status s, HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(2u, halConfig.streams.size());
+ });
}
-
- free_camera_metadata(staticMeta);
- ret = session->close();
ASSERT_TRUE(ret.isOk());
}
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
}
+
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
@@ -2713,143 +2861,160 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- camera_metadata_t* staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- sp<device::V3_3::ICameraDeviceSession> session3_3;
- openEmptyDeviceSession(name, mProvider,
- &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/);
-
- Status rc = isConstrainedModeAvailable(staticMeta);
- if (Status::METHOD_NOT_SUPPORTED == rc) {
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- continue;
- }
- ASSERT_EQ(Status::OK, rc);
-
- AvailableStream hfrStream;
- rc = pickConstrainedModeSize(staticMeta, hfrStream);
- ASSERT_EQ(Status::OK, rc);
-
- int32_t streamId = 0;
- Stream stream = {streamId,
- StreamType::OUTPUT,
- static_cast<uint32_t>(hfrStream.width),
- static_cast<uint32_t>(hfrStream.height),
- static_cast<PixelFormat>(hfrStream.format),
- GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
- 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {stream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
- if (session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [streamId](Status s, HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(1u, halConfig.streams.size());
- ASSERT_EQ(halConfig.streams[0].id, streamId);
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(1u, halConfig.streams.size());
- ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId);
- });
- }
- ASSERT_TRUE(ret.isOk());
-
- stream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(0),
- static_cast<uint32_t>(0),
- static_cast<PixelFormat>(hfrStream.format),
- GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
- 0,
- StreamRotation::ROTATION_0};
- streams[0] = stream;
- config = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
- if (session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [](Status s, HalStreamConfiguration) {
- ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
- (Status::INTERNAL_ERROR == s));
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [](Status s, device::V3_3::HalStreamConfiguration) {
- ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
- (Status::INTERNAL_ERROR == s));
- });
- }
- ASSERT_TRUE(ret.isOk());
-
- stream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(UINT32_MAX),
- static_cast<uint32_t>(UINT32_MAX),
- static_cast<PixelFormat>(hfrStream.format),
- GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
- 0,
- StreamRotation::ROTATION_0};
- streams[0] = stream;
- config = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
- if (session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [](Status s, HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [](Status s, device::V3_3::HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- }
- ASSERT_TRUE(ret.isOk());
-
- stream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(hfrStream.width),
- static_cast<uint32_t>(hfrStream.height),
- static_cast<PixelFormat>(UINT32_MAX),
- GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
- 0,
- StreamRotation::ROTATION_0};
- streams[0] = stream;
- config = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
- if (session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [](Status s, HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [](Status s, device::V3_3::HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- }
- ASSERT_TRUE(ret.isOk());
-
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- }
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
}
+
+ camera_metadata_t* staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ sp<device::V3_3::ICameraDeviceSession> session3_3;
+ sp<device::V3_4::ICameraDeviceSession> session3_4;
+ openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/);
+ castSession(session, deviceVersion, &session3_3, &session3_4);
+
+ Status rc = isConstrainedModeAvailable(staticMeta);
+ if (Status::METHOD_NOT_SUPPORTED == rc) {
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ continue;
+ }
+ ASSERT_EQ(Status::OK, rc);
+
+ AvailableStream hfrStream;
+ rc = pickConstrainedModeSize(staticMeta, hfrStream);
+ ASSERT_EQ(Status::OK, rc);
+
+ int32_t streamId = 0;
+ Stream stream = {streamId,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(hfrStream.width),
+ static_cast<uint32_t>(hfrStream.height),
+ static_cast<PixelFormat>(hfrStream.format),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
+ 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {stream};
+ ::android::hardware::camera::device::V3_4::StreamConfiguration config;
+ config.v3_2 = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId);
+ });
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId);
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [streamId](Status s, HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ ASSERT_EQ(halConfig.streams[0].id, streamId);
+ });
+ }
+ ASSERT_TRUE(ret.isOk());
+
+ stream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(0),
+ static_cast<uint32_t>(0),
+ static_cast<PixelFormat>(hfrStream.format),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
+ 0,
+ StreamRotation::ROTATION_0};
+ streams[0] = stream;
+ config.v3_2 = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
+ (Status::INTERNAL_ERROR == s));
+ });
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
+ (Status::INTERNAL_ERROR == s));
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [](Status s, HalStreamConfiguration) {
+ ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
+ (Status::INTERNAL_ERROR == s));
+ });
+ }
+ ASSERT_TRUE(ret.isOk());
+
+ stream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(UINT32_MAX),
+ static_cast<uint32_t>(UINT32_MAX),
+ static_cast<PixelFormat>(hfrStream.format),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
+ 0,
+ StreamRotation::ROTATION_0};
+ streams[0] = stream;
+ config.v3_2 = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [](Status s, HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ }
+ ASSERT_TRUE(ret.isOk());
+
+ stream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(hfrStream.width),
+ static_cast<uint32_t>(hfrStream.height),
+ static_cast<PixelFormat>(UINT32_MAX),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
+ 0,
+ StreamRotation::ROTATION_0};
+ streams[0] = stream;
+ config.v3_2 = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [](Status s, HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ }
+ ASSERT_TRUE(ret.isOk());
+
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
@@ -2866,82 +3031,82 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- camera_metadata_t* staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- sp<device::V3_3::ICameraDeviceSession> session3_3;
- openEmptyDeviceSession(name, mProvider,
- &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/);
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
+ }
- outputBlobStreams.clear();
- ASSERT_EQ(Status::OK,
- getAvailableOutputStreams(staticMeta, outputBlobStreams,
- &blobThreshold));
- ASSERT_NE(0u, outputBlobStreams.size());
+ camera_metadata_t* staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ sp<device::V3_3::ICameraDeviceSession> session3_3;
+ sp<device::V3_4::ICameraDeviceSession> session3_4;
+ openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/);
+ castSession(session, deviceVersion, &session3_3, &session3_4);
- outputVideoStreams.clear();
- ASSERT_EQ(Status::OK,
- getAvailableOutputStreams(staticMeta, outputVideoStreams,
- &videoThreshold));
- ASSERT_NE(0u, outputVideoStreams.size());
+ outputBlobStreams.clear();
+ ASSERT_EQ(Status::OK,
+ getAvailableOutputStreams(staticMeta, outputBlobStreams,
+ &blobThreshold));
+ ASSERT_NE(0u, outputBlobStreams.size());
- int32_t streamId = 0;
- for (auto& blobIter : outputBlobStreams) {
- for (auto& videoIter : outputVideoStreams) {
- Stream videoStream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(videoIter.width),
- static_cast<uint32_t>(videoIter.height),
- static_cast<PixelFormat>(videoIter.format),
- GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
- 0,
- StreamRotation::ROTATION_0};
- Stream blobStream = {streamId++,
- StreamType::OUTPUT,
- static_cast<uint32_t>(blobIter.width),
- static_cast<uint32_t>(blobIter.height),
- static_cast<PixelFormat>(blobIter.format),
- GRALLOC1_CONSUMER_USAGE_CPU_READ,
- 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {videoStream, blobStream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- if (session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [](Status s, HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(2u, halConfig.streams.size());
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(2u, halConfig.streams.size());
- });
- }
- ASSERT_TRUE(ret.isOk());
- }
+ outputVideoStreams.clear();
+ ASSERT_EQ(Status::OK,
+ getAvailableOutputStreams(staticMeta, outputVideoStreams,
+ &videoThreshold));
+ ASSERT_NE(0u, outputVideoStreams.size());
+
+ int32_t streamId = 0;
+ for (auto& blobIter : outputBlobStreams) {
+ for (auto& videoIter : outputVideoStreams) {
+ Stream videoStream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(videoIter.width),
+ static_cast<uint32_t>(videoIter.height),
+ static_cast<PixelFormat>(videoIter.format),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER,
+ 0,
+ StreamRotation::ROTATION_0};
+ Stream blobStream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast<uint32_t>(blobIter.width),
+ static_cast<uint32_t>(blobIter.height),
+ static_cast<PixelFormat>(blobIter.format),
+ GRALLOC1_CONSUMER_USAGE_CPU_READ,
+ 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {videoStream, blobStream};
+ ::android::hardware::camera::device::V3_4::StreamConfiguration config;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
+ [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(2u, halConfig.streams.size());
+ });
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(2u, halConfig.streams.size());
+ });
+ } else {
+ ret = session->configureStreams(config.v3_2,
+ [](Status s, HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(2u, halConfig.streams.size());
+ });
}
-
- free_camera_metadata(staticMeta);
- ret = session->close();
ASSERT_TRUE(ret.isOk());
}
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
}
+
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
@@ -2956,152 +3121,145 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- bool supportsPartialResults = false;
- uint32_t partialResultCount = 0;
- configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/,
- &previewStream /*out*/, &halStreamConfig /*out*/,
- &supportsPartialResults /*out*/,
- &partialResultCount /*out*/);
-
- std::shared_ptr<ResultMetadataQueue> resultQueue;
- auto resultQueueRet =
- session->getCaptureResultMetadataQueue(
- [&resultQueue](const auto& descriptor) {
- resultQueue = std::make_shared<ResultMetadataQueue>(
- descriptor);
- if (!resultQueue->isValid() ||
- resultQueue->availableToWrite() <= 0) {
- ALOGE("%s: HAL returns empty result metadata fmq,"
- " not use it", __func__);
- resultQueue = nullptr;
- // Don't use the queue onwards.
- }
- });
- ASSERT_TRUE(resultQueueRet.isOk());
-
- InFlightRequest inflightReq = {1, false, supportsPartialResults,
- partialResultCount, resultQueue};
-
- RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
- Return<void> ret;
- ret = session->constructDefaultRequestSettings(reqTemplate,
- [&](auto status, const auto& req) {
- ASSERT_EQ(Status::OK, status);
- settings = req;
- });
- ASSERT_TRUE(ret.isOk());
-
- sp<GraphicBuffer> gb = new GraphicBuffer(
- previewStream.width, previewStream.height,
- static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat), 1,
- android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage,
- halStreamConfig.streams[0].consumerUsage));
- ASSERT_NE(nullptr, gb.get());
- StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
- bufferId,
- hidl_handle(gb->getNativeBuffer()->handle),
- BufferStatus::OK,
- nullptr,
- nullptr};
- ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {outputBuffer};
- StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr,
- nullptr};
- CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
- emptyInputBuffer, outputBuffers};
-
- {
- std::unique_lock<std::mutex> l(mLock);
- mInflightMap.clear();
- mInflightMap.add(frameNumber, &inflightReq);
- }
-
- Status status = Status::INTERNAL_ERROR;
- uint32_t numRequestProcessed = 0;
- hidl_vec<BufferCache> cachesToRemove;
- Return<void> returnStatus = session->processCaptureRequest(
- {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
- uint32_t n) {
- status = s;
- numRequestProcessed = n;
- });
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, status);
- ASSERT_EQ(numRequestProcessed, 1u);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- while (!inflightReq.errorCodeValid &&
- ((0 < inflightReq.numBuffersLeft) ||
- (!inflightReq.haveResultMetadata))) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kStreamBufferTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mResultCondition.wait_until(l, timeout));
- }
-
- ASSERT_FALSE(inflightReq.errorCodeValid);
- ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u);
- ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId);
-
- request.frameNumber++;
- // Empty settings should be supported after the first call
- // for repeating requests.
- request.settings.setToExternal(nullptr, 0, true);
- // The buffer has been registered to HAL by bufferId, so per
- // API contract we should send a null handle for this buffer
- request.outputBuffers[0].buffer = nullptr;
- mInflightMap.clear();
- inflightReq = {1, false, supportsPartialResults, partialResultCount,
- resultQueue};
- mInflightMap.add(request.frameNumber, &inflightReq);
- }
-
- returnStatus = session->processCaptureRequest(
- {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
- uint32_t n) {
- status = s;
- numRequestProcessed = n;
- });
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, status);
- ASSERT_EQ(numRequestProcessed, 1u);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- while (!inflightReq.errorCodeValid &&
- ((0 < inflightReq.numBuffersLeft) ||
- (!inflightReq.haveResultMetadata))) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kStreamBufferTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mResultCondition.wait_until(l, timeout));
- }
-
- ASSERT_FALSE(inflightReq.errorCodeValid);
- ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u);
- ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId);
- }
-
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- }
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
}
+
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/,
+ &previewStream /*out*/, &halStreamConfig /*out*/,
+ &supportsPartialResults /*out*/,
+ &partialResultCount /*out*/);
+
+ std::shared_ptr<ResultMetadataQueue> resultQueue;
+ auto resultQueueRet =
+ session->getCaptureResultMetadataQueue(
+ [&resultQueue](const auto& descriptor) {
+ resultQueue = std::make_shared<ResultMetadataQueue>(
+ descriptor);
+ if (!resultQueue->isValid() ||
+ resultQueue->availableToWrite() <= 0) {
+ ALOGE("%s: HAL returns empty result metadata fmq,"
+ " not use it", __func__);
+ resultQueue = nullptr;
+ // Don't use the queue onwards.
+ }
+ });
+ ASSERT_TRUE(resultQueueRet.isOk());
+
+ InFlightRequest inflightReq = {1, false, supportsPartialResults,
+ partialResultCount, resultQueue};
+
+ RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
+ Return<void> ret;
+ ret = session->constructDefaultRequestSettings(reqTemplate,
+ [&](auto status, const auto& req) {
+ ASSERT_EQ(Status::OK, status);
+ settings = req;
+ });
+ ASSERT_TRUE(ret.isOk());
+
+ sp<GraphicBuffer> gb = new GraphicBuffer(
+ previewStream.width, previewStream.height,
+ static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat), 1,
+ android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage,
+ halStreamConfig.streams[0].consumerUsage));
+ ASSERT_NE(nullptr, gb.get());
+ StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
+ bufferId,
+ hidl_handle(gb->getNativeBuffer()->handle),
+ BufferStatus::OK,
+ nullptr,
+ nullptr};
+ ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {outputBuffer};
+ StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr,
+ nullptr};
+ CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
+ emptyInputBuffer, outputBuffers};
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ mInflightMap.clear();
+ mInflightMap.add(frameNumber, &inflightReq);
+ }
+
+ Status status = Status::INTERNAL_ERROR;
+ uint32_t numRequestProcessed = 0;
+ hidl_vec<BufferCache> cachesToRemove;
+ Return<void> returnStatus = session->processCaptureRequest(
+ {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
+ uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_EQ(numRequestProcessed, 1u);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ while (!inflightReq.errorCodeValid &&
+ ((0 < inflightReq.numBuffersLeft) ||
+ (!inflightReq.haveResultMetadata))) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kStreamBufferTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mResultCondition.wait_until(l, timeout));
+ }
+
+ ASSERT_FALSE(inflightReq.errorCodeValid);
+ ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u);
+ ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId);
+
+ request.frameNumber++;
+ // Empty settings should be supported after the first call
+ // for repeating requests.
+ request.settings.setToExternal(nullptr, 0, true);
+ // The buffer has been registered with the HAL by bufferId, so per
+ // the API contract we should send a null handle for this buffer
+ request.outputBuffers[0].buffer = nullptr;
+ mInflightMap.clear();
+ inflightReq = {1, false, supportsPartialResults, partialResultCount,
+ resultQueue};
+ mInflightMap.add(request.frameNumber, &inflightReq);
+ }
+
+ returnStatus = session->processCaptureRequest(
+ {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
+ uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_EQ(numRequestProcessed, 1u);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ while (!inflightReq.errorCodeValid &&
+ ((0 < inflightReq.numBuffersLeft) ||
+ (!inflightReq.haveResultMetadata))) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kStreamBufferTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mResultCondition.wait_until(l, timeout));
+ }
+
+ ASSERT_FALSE(inflightReq.errorCodeValid);
+ ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u);
+ ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId);
+ }
+
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
@@ -3118,65 +3276,58 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- bool supportsPartialResults = false;
- uint32_t partialResultCount = 0;
- configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/,
- &previewStream /*out*/, &halStreamConfig /*out*/,
- &supportsPartialResults /*out*/,
- &partialResultCount /*out*/);
-
- sp<GraphicBuffer> gb = new GraphicBuffer(
- previewStream.width, previewStream.height,
- static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat), 1,
- android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage,
- halStreamConfig.streams[0].consumerUsage));
-
- StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
- bufferId,
- hidl_handle(gb->getNativeBuffer()->handle),
- BufferStatus::OK,
- nullptr,
- nullptr};
- ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {outputBuffer};
- StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr,
- nullptr};
- CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
- emptyInputBuffer, outputBuffers};
-
- // Settings were not correctly initialized, we should fail here
- Status status = Status::OK;
- uint32_t numRequestProcessed = 0;
- hidl_vec<BufferCache> cachesToRemove;
- Return<void> ret = session->processCaptureRequest(
- {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
- uint32_t n) {
- status = s;
- numRequestProcessed = n;
- });
- ASSERT_TRUE(ret.isOk());
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status);
- ASSERT_EQ(numRequestProcessed, 0u);
-
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- }
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
}
+
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/,
+ &previewStream /*out*/, &halStreamConfig /*out*/,
+ &supportsPartialResults /*out*/,
+ &partialResultCount /*out*/);
+
+ sp<GraphicBuffer> gb = new GraphicBuffer(
+ previewStream.width, previewStream.height,
+ static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat), 1,
+ android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage,
+ halStreamConfig.streams[0].consumerUsage));
+
+ StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
+ bufferId,
+ hidl_handle(gb->getNativeBuffer()->handle),
+ BufferStatus::OK,
+ nullptr,
+ nullptr};
+ ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {outputBuffer};
+ StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr,
+ nullptr};
+ CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
+ emptyInputBuffer, outputBuffers};
+
+ // Settings were not correctly initialized, so this request should fail here
+ Status status = Status::OK;
+ uint32_t numRequestProcessed = 0;
+ hidl_vec<BufferCache> cachesToRemove;
+ Return<void> ret = session->processCaptureRequest(
+ {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
+ uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+ ASSERT_TRUE(ret.isOk());
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status);
+ ASSERT_EQ(numRequestProcessed, 0u);
+
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
@@ -3192,62 +3343,55 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- bool supportsPartialResults = false;
- uint32_t partialResultCount = 0;
- configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/,
- &previewStream /*out*/, &halStreamConfig /*out*/,
- &supportsPartialResults /*out*/,
- &partialResultCount /*out*/);
-
- RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
- Return<void> ret;
- ret = session->constructDefaultRequestSettings(reqTemplate,
- [&](auto status, const auto& req) {
- ASSERT_EQ(Status::OK, status);
- settings = req;
- });
- ASSERT_TRUE(ret.isOk());
-
- ::android::hardware::hidl_vec<StreamBuffer> emptyOutputBuffers;
- StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr,
- nullptr};
- CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
- emptyInputBuffer, emptyOutputBuffers};
-
- // Output buffers are missing, we should fail here
- Status status = Status::OK;
- uint32_t numRequestProcessed = 0;
- hidl_vec<BufferCache> cachesToRemove;
- ret = session->processCaptureRequest(
- {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
- uint32_t n) {
- status = s;
- numRequestProcessed = n;
- });
- ASSERT_TRUE(ret.isOk());
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status);
- ASSERT_EQ(numRequestProcessed, 0u);
-
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- }
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
}
+
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/,
+ &previewStream /*out*/, &halStreamConfig /*out*/,
+ &supportsPartialResults /*out*/,
+ &partialResultCount /*out*/);
+
+ RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
+ Return<void> ret;
+ ret = session->constructDefaultRequestSettings(reqTemplate,
+ [&](auto status, const auto& req) {
+ ASSERT_EQ(Status::OK, status);
+ settings = req;
+ });
+ ASSERT_TRUE(ret.isOk());
+
+ ::android::hardware::hidl_vec<StreamBuffer> emptyOutputBuffers;
+ StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr,
+ nullptr};
+ CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
+ emptyInputBuffer, emptyOutputBuffers};
+
+ // Output buffers are missing, so this request should fail here
+ Status status = Status::OK;
+ uint32_t numRequestProcessed = 0;
+ hidl_vec<BufferCache> cachesToRemove;
+ ret = session->processCaptureRequest(
+ {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
+ uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+ ASSERT_TRUE(ret.isOk());
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status);
+ ASSERT_EQ(numRequestProcessed, 0u);
+
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
@@ -3263,130 +3407,123 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- bool supportsPartialResults = false;
- uint32_t partialResultCount = 0;
- configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/,
- &previewStream /*out*/, &halStreamConfig /*out*/,
- &supportsPartialResults /*out*/,
- &partialResultCount /*out*/);
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
+ }
- std::shared_ptr<ResultMetadataQueue> resultQueue;
- auto resultQueueRet =
- session->getCaptureResultMetadataQueue(
- [&resultQueue](const auto& descriptor) {
- resultQueue = std::make_shared<ResultMetadataQueue>(
- descriptor);
- if (!resultQueue->isValid() ||
- resultQueue->availableToWrite() <= 0) {
- ALOGE("%s: HAL returns empty result metadata fmq,"
- " not use it", __func__);
- resultQueue = nullptr;
- // Don't use the queue onwards.
- }
- });
- ASSERT_TRUE(resultQueueRet.isOk());
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/,
+ &previewStream /*out*/, &halStreamConfig /*out*/,
+ &supportsPartialResults /*out*/,
+ &partialResultCount /*out*/);
- InFlightRequest inflightReq = {1, false, supportsPartialResults,
- partialResultCount, resultQueue};
- RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
- Return<void> ret;
- ret = session->constructDefaultRequestSettings(reqTemplate,
- [&](auto status, const auto& req) {
- ASSERT_EQ(Status::OK, status);
- settings = req;
- });
- ASSERT_TRUE(ret.isOk());
-
- sp<GraphicBuffer> gb = new GraphicBuffer(
- previewStream.width, previewStream.height,
- static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat), 1,
- android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage,
- halStreamConfig.streams[0].consumerUsage));
- ASSERT_NE(nullptr, gb.get());
- StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
- bufferId,
- hidl_handle(gb->getNativeBuffer()->handle),
- BufferStatus::OK,
- nullptr,
- nullptr};
- ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {outputBuffer};
- const StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
- BufferStatus::ERROR, nullptr, nullptr};
- CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
- emptyInputBuffer, outputBuffers};
-
- {
- std::unique_lock<std::mutex> l(mLock);
- mInflightMap.clear();
- mInflightMap.add(frameNumber, &inflightReq);
- }
-
- Status status = Status::INTERNAL_ERROR;
- uint32_t numRequestProcessed = 0;
- hidl_vec<BufferCache> cachesToRemove;
- ret = session->processCaptureRequest(
- {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
- uint32_t n) {
- status = s;
- numRequestProcessed = n;
- });
-
- ASSERT_TRUE(ret.isOk());
- ASSERT_EQ(Status::OK, status);
- ASSERT_EQ(numRequestProcessed, 1u);
- // Flush before waiting for request to complete.
- Return<Status> returnStatus = session->flush();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- while (!inflightReq.errorCodeValid &&
- ((0 < inflightReq.numBuffersLeft) ||
- (!inflightReq.haveResultMetadata))) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kStreamBufferTimeoutSec);
- ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l,
- timeout));
+ std::shared_ptr<ResultMetadataQueue> resultQueue;
+ auto resultQueueRet =
+ session->getCaptureResultMetadataQueue(
+ [&resultQueue](const auto& descriptor) {
+ resultQueue = std::make_shared<ResultMetadataQueue>(
+ descriptor);
+ if (!resultQueue->isValid() ||
+ resultQueue->availableToWrite() <= 0) {
+ ALOGE("%s: HAL returns empty result metadata fmq,"
+ " not use it", __func__);
+ resultQueue = nullptr;
+ // Don't use the queue onwards.
}
+ });
+ ASSERT_TRUE(resultQueueRet.isOk());
- if (!inflightReq.errorCodeValid) {
- ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u);
- ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId);
- } else {
- switch (inflightReq.errorCode) {
- case ErrorCode::ERROR_REQUEST:
- case ErrorCode::ERROR_RESULT:
- case ErrorCode::ERROR_BUFFER:
- // Expected
- break;
- case ErrorCode::ERROR_DEVICE:
- default:
- FAIL() << "Unexpected error:"
- << static_cast<uint32_t>(inflightReq.errorCode);
- }
- }
+ InFlightRequest inflightReq = {1, false, supportsPartialResults,
+ partialResultCount, resultQueue};
+ RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
+ Return<void> ret;
+ ret = session->constructDefaultRequestSettings(reqTemplate,
+ [&](auto status, const auto& req) {
+ ASSERT_EQ(Status::OK, status);
+ settings = req;
+ });
+ ASSERT_TRUE(ret.isOk());
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
+ sp<GraphicBuffer> gb = new GraphicBuffer(
+ previewStream.width, previewStream.height,
+ static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat), 1,
+ android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage,
+ halStreamConfig.streams[0].consumerUsage));
+ ASSERT_NE(nullptr, gb.get());
+ StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
+ bufferId,
+ hidl_handle(gb->getNativeBuffer()->handle),
+ BufferStatus::OK,
+ nullptr,
+ nullptr};
+ ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {outputBuffer};
+ const StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
+ BufferStatus::ERROR, nullptr, nullptr};
+ CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
+ emptyInputBuffer, outputBuffers};
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ mInflightMap.clear();
+ mInflightMap.add(frameNumber, &inflightReq);
+ }
+
+ Status status = Status::INTERNAL_ERROR;
+ uint32_t numRequestProcessed = 0;
+ hidl_vec<BufferCache> cachesToRemove;
+ ret = session->processCaptureRequest(
+ {request}, cachesToRemove, [&status, &numRequestProcessed](auto s,
+ uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+
+ ASSERT_TRUE(ret.isOk());
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_EQ(numRequestProcessed, 1u);
+ // Flush before waiting for request to complete.
+ Return<Status> returnStatus = session->flush();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ while (!inflightReq.errorCodeValid &&
+ ((0 < inflightReq.numBuffersLeft) ||
+ (!inflightReq.haveResultMetadata))) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kStreamBufferTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l,
+ timeout));
+ }
+
+ if (!inflightReq.errorCodeValid) {
+ ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u);
+ ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId);
+ } else {
+ switch (inflightReq.errorCode) {
+ case ErrorCode::ERROR_REQUEST:
+ case ErrorCode::ERROR_RESULT:
+ case ErrorCode::ERROR_BUFFER:
+ // Expected
+ break;
+ case ErrorCode::ERROR_DEVICE:
+ default:
+ FAIL() << "Unexpected error:"
+ << static_cast<uint32_t>(inflightReq.errorCode);
}
}
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
+
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
}
@@ -3400,44 +3537,37 @@
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- bool supportsPartialResults = false;
- uint32_t partialResultCount = 0;
- configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/,
- &previewStream /*out*/, &halStreamConfig /*out*/,
- &supportsPartialResults /*out*/,
- &partialResultCount /*out*/);
-
- Return<Status> returnStatus = session->flush();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
- ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
- }
-
- Return<void> ret = session->close();
- ASSERT_TRUE(ret.isOk());
- }
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
}
+
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/,
+ &previewStream /*out*/, &halStreamConfig /*out*/,
+ &supportsPartialResults /*out*/,
+ &partialResultCount /*out*/);
+
+ Return<Status> returnStatus = session->flush();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
+ ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
+ }
+
+ Return<void> ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
@@ -3625,7 +3755,7 @@
}
// Open a device session and configure a preview stream.
-void CameraHidlTest::configurePreviewStream(const std::string &name,
+void CameraHidlTest::configurePreviewStream(const std::string &name, int32_t deviceVersion,
sp<ICameraProvider> provider,
const AvailableStream *previewThreshold,
sp<ICameraDeviceSession> *session /*out*/,
@@ -3665,9 +3795,9 @@
});
ASSERT_TRUE(ret.isOk());
- auto castResult = device::V3_3::ICameraDeviceSession::castFrom(*session);
- ASSERT_TRUE(castResult.isOk());
- sp<device::V3_3::ICameraDeviceSession> session3_3 = castResult;
+ sp<device::V3_3::ICameraDeviceSession> session3_3;
+ sp<device::V3_4::ICameraDeviceSession> session3_4;
+ castSession(*session, deviceVersion, &session3_3, &session3_4);
camera_metadata_t *staticMeta;
ret = device3_x->getCameraCharacteristics([&] (Status s,
@@ -3700,17 +3830,10 @@
static_cast<PixelFormat> (outputPreviewStreams[0].format),
GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0, StreamRotation::ROTATION_0};
::android::hardware::hidl_vec<Stream> streams = {*previewStream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- if (session3_3 == nullptr) {
- ret = (*session)->configureStreams(config,
- [&] (Status s, HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(1u, halConfig.streams.size());
- *halStreamConfig = halConfig;
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
+ ::android::hardware::camera::device::V3_4::StreamConfiguration config;
+ config.v3_2 = {streams, StreamConfigurationMode::NORMAL_MODE};
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config,
[&] (Status s, device::V3_3::HalStreamConfiguration halConfig) {
ASSERT_EQ(Status::OK, s);
ASSERT_EQ(1u, halConfig.streams.size());
@@ -3719,15 +3842,57 @@
halStreamConfig->streams[i] = halConfig.streams[i].v3_2;
}
});
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config.v3_2,
+ [&] (Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ halStreamConfig->streams.resize(halConfig.streams.size());
+ for (size_t i = 0; i < halConfig.streams.size(); i++) {
+ halStreamConfig->streams[i] = halConfig.streams[i].v3_2;
+ }
+ });
+ } else {
+ ret = (*session)->configureStreams(config.v3_2,
+ [&] (Status s, HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ *halStreamConfig = halConfig;
+ });
}
ASSERT_TRUE(ret.isOk());
}
+// Cast the camera device session to the corresponding version
+void CameraHidlTest::castSession(const sp<ICameraDeviceSession> &session, int32_t deviceVersion,
+ sp<device::V3_3::ICameraDeviceSession> *session3_3 /*out*/,
+ sp<device::V3_4::ICameraDeviceSession> *session3_4 /*out*/) {
+ ASSERT_NE(nullptr, session3_3);
+ ASSERT_NE(nullptr, session3_4);
+
+ switch (deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_3_4: {
+ auto castResult = device::V3_4::ICameraDeviceSession::castFrom(session);
+ ASSERT_TRUE(castResult.isOk());
+ *session3_4 = castResult;
+ break;
+ }
+ case CAMERA_DEVICE_API_VERSION_3_3: {
+ auto castResult = device::V3_3::ICameraDeviceSession::castFrom(session);
+ ASSERT_TRUE(castResult.isOk());
+ *session3_3 = castResult;
+ break;
+ }
+ default:
+ // no-op
+ return;
+ }
+}
+
// Open a device session with empty callbacks and return static metadata.
void CameraHidlTest::openEmptyDeviceSession(const std::string &name,
sp<ICameraProvider> provider,
sp<ICameraDeviceSession> *session /*out*/,
- sp<device::V3_3::ICameraDeviceSession> *session3_3 /*out*/,
camera_metadata_t **staticMeta /*out*/) {
ASSERT_NE(nullptr, session);
ASSERT_NE(nullptr, staticMeta);
@@ -3763,12 +3928,6 @@
ASSERT_NE(nullptr, *staticMeta);
});
ASSERT_TRUE(ret.isOk());
-
- if(session3_3 != nullptr) {
- auto castResult = device::V3_3::ICameraDeviceSession::castFrom(*session);
- ASSERT_TRUE(castResult.isOk());
- *session3_3 = castResult;
- }
}
// Open a particular camera device.
diff --git a/current.txt b/current.txt
index dc57c67..c18153a 100644
--- a/current.txt
+++ b/current.txt
@@ -27,14 +27,6 @@
c93cb25a1a92d07aa80a617c01e8d22fc97bf8cefd3962b6a5be386ad4704d89 android.hardware.audio.effect@2.0::IVirtualizerEffect
918f331780c9c7b04f2151a2e563aab088198ede8e6f865302ebaa13905bd9ce android.hardware.audio.effect@2.0::IVisualizerEffect
4caad099f8fc00262b6c03ba41271808b37cea90ac98b534299bbf4ee823af02 android.hardware.audio.effect@2.0::types
-f2904a4c108ad1b93eb2fa4e43b82bd01ce1ff26156316e49d1d9fc80dfecaad android.hardware.automotive.evs@1.0::IEvsCamera
-94cba6ad04c83aa840de2ed52b74ba2126a26dd960225e61ac36703315279a80 android.hardware.automotive.evs@1.0::IEvsCameraStream
-5ea36fb043d9e3b413219de3dfd7b046b48af4fda39f167f3528652e986cb76d android.hardware.automotive.evs@1.0::IEvsDisplay
-14ef8e993a4a7c899b19bb5e39b5b0cafd28312ea2b127e35b3be8f08e23fe8e android.hardware.automotive.evs@1.0::IEvsEnumerator
-3b17c1fdfc389e0abe626c37054954b07201127d890c2bc05d47613ec1f4de4f android.hardware.automotive.evs@1.0::types
-cde0787e4bf4b450a9ceb9011d2698c0061322eb882621e89b70594b0b7c65c5 android.hardware.automotive.vehicle@2.0::IVehicle
-80fb4156fa91ce86e49bd2cabe215078f6b69591d416a09e914532eae6712052 android.hardware.automotive.vehicle@2.0::IVehicleCallback
-248004f0832b48199558bd5d1aacc29e7e6423dd0ff6012be142e23621f03f4b android.hardware.automotive.vehicle@2.0::types
1fbdc1f852f8bd2e4a6c5cb30ac2b78668c98dce118a61762d4034ae859f43d8 android.hardware.biometrics.fingerprint@2.1::IBiometricsFingerprint
aabb5c3c585592d71ee57b77298c14993d77914ddeaa64b2c5109a602b02ea47 android.hardware.biometrics.fingerprint@2.1::IBiometricsFingerprintClientCallback
1ec60d4efddae9a7b2469278a576967b4751e88de5b8d7e9df6eff6bc0da7bc9 android.hardware.biometrics.fingerprint@2.1::types
@@ -189,7 +181,6 @@
# ABI preserving changes to HALs during Android O MR1 (Initial Set)
-26a4dd19a71f3a28249100af29be470f80e08355165fe6a7173aaa1ef264640d android.hardware.automotive.vehicle@2.0::types
150a338ce11fcec70757c9675d83cf6a5d7b40d0c812741b91671fecce59eac9 android.hardware.broadcastradio@1.0::types
dc7e6d4f537b9943e27edc4f86c5a03bb643b18f18f866f8c3c71c0ac4ea8cbc android.hardware.broadcastradio@1.0::types
760485232f6cce07f8bb05e3475509956996b702f77415ee5bff05e2ec5a5bcc android.hardware.dumpstate@1.0::IDumpstateDevice
@@ -199,7 +190,6 @@
c2c50ec74c87a583c683b4493f8f9f2e454a8d41c57af5b3eb88823a999f0ea4 android.hardware.radio@1.0::IRadioResponse # Added for b/65230472 for Android O
4922dd58e89a03181ed1c48a6e118e47633b73b11090bdfed5aa920d25a7592b android.hardware.radio@1.0::IRadioResponse # Added for b/65230472 for Android O DR
28e929b453df3d9f5060af2764e6cdb123ddb893e3e86923c877f6ff7e5f02c9 android.hardware.wifi@1.0::types
-bfd81bcafa3c97519cd56ad29e9fa48e23d1d323b89dbcc85899282a8c628194 android.hardware.automotive.vehicle@2.0::types
df1d7b27e644bfed0a4f606a8c44d35d45cafce82c7c648494c8a25c7cd4a949 android.hardware.wifi@1.0::types
# HALs released in Android O MR1 (Initial Set)
@@ -260,8 +250,9 @@
c8bc853546dd55584611def2a9fa1d99f657e3366c976d2f60fe6b8aa6d2cb87 android.hardware.thermal@1.1::IThermalCallback
# ABI preserving changes to HALs during Android P
+cf72ff5a52bfa4d08e9e1000cf3ab5952a2d280c7f13cdad5ab7905c08050766 android.hardware.camera.metadata@3.2::types
6fa9804a17a8bb7923a56bd10493a5483c20007e4c9026fd04287bee7c945a8c android.hardware.gnss@1.0::IGnssCallback
fb92e2b40f8e9d494e8fd3b4ac18499a3216342e7cff160714c3bbf3660b6e79 android.hardware.gnss@1.0::IGnssConfiguration
251594ea9b27447bfa005ebd806e58fb0ae4aad84a69938129c9800ec0c64eda android.hardware.gnss@1.0::IGnssMeasurementCallback
-d4c10cb28318dba8efb22231a8c23e86ad8853f85775187c40b42a878a5ef4d5 android.hardware.automotive.vehicle@2.0::types
-cf72ff5a52bfa4d08e9e1000cf3ab5952a2d280c7f13cdad5ab7905c08050766 android.hardware.camera.metadata@3.2::types
+4e7169919d24fbe5573e5bcd683d0bd7abf553a4e6c34c41f9dfc1e12050db07 android.hardware.gnss@1.0::IGnssNavigationMessageCallback
+
diff --git a/gnss/1.0/IGnssNavigationMessageCallback.hal b/gnss/1.0/IGnssNavigationMessageCallback.hal
index 3fdae9f..24ee708 100644
--- a/gnss/1.0/IGnssNavigationMessageCallback.hal
+++ b/gnss/1.0/IGnssNavigationMessageCallback.hal
@@ -119,7 +119,8 @@
*
* - For Galileo F/NAV, this refers to the page type in the range 1-6
*
- * - For Galileo I/NAV, this refers to the word type in the range 1-10+
+ * - For Galileo I/NAV, this refers to the word type in the range 0-10+
+ * A value of 0 is only allowed if the Satellite is transmitting a Spare Word.
*/
int16_t submessageId;
diff --git a/graphics/composer/2.1/default/ComposerBase.h b/graphics/composer/2.1/default/ComposerBase.h
index 85b1a4d..e1c9d33 100644
--- a/graphics/composer/2.1/default/ComposerBase.h
+++ b/graphics/composer/2.1/default/ComposerBase.h
@@ -38,6 +38,8 @@
public:
virtual ~ComposerBase() {};
+ virtual bool hasCapability(hwc2_capability_t capability) = 0;
+
virtual void removeClient() = 0;
virtual void enableCallback(bool enable) = 0;
virtual uint32_t getMaxVirtualDisplayCount() = 0;
diff --git a/graphics/composer/2.1/default/ComposerClient.cpp b/graphics/composer/2.1/default/ComposerClient.cpp
index e792034..4e6dd4f 100644
--- a/graphics/composer/2.1/default/ComposerClient.cpp
+++ b/graphics/composer/2.1/default/ComposerClient.cpp
@@ -748,15 +748,17 @@
}
// First try to Present as is.
- int presentFence = -1;
- std::vector<Layer> layers;
- std::vector<int> fences;
- auto err = mHal.presentDisplay(mDisplay, &presentFence, &layers, &fences);
- if (err == Error::NONE) {
- mWriter.setPresentOrValidateResult(1);
- mWriter.setPresentFence(presentFence);
- mWriter.setReleaseFences(layers, fences);
- return true;
+ if (mHal.hasCapability(HWC2_CAPABILITY_SKIP_VALIDATE)) {
+ int presentFence = -1;
+ std::vector<Layer> layers;
+ std::vector<int> fences;
+ auto err = mHal.presentDisplay(mDisplay, &presentFence, &layers, &fences);
+ if (err == Error::NONE) {
+ mWriter.setPresentOrValidateResult(1);
+ mWriter.setPresentFence(presentFence);
+ mWriter.setReleaseFences(layers, fences);
+ return true;
+ }
}
// Present has failed. We need to fallback to validate
@@ -766,9 +768,8 @@
std::vector<Layer> requestedLayers;
std::vector<uint32_t> requestMasks;
- err = mHal.validateDisplay(mDisplay, &changedLayers,
- &compositionTypes, &displayRequestMask,
- &requestedLayers, &requestMasks);
+ auto err = mHal.validateDisplay(mDisplay, &changedLayers, &compositionTypes,
+ &displayRequestMask, &requestedLayers, &requestMasks);
if (err == Error::NONE) {
mWriter.setPresentOrValidateResult(0);
mWriter.setChangedCompositionTypes(changedLayers,
diff --git a/graphics/composer/2.1/default/Hwc.cpp b/graphics/composer/2.1/default/Hwc.cpp
index fdb4af8..cb393ec 100644
--- a/graphics/composer/2.1/default/Hwc.cpp
+++ b/graphics/composer/2.1/default/Hwc.cpp
@@ -47,8 +47,7 @@
}
initCapabilities();
- if (majorVersion >= 2 &&
- hasCapability(Capability::PRESENT_FENCE_IS_NOT_RELIABLE)) {
+ if (majorVersion >= 2 && hasCapability(HWC2_CAPABILITY_PRESENT_FENCE_IS_NOT_RELIABLE)) {
ALOGE("Present fence must be reliable from HWC2 on.");
abort();
}
@@ -114,12 +113,14 @@
uint32_t count = 0;
mDevice->getCapabilities(mDevice, &count, nullptr);
- std::vector<Capability> caps(count);
- mDevice->getCapabilities(mDevice, &count, reinterpret_cast<
- std::underlying_type<Capability>::type*>(caps.data()));
+ std::vector<int32_t> caps(count);
+ mDevice->getCapabilities(mDevice, &count, caps.data());
caps.resize(count);
- mCapabilities.insert(caps.cbegin(), caps.cend());
+ mCapabilities.reserve(count);
+ for (auto cap : caps) {
+ mCapabilities.insert(static_cast<hwc2_capability_t>(cap));
+ }
}
template<typename T>
@@ -188,7 +189,7 @@
initDispatch(HWC2_FUNCTION_SET_LAYER_PLANE_ALPHA,
&mDispatch.setLayerPlaneAlpha);
- if (hasCapability(Capability::SIDEBAND_STREAM)) {
+ if (hasCapability(HWC2_CAPABILITY_SIDEBAND_STREAM)) {
initDispatch(HWC2_FUNCTION_SET_LAYER_SIDEBAND_STREAM,
&mDispatch.setLayerSidebandStream);
}
@@ -208,15 +209,26 @@
initDispatch(HWC2_FUNCTION_VALIDATE_DISPLAY, &mDispatch.validateDisplay);
}
-bool HwcHal::hasCapability(Capability capability) const
-{
+bool HwcHal::hasCapability(hwc2_capability_t capability) {
return (mCapabilities.count(capability) > 0);
}
Return<void> HwcHal::getCapabilities(getCapabilities_cb hidl_cb)
{
- std::vector<Capability> caps(
- mCapabilities.cbegin(), mCapabilities.cend());
+ std::vector<Capability> caps;
+ caps.reserve(mCapabilities.size());
+ for (auto cap : mCapabilities) {
+ switch (cap) {
+ case HWC2_CAPABILITY_SIDEBAND_STREAM:
+ case HWC2_CAPABILITY_SKIP_CLIENT_COLOR_TRANSFORM:
+ case HWC2_CAPABILITY_PRESENT_FENCE_IS_NOT_RELIABLE:
+ caps.push_back(static_cast<Capability>(cap));
+ break;
+ default:
+ // not all HWC2 caps are defined in HIDL
+ break;
+ }
+ }
hidl_vec<Capability> caps_reply;
caps_reply.setToExternal(caps.data(), caps.size());
diff --git a/graphics/composer/2.1/default/Hwc.h b/graphics/composer/2.1/default/Hwc.h
index 32c6b0b..e3f5ce6 100644
--- a/graphics/composer/2.1/default/Hwc.h
+++ b/graphics/composer/2.1/default/Hwc.h
@@ -58,14 +58,13 @@
HwcHal(const hw_module_t* module);
virtual ~HwcHal();
- bool hasCapability(Capability capability) const;
-
// IComposer interface
Return<void> getCapabilities(getCapabilities_cb hidl_cb) override;
Return<void> dumpDebugInfo(dumpDebugInfo_cb hidl_cb) override;
Return<void> createClient(createClient_cb hidl_cb) override;
// ComposerBase interface
+ bool hasCapability(hwc2_capability_t capability) override;
void removeClient() override;
void enableCallback(bool enable) override;
uint32_t getMaxVirtualDisplayCount() override;
@@ -168,7 +167,7 @@
hwc2_device_t* mDevice;
- std::unordered_set<Capability> mCapabilities;
+ std::unordered_set<hwc2_capability_t> mCapabilities;
struct {
HWC2_PFN_ACCEPT_DISPLAY_CHANGES acceptDisplayChanges;
diff --git a/keymaster/3.0/default/Android.mk b/keymaster/3.0/default/Android.mk
index 6b66f7d..9e7d04a 100644
--- a/keymaster/3.0/default/Android.mk
+++ b/keymaster/3.0/default/Android.mk
@@ -12,7 +12,6 @@
libsoftkeymasterdevice \
libcrypto \
libkeymaster_portable \
- libkeymaster_staging \
libpuresoftkeymasterdevice \
libkeymaster3device \
libhidlbase \
diff --git a/keymaster/4.0/Android.bp b/keymaster/4.0/Android.bp
index 378204a..20c40a0 100644
--- a/keymaster/4.0/Android.bp
+++ b/keymaster/4.0/Android.bp
@@ -15,11 +15,17 @@
"android.hidl.base@1.0",
],
types: [
+ "Constants",
+ "ErrorCode",
"HardwareAuthToken",
+ "HmacSharingParameters",
"KeyCharacteristics",
+ "KeyOrigin",
"KeyParameter",
"KeyPurpose",
+ "SecurityLevel",
"Tag",
+ "VerificationToken",
],
gen_java: false,
}
diff --git a/light/2.0/default/Android.bp b/light/2.0/default/Android.bp
index 8d4eca1..8b5f780 100644
--- a/light/2.0/default/Android.bp
+++ b/light/2.0/default/Android.bp
@@ -15,7 +15,7 @@
cc_library_shared {
name: "android.hardware.light@2.0-impl",
defaults: ["hidl_defaults"],
- proprietary: true,
+ vendor: true,
relative_install_path: "hw",
srcs: ["Light.cpp"],
@@ -32,10 +32,10 @@
}
cc_binary {
+ name: "android.hardware.light@2.0-service",
relative_install_path: "hw",
defaults: ["hidl_defaults"],
- proprietary: true,
- name: "android.hardware.light@2.0-service",
+ vendor: true,
init_rc: ["android.hardware.light@2.0-service.rc"],
srcs: ["service.cpp"],
diff --git a/light/2.0/default/Light.cpp b/light/2.0/default/Light.cpp
index cde1536..5484d2d 100644
--- a/light/2.0/default/Light.cpp
+++ b/light/2.0/default/Light.cpp
@@ -18,6 +18,8 @@
#include <log/log.h>
+#include <stdio.h>
+
#include "Light.h"
namespace android {
@@ -107,6 +109,28 @@
{Type::WIFI, LIGHT_ID_WIFI}
};
+Return<void> Light::debug(const hidl_handle& handle, const hidl_vec<hidl_string>& /* options */) {
+ if (handle == nullptr || handle->numFds < 1) {
+ ALOGE("debug called with no handle\n");
+ return Void();
+ }
+
+ int fd = handle->data[0];
+ if (fd < 0) {
+ ALOGE("invalid FD: %d\n", handle->data[0]);
+ return Void();
+ }
+
+ dprintf(fd, "The following lights are registered: ");
+ for (auto const& pair : mLights) {
+ const Type type = pair.first;
+ dprintf(fd, "%s,", kLogicalLights.at(type));
+ }
+ dprintf(fd, ".\n");
+ fsync(fd);
+ return Void();
+}
+
light_device_t* getLightDevice(const char* name) {
light_device_t* lightDevice;
const hw_module_t* hwModule = NULL;
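The new ILight::debug() method writes its dump to the first file descriptor in the handle it receives; on a device this path is typically exercised through lshal's debug command. The snippet below is only a hedged sketch of a direct caller, with dumpLightsToStdout as a hypothetical helper that wraps stdout in a native_handle:

#include <unistd.h>

#include <cutils/native_handle.h>

#include <android/hardware/light/2.0/ILight.h>

using android::sp;
using android::hardware::hidl_handle;
using android::hardware::hidl_string;
using android::hardware::hidl_vec;
using android::hardware::light::V2_0::ILight;

// Hypothetical helper: ask the default light HAL to dump its state to stdout.
void dumpLightsToStdout() {
    sp<ILight> light = ILight::getService();  // default instance
    if (light == nullptr) return;
    native_handle_t* nh = native_handle_create(1 /* numFds */, 0 /* numInts */);
    nh->data[0] = STDOUT_FILENO;
    hidl_handle handle;
    handle.setTo(nh, false /* shouldOwn */);  // keep ownership of nh and the fd
    light->debug(handle, hidl_vec<hidl_string>() /* no options */);
    native_handle_delete(nh);  // frees only the struct; stdout stays open
}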
diff --git a/light/2.0/default/Light.h b/light/2.0/default/Light.h
index 8987036..8851461 100644
--- a/light/2.0/default/Light.h
+++ b/light/2.0/default/Light.h
@@ -42,11 +42,12 @@
struct Light : public ILight {
Light(std::map<Type, light_device_t*> &&lights);
- // Methods from ::android::hardware::light::V2_0::ILight follow.
Return<Status> setLight(Type type, const LightState& state) override;
Return<void> getSupportedTypes(getSupportedTypes_cb _hidl_cb) override;
-private:
+ Return<void> debug(const hidl_handle& handle, const hidl_vec<hidl_string>& options) override;
+
+ private:
std::map<Type, light_device_t*> mLights;
};
diff --git a/media/1.0/xml/Android.mk b/media/1.0/xml/Android.mk
new file mode 100644
index 0000000..bc44b9e
--- /dev/null
+++ b/media/1.0/xml/Android.mk
@@ -0,0 +1,14 @@
+LOCAL_PATH := $(call my-dir)
+
+#######################################
+# media_profiles_V1_0.dtd
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := media_profiles_V1_0.dtd
+LOCAL_SRC_FILES := media_profiles.dtd
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)
+
+include $(BUILD_PREBUILT)
+
diff --git a/media/1.0/media_profiles.dtd b/media/1.0/xml/media_profiles.dtd
similarity index 100%
rename from media/1.0/media_profiles.dtd
rename to media/1.0/xml/media_profiles.dtd
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_test.cpp b/radio/1.0/vts/functional/radio_hidl_hal_test.cpp
index 247e12c..1c7653b 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_test.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_test.cpp
@@ -48,8 +48,6 @@
EXPECT_EQ(CardState::ABSENT, cardStatus.cardState);
}
-void RadioHidlTest::TearDown() {}
-
void RadioHidlTest::notify() {
std::unique_lock<std::mutex> lock(mtx);
count++;
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_utils_v1_0.h b/radio/1.0/vts/functional/radio_hidl_hal_utils_v1_0.h
index a5e8701..6b95ab0 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_utils_v1_0.h
+++ b/radio/1.0/vts/functional/radio_hidl_hal_utils_v1_0.h
@@ -521,8 +521,6 @@
public:
virtual void SetUp() override;
- virtual void TearDown() override;
-
/* Used as a mechanism to inform the test about data/event callback */
void notify();
diff --git a/radio/1.1/vts/functional/radio_hidl_hal_test.cpp b/radio/1.1/vts/functional/radio_hidl_hal_test.cpp
index e1da591..773d165 100644
--- a/radio/1.1/vts/functional/radio_hidl_hal_test.cpp
+++ b/radio/1.1/vts/functional/radio_hidl_hal_test.cpp
@@ -49,8 +49,6 @@
EXPECT_EQ(CardState::ABSENT, cardStatus.cardState);
}
-void RadioHidlTest_v1_1::TearDown() {}
-
void RadioHidlTest_v1_1::notify() {
std::unique_lock<std::mutex> lock(mtx);
count++;
diff --git a/radio/1.1/vts/functional/radio_hidl_hal_utils_v1_1.h b/radio/1.1/vts/functional/radio_hidl_hal_utils_v1_1.h
index 523b9ba..a081ab9 100644
--- a/radio/1.1/vts/functional/radio_hidl_hal_utils_v1_1.h
+++ b/radio/1.1/vts/functional/radio_hidl_hal_utils_v1_1.h
@@ -544,20 +544,12 @@
public:
virtual void SetUp() override;
- virtual void TearDown() override;
-
/* Used as a mechanism to inform the test about data/event callback */
void notify();
/* Test code calls this function to wait for response */
std::cv_status wait(int sec = TIMEOUT_PERIOD);
- /* Used for checking General Errors */
- bool CheckGeneralError();
-
- /* Used for checking OEM Errors */
- bool CheckOEMError();
-
sp<::android::hardware::radio::V1_1::IRadio> radio_v1_1;
sp<RadioResponse_v1_1> radioRsp_v1_1;
sp<RadioIndication_v1_1> radioInd_v1_1;
diff --git a/wifi/1.2/default/wifi_chip.h b/wifi/1.2/default/wifi_chip.h
index 97c434e..b5dcc8c 100644
--- a/wifi/1.2/default/wifi_chip.h
+++ b/wifi/1.2/default/wifi_chip.h
@@ -20,7 +20,7 @@
#include <map>
#include <android-base/macros.h>
-#include <android/hardware/wifi/1.1/IWifiChip.h>
+#include <android/hardware/wifi/1.2/IWifiChip.h>
#include "hidl_callback_util.h"
#include "wifi_ap_iface.h"
@@ -44,7 +44,7 @@
* Since there is only a single chip instance used today, there is no
* identifying handle information stored here.
*/
-class WifiChip : public V1_1::IWifiChip {
+class WifiChip : public V1_2::IWifiChip {
public:
WifiChip(
ChipId chip_id,
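With WifiChip now implementing V1_2::IWifiChip, a client holding an older chip handle can probe for the 1.2 interface through the usual castFrom pattern. A minimal sketch (assuming a chip handle was already obtained from IWifi; upgradeChip is a hypothetical helper):

#include <android/hardware/wifi/1.2/IWifiChip.h>

using android::sp;
namespace wifi = android::hardware::wifi;

// Hypothetical helper: try to view a 1.0 chip handle through the 1.2 interface.
sp<wifi::V1_2::IWifiChip> upgradeChip(const sp<wifi::V1_0::IWifiChip>& chip) {
    // castFrom checks the remote interface chain; it yields nullptr when the
    // service only implements an older IWifiChip version.
    sp<wifi::V1_2::IWifiChip> chip_1_2 = wifi::V1_2::IWifiChip::castFrom(chip);
    return chip_1_2;
}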
diff --git a/wifi/supplicant/1.0/vts/functional/Android.bp b/wifi/supplicant/1.0/vts/functional/Android.bp
index 24b9f6f..f742ecd 100644
--- a/wifi/supplicant/1.0/vts/functional/Android.bp
+++ b/wifi/supplicant/1.0/vts/functional/Android.bp
@@ -14,19 +14,37 @@
// limitations under the License.
//
+cc_library_static {
+ name: "VtsHalWifiSupplicantV1_0TargetTestUtil",
+ defaults: ["VtsHalTargetTestDefaults"],
+ srcs: ["supplicant_hidl_test_utils.cpp"],
+ export_include_dirs: [
+ "."
+ ],
+ static_libs: [
+ "VtsHalWifiV1_0TargetTestUtil",
+ "android.hardware.wifi.supplicant@1.0",
+ "android.hardware.wifi@1.0",
+ "libcrypto",
+ "libgmock",
+ "libwifi-system",
+ "libwifi-system-iface",
+ ],
+}
+
cc_test {
name: "VtsHalWifiSupplicantV1_0TargetTest",
defaults: ["VtsHalTargetTestDefaults"],
srcs: [
"VtsHalWifiSupplicantV1_0TargetTest.cpp",
"supplicant_hidl_test.cpp",
- "supplicant_hidl_test_utils.cpp",
"supplicant_p2p_iface_hidl_test.cpp",
"supplicant_sta_iface_hidl_test.cpp",
"supplicant_sta_network_hidl_test.cpp",
],
static_libs: [
"VtsHalWifiV1_0TargetTestUtil",
+ "VtsHalWifiSupplicantV1_0TargetTestUtil",
"android.hardware.wifi.supplicant@1.0",
"android.hardware.wifi@1.0",
"libcrypto",
diff --git a/wifi/supplicant/1.1/ISupplicant.hal b/wifi/supplicant/1.1/ISupplicant.hal
index 5c60b35..508a545 100644
--- a/wifi/supplicant/1.1/ISupplicant.hal
+++ b/wifi/supplicant/1.1/ISupplicant.hal
@@ -17,6 +17,8 @@
package android.hardware.wifi.supplicant@1.1;
import @1.0::ISupplicant;
+import @1.0::ISupplicantIface;
+import @1.0::SupplicantStatus;
/**
* Interface exposed by the supplicant HIDL service registered
@@ -24,4 +26,32 @@
* This is the root level object for any of the supplicant interactions.
*/
interface ISupplicant extends @1.0::ISupplicant {
+ /**
+ * Registers a wireless interface in supplicant.
+ *
+ * @param ifaceInfo Combination of the interface type and name (e.g. wlan0).
+ * @return status Status of the operation.
+ * Possible status codes:
+ * |SupplicantStatusCode.SUCCESS|,
+ * |SupplicantStatusCode.FAILURE_ARGS_INVALID|,
+ * |SupplicantStatusCode.FAILURE_UNKNOWN|,
+ * |SupplicantStatusCode.FAILURE_IFACE_EXISTS|
+ * @return iface HIDL interface object representing the interface if
+ * successful, null otherwise.
+ */
+ addInterface(IfaceInfo ifaceInfo)
+ generates (SupplicantStatus status, ISupplicantIface iface);
+
+ /**
+ * Deregisters a wireless interface from supplicant.
+ *
+ * @param ifaceInfo Combination of the interface type and name (e.g. wlan0).
+ * @return status Status of the operation.
+ * Possible status codes:
+ * |SupplicantStatusCode.SUCCESS|,
+ * |SupplicantStatusCode.FAILURE_ARGS_INVALID|,
+ * |SupplicantStatusCode.FAILURE_UNKNOWN|,
+ * |SupplicantStatusCode.FAILURE_IFACE_UNKNOWN|
+ */
+ removeInterface(IfaceInfo ifaceInfo) generates (SupplicantStatus status);
};
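The new VTS cases further below exercise the success paths of these calls. A complementary hedged sketch (removeMissingIface and the interface name are hypothetical) shows the failure path that the FAILURE_IFACE_UNKNOWN status is documented for, i.e. removing an interface supplicant does not know about:

#include <android/hardware/wifi/supplicant/1.0/types.h>
#include <android/hardware/wifi/supplicant/1.1/ISupplicant.h>

using android::sp;
namespace supp = android::hardware::wifi::supplicant;

// Hypothetical helper: removing an unregistered interface should not succeed.
void removeMissingIface(const sp<supp::V1_1::ISupplicant>& supplicant) {
    supp::V1_0::ISupplicant::IfaceInfo info;
    info.type = supp::V1_0::IfaceType::STA;
    info.name = "wlan7";  // assumed to not be registered with supplicant
    supplicant->removeInterface(
        info, [](const supp::V1_0::SupplicantStatus& status) {
            // Per the method documentation, the expected code here is
            // FAILURE_IFACE_UNKNOWN rather than SUCCESS.
            if (status.code == supp::V1_0::SupplicantStatusCode::SUCCESS) {
                // unexpected: the interface was apparently registered after all
            }
        });
}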
diff --git a/wifi/supplicant/1.1/vts/Android.mk b/wifi/supplicant/1.1/vts/Android.mk
new file mode 100644
index 0000000..6361f9b
--- /dev/null
+++ b/wifi/supplicant/1.1/vts/Android.mk
@@ -0,0 +1,2 @@
+LOCAL_PATH := $(call my-dir)
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/wifi/supplicant/1.1/vts/functional/Android.bp b/wifi/supplicant/1.1/vts/functional/Android.bp
new file mode 100644
index 0000000..9375cf5
--- /dev/null
+++ b/wifi/supplicant/1.1/vts/functional/Android.bp
@@ -0,0 +1,56 @@
+//
+// Copyright (C) 2017 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+cc_library_static {
+ name: "VtsHalWifiSupplicantV1_1TargetTestUtil",
+ defaults: ["VtsHalTargetTestDefaults"],
+ srcs: ["supplicant_hidl_test_utils_1_1.cpp"],
+ export_include_dirs: [
+ "."
+ ],
+ static_libs: [
+ "VtsHalWifiV1_0TargetTestUtil",
+ "VtsHalWifiSupplicantV1_0TargetTestUtil",
+ "android.hardware.wifi.supplicant@1.0",
+ "android.hardware.wifi.supplicant@1.1",
+ "android.hardware.wifi@1.0",
+ "libcrypto",
+ "libgmock",
+ "libwifi-system",
+ "libwifi-system-iface",
+ ],
+}
+
+cc_test {
+ name: "VtsHalWifiSupplicantV1_1TargetTest",
+ defaults: ["VtsHalTargetTestDefaults"],
+ srcs: [
+ "VtsHalWifiSupplicantV1_1TargetTest.cpp",
+ "supplicant_hidl_test.cpp",
+ ],
+ static_libs: [
+ "VtsHalWifiV1_0TargetTestUtil",
+ "VtsHalWifiSupplicantV1_0TargetTestUtil",
+ "VtsHalWifiSupplicantV1_1TargetTestUtil",
+ "android.hardware.wifi.supplicant@1.0",
+ "android.hardware.wifi.supplicant@1.1",
+ "android.hardware.wifi@1.0",
+ "libcrypto",
+ "libgmock",
+ "libwifi-system",
+ "libwifi-system-iface",
+ ],
+}
diff --git a/wifi/supplicant/1.1/vts/functional/VtsHalWifiSupplicantV1_1TargetTest.cpp b/wifi/supplicant/1.1/vts/functional/VtsHalWifiSupplicantV1_1TargetTest.cpp
new file mode 100644
index 0000000..81893e5
--- /dev/null
+++ b/wifi/supplicant/1.1/vts/functional/VtsHalWifiSupplicantV1_1TargetTest.cpp
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/logging.h>
+
+#include <VtsHalHidlTargetTestBase.h>
+
+#include "supplicant_hidl_test_utils.h"
+
+class SupplicantHidlEnvironment : public ::testing::Environment {
+ public:
+ virtual void SetUp() override { stopSupplicant(); }
+ virtual void TearDown() override {}
+};
+
+int main(int argc, char** argv) {
+ ::testing::AddGlobalTestEnvironment(new SupplicantHidlEnvironment);
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = RUN_ALL_TESTS();
+ LOG(INFO) << "Test result = " << status;
+ return status;
+}
diff --git a/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test.cpp b/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test.cpp
new file mode 100644
index 0000000..c29fd0a
--- /dev/null
+++ b/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/logging.h>
+#include <cutils/properties.h>
+
+#include <VtsHalHidlTargetTestBase.h>
+
+#include <android/hardware/wifi/supplicant/1.0/types.h>
+#include <android/hardware/wifi/supplicant/1.1/ISupplicant.h>
+
+#include "supplicant_hidl_test_utils.h"
+#include "supplicant_hidl_test_utils_1_1.h"
+
+using ::android::hardware::hidl_vec;
+using ::android::hardware::wifi::supplicant::V1_0::ISupplicantIface;
+using ::android::hardware::wifi::supplicant::V1_0::SupplicantStatus;
+using ::android::hardware::wifi::supplicant::V1_0::SupplicantStatusCode;
+using ::android::hardware::wifi::supplicant::V1_0::IfaceType;
+using ::android::hardware::wifi::supplicant::V1_1::ISupplicant;
+using ::android::sp;
+
+class SupplicantHidlTest : public ::testing::VtsHalHidlTargetTestBase {
+ public:
+ virtual void SetUp() override {
+ startSupplicantAndWaitForHidlService();
+ supplicant_ = getSupplicant_1_1();
+ ASSERT_NE(supplicant_.get(), nullptr);
+ }
+
+ virtual void TearDown() override { stopSupplicant(); }
+
+ protected:
+ // ISupplicant object used for all tests in this fixture.
+ sp<ISupplicant> supplicant_;
+
+ std::string getWlan0IfaceName() {
+ std::array<char, PROPERTY_VALUE_MAX> buffer;
+ property_get("wifi.interface", buffer.data(), "wlan0");
+ return buffer.data();
+ }
+
+ std::string getP2pIfaceName() {
+ std::array<char, PROPERTY_VALUE_MAX> buffer;
+ property_get("wifi.direct.interface", buffer.data(), "p2p0");
+ return buffer.data();
+ }
+};
+
+/*
+ * AddStaInterface
+ */
+TEST_F(SupplicantHidlTest, AddStaInterface) {
+ ISupplicant::IfaceInfo iface_info;
+ iface_info.name = getWlan0IfaceName();
+ iface_info.type = IfaceType::STA;
+ supplicant_->addInterface(
+ iface_info,
+ [&](const SupplicantStatus& status, const sp<ISupplicantIface>& iface) {
+ EXPECT_TRUE(
+ (status.code == SupplicantStatusCode::SUCCESS) ||
+ (status.code == SupplicantStatusCode::FAILURE_IFACE_EXISTS));
+ EXPECT_NE(nullptr, iface.get());
+ });
+}
+
+/*
+ * AddP2pInterface
+ */
+TEST_F(SupplicantHidlTest, AddP2pInterface) {
+ ISupplicant::IfaceInfo iface_info;
+ iface_info.name = getP2pIfaceName();
+ iface_info.type = IfaceType::P2P;
+ supplicant_->addInterface(
+ iface_info,
+ [&](const SupplicantStatus& status, const sp<ISupplicantIface>& iface) {
+ EXPECT_TRUE(
+ (status.code == SupplicantStatusCode::SUCCESS) ||
+ (status.code == SupplicantStatusCode::FAILURE_IFACE_EXISTS));
+ EXPECT_NE(nullptr, iface.get());
+ });
+}
+
+/*
+ * RemoveStaInterface
+ */
+TEST_F(SupplicantHidlTest, RemoveStaInterface) {
+ ISupplicant::IfaceInfo iface_info;
+ iface_info.name = getWlan0IfaceName();
+ iface_info.type = IfaceType::STA;
+
+ supplicant_->addInterface(
+ iface_info,
+ [&](const SupplicantStatus& status, const sp<ISupplicantIface>& iface) {
+ EXPECT_TRUE(
+ (status.code == SupplicantStatusCode::SUCCESS) ||
+ (status.code == SupplicantStatusCode::FAILURE_IFACE_EXISTS));
+ EXPECT_NE(nullptr, iface.get());
+ });
+ supplicant_->removeInterface(
+ iface_info, [&](const SupplicantStatus& status) {
+ EXPECT_EQ(SupplicantStatusCode::SUCCESS, status.code);
+ });
+}
+
+/*
+ * RemoveP2pInterface
+ */
+TEST_F(SupplicantHidlTest, RemoveP2pInterface) {
+ ISupplicant::IfaceInfo iface_info;
+ iface_info.name = getP2pIfaceName();
+ iface_info.type = IfaceType::P2P;
+
+ supplicant_->addInterface(
+ iface_info,
+ [&](const SupplicantStatus& status, const sp<ISupplicantIface>& iface) {
+ EXPECT_TRUE(
+ (status.code == SupplicantStatusCode::SUCCESS) ||
+ (status.code == SupplicantStatusCode::FAILURE_IFACE_EXISTS));
+ EXPECT_NE(nullptr, iface.get());
+ });
+ supplicant_->removeInterface(
+ iface_info, [&](const SupplicantStatus& status) {
+ EXPECT_EQ(SupplicantStatusCode::SUCCESS, status.code);
+ });
+}
diff --git a/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.cpp b/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.cpp
new file mode 100644
index 0000000..8cc4a9f
--- /dev/null
+++ b/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <VtsHalHidlTargetTestBase.h>
+#include <android-base/logging.h>
+
+#include "supplicant_hidl_test_utils.h"
+#include "supplicant_hidl_test_utils_1_1.h"
+
+using ::android::hardware::wifi::supplicant::V1_1::ISupplicant;
+using ::android::sp;
+
+sp<ISupplicant> getSupplicant_1_1() {
+ return ISupplicant::castFrom(getSupplicant());
+}
diff --git a/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.h b/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.h
new file mode 100644
index 0000000..c42a35b
--- /dev/null
+++ b/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SUPPLICANT_HIDL_TEST_UTILS_1_1_H
+#define SUPPLICANT_HIDL_TEST_UTILS_1_1_H
+
+#include <android/hardware/wifi/supplicant/1.1/ISupplicant.h>
+
+android::sp<android::hardware::wifi::supplicant::V1_1::ISupplicant>
+ getSupplicant_1_1();
+
+#endif /* SUPPLICANT_HIDL_TEST_UTILS_1_1_H */