Merge "wifi(implementation): Allow chip reconfiguration"
diff --git a/broadcastradio/1.2/Android.bp b/broadcastradio/1.2/Android.bp
index 913da8c..40eb4e0 100644
--- a/broadcastradio/1.2/Android.bp
+++ b/broadcastradio/1.2/Android.bp
@@ -18,6 +18,7 @@
         "android.hidl.base@1.0",
     ],
     types: [
+        "IdentifierType",
     ],
     gen_java: false,
 }
diff --git a/broadcastradio/2.0/Android.bp b/broadcastradio/2.0/Android.bp
new file mode 100644
index 0000000..5146932
--- /dev/null
+++ b/broadcastradio/2.0/Android.bp
@@ -0,0 +1,33 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+    name: "android.hardware.broadcastradio@2.0",
+    root: "android.hardware",
+    vndk: {
+        enabled: true,
+    },
+    srcs: [
+        "types.hal",
+        "IBroadcastRadio.hal",
+        "ITunerCallback.hal",
+        "ITunerSession.hal",
+    ],
+    interfaces: [
+        "android.hidl.base@1.0",
+    ],
+    types: [
+        "Constants",
+        "IdentifierType",
+        "Metadata",
+        "MetadataKey",
+        "ProgramIdentifier",
+        "ProgramInfo",
+        "ProgramInfoFlags",
+        "ProgramSelector",
+        "Properties",
+        "Result",
+        "VendorKeyValue",
+    ],
+    gen_java: true,
+}
+
diff --git a/broadcastradio/2.0/IBroadcastRadio.hal b/broadcastradio/2.0/IBroadcastRadio.hal
new file mode 100644
index 0000000..3ab1cc2
--- /dev/null
+++ b/broadcastradio/2.0/IBroadcastRadio.hal
@@ -0,0 +1,81 @@
+/* Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.broadcastradio@2.0;
+
+import ITunerCallback;
+import ITunerSession;
+
+/**
+ * Represents a hardware broadcast radio module. A single module may contain
+ * multiple hardware tuners (e.g. with an additional background tuner), but the
+ * layers above the HAL see them as a single logical unit.
+ */
+interface IBroadcastRadio {
+    /**
+     * Returns module properties: a description of a module and its
+     * capabilities. This method must not fail.
+     *
+     * @return properties Module description.
+     */
+    getProperties() generates (Properties properties);
+
+    /**
+     * Opens a new tuner session.
+     *
+     * Only one session may be active at a time. If a new session is requested
+     * while the old one is active, the old session must be terminated
+     * (aggressive open).
+     *
+     * @param callback The callback interface.
+     * @return result OK in case of success.
+     * @return session The session interface.
+     */
+    openSession(ITunerCallback callback)
+            generates (Result result, ITunerSession session);
+
+    /**
+     * Fetch image from radio module cache.
+     *
+     * This is an out-of-band transport mechanism for images carried with
+     * metadata. The metadata vector passes only the identifier, so the client
+     * may cache images or skip fetching them altogether.
+     *
+     * The identifier may be an arbitrary number (e.g. a sha256 prefix) selected
+     * by the vendor. It must be stable across sessions so the application may
+     * cache it.
+     *
+     * The data must be a valid PNG, JPEG, GIF or BMP file.
+     * Image data with an invalid format must be handled gracefully in the same
+     * way as a missing image.
+     *
+     * The image identifier may become invalid some time after it was passed in
+     * the metadata struct (due to resource cleanup at the HAL implementation).
+     * However, it must remain valid for the currently tuned program at least
+     * until onCurrentProgramInfoChanged is called.
+     *
+     * A race condition is still possible between the onCurrentProgramInfoChanged
+     * callback and the HAL implementation eagerly clearing the cache (because
+     * the next onCurrentProgramInfoChanged has already arrived). In such a case,
+     * the client application may expect a new onCurrentProgramInfoChanged
+     * callback with an updated image identifier.
+     *
+     * @param id Identifier of an image (value of Constants::INVALID_IMAGE is
+     *           reserved and must be treated as an invalid image).
+     * @return image A binary blob with image data
+     *               or a zero-length vector if the identifier doesn't exist.
+     */
+    getImage(uint32_t id) generates (vec<uint8_t> image);
+};
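
For reference, a minimal client-side sketch of the calls above, assuming the standard hidl-gen C++ bindings for this package; the image identifier and the handling shown are illustrative only, not part of this interface.

#include <android/hardware/broadcastradio/2.0/IBroadcastRadio.h>

using namespace android::hardware::broadcastradio::V2_0;
using android::sp;
using android::hardware::hidl_vec;

void queryModule(const sp<IBroadcastRadio>& module, const sp<ITunerCallback>& cb) {
    // getProperties must not fail; the struct arrives via a synchronous callback.
    module->getProperties([](const Properties& prop) {
        // prop.maker, prop.product, prop.supportedIdentifierTypes, ...
    });

    // Aggressive open: any previously active session gets terminated by the HAL.
    sp<ITunerSession> session;
    module->openSession(cb, [&](Result res, const sp<ITunerSession>& s) {
        if (res == Result::OK) session = s;
    });

    // Out-of-band image fetch; a zero-length vector means the id is unknown or stale.
    module->getImage(/* placeholder id taken from metadata */ 1234u,
                     [](const hidl_vec<uint8_t>& image) {
        if (image.size() == 0) { /* treat as a missing image */ }
    });
}
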
diff --git a/broadcastradio/2.0/ITunerCallback.hal b/broadcastradio/2.0/ITunerCallback.hal
new file mode 100644
index 0000000..1aefc4e
--- /dev/null
+++ b/broadcastradio/2.0/ITunerCallback.hal
@@ -0,0 +1,67 @@
+/* Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.broadcastradio@2.0;
+
+interface ITunerCallback {
+    /**
+     * Method called by the HAL when a tuning operation fails
+     * following a step(), scan() or tune() command.
+     *
+     * @param result OK if tune succeeded;
+     *               TIMEOUT in case of a timeout.
+     * @param selector A ProgramSelector structure passed from tune(),
+     *                 empty for step() and scan().
+     */
+    oneway onTuneFailed(Result result, ProgramSelector selector);
+
+    /**
+     * Method called by the HAL when current program information (including
+     * metadata) is updated.
+     *
+     * This is also called when the radio is tuned to static (not a valid
+     * station); see the TUNED flag of ProgramInfoFlags.
+     *
+     * @param info Current program information.
+     */
+    oneway onCurrentProgramInfoChanged(ProgramInfo info);
+
+    /**
+     * Method called by the HAL when the antenna gets connected or disconnected.
+     *
+     * For a new tuner session, the client must assume the antenna is connected.
+     * If it is not, onAntennaStateChange must be called within
+     * Constants::ANTENNA_DISCONNECTED_TIMEOUT_MS to indicate that.
+     *
+     * @param connected True if the antenna is now connected, false otherwise.
+     */
+    oneway onAntennaStateChange(bool connected);
+
+    /**
+     * Generic callback for passing updates to vendor-specific parameter values.
+     * The framework does not interpret the parameters; they are passed
+     * in an opaque manner between a vendor application and the HAL.
+     *
+     * It's up to the HAL implementation whether and how to implement this
+     * callback, as long as it obeys the prefix rule. In particular, only selected
+     * keys may be notified this way. However, setParameters must not trigger this
+     * callback, whereas an internal event may change parameters asynchronously.
+     *
+     * @param parameters Vendor-specific key-value pairs,
+     *                   opaque to Android framework.
+     */
+    oneway onParametersUpdated(vec<VendorKeyValue> parameters);
+};
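
A minimal client-side implementation sketch of this callback interface, assuming the standard hidl-gen C++ bindings; the class name and comments are illustrative.

#include <android/hardware/broadcastradio/2.0/ITunerCallback.h>

using namespace android::hardware::broadcastradio::V2_0;
using android::hardware::Return;
using android::hardware::Void;
using android::hardware::hidl_vec;

// All methods are oneway, so they must return promptly and never block the HAL.
struct TunerCallback : public ITunerCallback {
    Return<void> onTuneFailed(Result result, const ProgramSelector& selector) override {
        // selector is empty when the failed operation was step() or scan()
        return Void();
    }
    Return<void> onCurrentProgramInfoChanged(const ProgramInfo& info) override {
        // also delivered when tuned to static; check ProgramInfoFlags::TUNED
        return Void();
    }
    Return<void> onAntennaStateChange(bool connected) override { return Void(); }
    Return<void> onParametersUpdated(const hidl_vec<VendorKeyValue>& parameters) override {
        return Void();
    }
};
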
diff --git a/broadcastradio/2.0/ITunerSession.hal b/broadcastradio/2.0/ITunerSession.hal
new file mode 100644
index 0000000..ae6cbb5
--- /dev/null
+++ b/broadcastradio/2.0/ITunerSession.hal
@@ -0,0 +1,137 @@
+/* Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.broadcastradio@2.0;
+
+interface ITunerSession {
+    /**
+     * Tune to a specified program.
+     *
+     * Automatically cancels a pending scan, step or tune.
+     * If the method returns OK, the onTuneFailed or onCurrentProgramInfoChanged
+     * callback must be called.
+     *
+     * @param program Program to tune to.
+     * @return result OK if successfully started tuning.
+     *                NOT_SUPPORTED if the program selector doesn't contain any
+     *                supported identifier.
+     *                INVALID_ARGUMENTS if the program selector contains
+     *                identifiers in invalid format (i.e. out of range).
+     *                identifiers in an invalid format (e.g. out of range).
+    tune(ProgramSelector program) generates (Result result);
+
+    /**
+     * Tune to the next valid program.
+     *
+     * Automatically cancels a pending scan, step or tune.
+     * If the method returns OK, the onTuneFailed or onCurrentProgramInfoChanged
+     * callback must be called.
+     *
+     * The skipSubChannel parameter is used to skip digital radio subchannels:
+     *  - HD Radio SPS;
+     *  - DAB secondary service.
+     *
+     * As an implementation detail, the HAL has the option to perform an actual
+     * scan or select the next program from the list retrieved in the
+     * background, if one is not stale.
+     *
+     * @param directionUp True to change towards higher numeric values
+     *                    (frequency, channel number), false towards lower.
+     * @param skipSubChannel Don't tune to subchannels.
+     * @return result OK if the scan has successfully started.
+     */
+    scan(bool directionUp, bool skipSubChannel) generates (Result result);
+
+    /**
+     * Tune to the adjacent channel, which might not be occupied by any program.
+     *
+     * Automatically cancels a pending scan, step or tune.
+     * If the method returns OK, the onTuneFailed or onCurrentProgramInfoChanged
+     * callback must be called.
+     *
+     * @param directionUp True to change towards higher numeric values
+     *                    (frequency, channel number), false towards lower.
+     * @return result OK if successfully started tuning.
+     *                NOT_SUPPORTED if tuning to an unoccupied channel is not
+     *                supported (e.g. for satellite radio).
+     */
+    step(bool directionUp) generates (Result result);
+
+    /**
+     * Cancel a scan, step or tune operation.
+     *
+     * If there is no such operation running, the call must be ignored.
+     */
+    cancel();
+
+    /**
+     * Generic method for setting vendor-specific parameter values.
+     * The framework does not interpret the parameters; they are passed
+     * in an opaque manner between a vendor application and the HAL.
+     *
+     * The framework does not make any assumptions about the keys or values,
+     * other than those stated in the VendorKeyValue documentation (a requirement
+     * of key prefixes).
+     *
+     * For each pair in the result vector, the key must be one of the keys
+     * contained in the input (possibly with wildcards expanded), and the value
+     * must be a vendor-specific result status (e.g. the string "OK" or an error
+     * code). The implementation may choose to return an empty vector, or only
+     * return a status for a subset of the provided inputs, at its discretion.
+     *
+     * The application and the HAL must not use keys with an unknown prefix.
+     * In particular, the HAL must not place a key-value pair in the results
+     * vector for an unknown key from the parameters vector; instead, an unknown
+     * key should simply be ignored. In other words, the results vector may
+     * contain a subset of the parameter keys (however, the framework doesn't
+     * enforce a strict subset; the only formal requirement is a vendor domain
+     * prefix for keys).
+     *
+     * @param parameters Vendor-specific key-value pairs.
+     * @return results Operation completion status for parameters being set.
+     */
+    setParameters(vec<VendorKeyValue> parameters)
+            generates (vec<VendorKeyValue> results);
+
+    /**
+     * Generic method for retrieving vendor-specific parameter values.
+     * The framework does not interpret the parameters; they are passed
+     * in an opaque manner between a vendor application and the HAL.
+     *
+     * The framework does not cache set/get requests, so getParameters is allowed
+     * to return a different value than a previous setParameters call.
+     *
+     * The syntax and semantics of keys are up to the vendor (as long as prefix
+     * rules are obeyed). For instance, vendors may include some form of
+     * wildcard support. In such a case, the result vector may be of a different
+     * size than the requested keys vector. However, wildcards are not recognized
+     * by the framework; they are passed as-is to the HAL implementation.
+     *
+     * Unknown keys must be ignored and not placed into the results vector.
+     *
+     * @param keys Parameter keys to fetch.
+     * @return parameters Vendor-specific key-value pairs.
+     */
+    getParameters(vec<string> keys) generates (vec<VendorKeyValue> parameters);
+
+    /**
+     * Closes the session.
+     *
+     * The call must not fail and must only be issued once.
+     *
+     * After the close call is executed, no other calls to this interface
+     * are allowed.
+     */
+    close();
+};
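
A sketch of driving a session through these methods, assuming the generated C++ bindings; the parameter key is a placeholder, and completion of tune()/scan()/step() is reported asynchronously through the ITunerCallback passed to openSession().

#include <android/hardware/broadcastradio/2.0/ITunerSession.h>

using namespace android::hardware::broadcastradio::V2_0;
using android::sp;
using android::hardware::hidl_string;
using android::hardware::hidl_vec;

void useSession(const sp<ITunerSession>& session, const ProgramSelector& program) {
    // tune() only starts the operation; onCurrentProgramInfoChanged or
    // onTuneFailed reports the outcome.
    auto res = session->tune(program);
    if (!res.isOk() || static_cast<Result>(res) != Result::OK) {
        // transport error, NOT_SUPPORTED, INVALID_ARGUMENTS, ...
    }

    // Seek upwards, skipping HD Radio SPS and DAB secondary services.
    session->scan(true /* directionUp */, true /* skipSubChannel */);
    session->cancel();  // ignored if nothing is pending

    // Vendor parameters are opaque; keys must carry a vendor prefix.
    hidl_vec<hidl_string> keys = {"com.example.some-parameter"};  // placeholder key
    session->getParameters(keys, [](const hidl_vec<VendorKeyValue>& params) {
        // unknown keys are simply absent from the result
    });

    session->close();  // must be the last call on this interface
}
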
diff --git a/broadcastradio/2.0/types.hal b/broadcastradio/2.0/types.hal
new file mode 100644
index 0000000..4b9878b
--- /dev/null
+++ b/broadcastradio/2.0/types.hal
@@ -0,0 +1,411 @@
+/* Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.broadcastradio@2.0;
+
+/** Constants used by broadcast radio HAL. */
+enum Constants : int32_t {
+    /** Invalid identifier for IBroadcastRadio::getImage. */
+    INVALID_IMAGE = 0,
+
+    /**
+     * If the antenna is disconnected from the beginning, the
+     * onAntennaStateChange callback must be called within this time.
+     */
+    ANTENNA_DISCONNECTED_TIMEOUT_MS = 100,
+};
+
+enum Result : int32_t {
+    OK,
+    UNKNOWN_ERROR,
+    INVALID_ARGUMENTS,
+    INVALID_STATE,
+    NOT_SUPPORTED,
+    TIMEOUT,
+};
+
+/**
+ * A key-value pair for vendor-specific information to be passed as-is through
+ * Android framework to the front-end application.
+ */
+struct VendorKeyValue {
+    /**
+     * The key must start with a unique vendor Java-style namespace,
+     * e.g. 'com.somecompany.parameter1'.
+     */
+    string key;
+
+    /**
+     * The value must be passed through the framework without any changes.
+     * The format of this string can vary across vendors.
+     */
+    string value;
+};
+
+/**
+ * Properties of a given broadcast radio module.
+ */
+struct Properties {
+    /**
+     * The name of the company that made the radio module. Must be a valid, registered
+     * name of the company itself.
+     *
+     * It must be opaque to the Android framework.
+     */
+    string maker;
+
+    /**
+     * A product name. Must be unique within the company.
+     *
+     * It must be opaque to the Android framework.
+     */
+    string product;
+
+    /**
+     * Version of the hardware module.
+     *
+     * It must be opaque to the Android framework.
+     */
+    string version;
+
+    /**
+     * Hardware serial number (for subscription services).
+     *
+     * It must be opaque to the Android framework.
+     */
+    string serial;
+
+    /**
+     * A list of supported IdentifierType values.
+     *
+     * If an identifier type is supported by the radio module, the module can use
+     * it to tune to a ProgramSelector with either a primary or a secondary
+     * identifier of that type.
+     *
+     * Support for the VENDOR identifier type does not guarantee compatibility;
+     * other module properties (implementor, product, version) must be checked too.
+     */
+    vec<uint32_t> supportedIdentifierTypes;
+
+    /**
+     * Vendor-specific information.
+     *
+     * It may be used for extra features, not supported by the platform,
+     * for example: com.me.preset-slots=6; com.me.ultra-hd-capable=false.
+     */
+    vec<VendorKeyValue> vendorInfo;
+};
+
+/**
+ * Program (channel, station) information.
+ *
+ * Carries both user-visible information (like station name) and technical
+ * details (tuning selector).
+ */
+struct ProgramInfo {
+    /**
+     * An identifier used to point at the program (primarily to tune to it).
+     */
+    ProgramSelector selector;
+
+    bitfield<ProgramInfoFlags> infoFlags;
+
+    /**
+     * Signal quality measured in 0% to 100% range to be shown in the UI.
+     *
+     * The purpose of this field is primarily informative; it must not be used to
+     * determine which frequency to tune to.
+     */
+    uint32_t signalQuality;
+
+    /**
+     * Program metadata (station name, PTY, song title).
+     */
+    vec<Metadata> metadata;
+
+    /**
+     * Vendor-specific information.
+     *
+     * It may be used for extra features, not supported by the platform,
+     * for example: paid-service=true; bitrate=320kbps.
+     */
+    vec<VendorKeyValue> vendorInfo;
+};
+
+enum ProgramInfoFlags : uint32_t {
+    /**
+     * Set when the program is currently playing a live stream.
+     * This may result in slightly altered reception parameters,
+     * usually targeted at reduced latency.
+     */
+    LIVE = 1 << 0,
+
+    /**
+     * The radio stream is not playing, e.g. due to bad reception conditions or
+     * buffering. In this state the volume knob MAY be disabled to prevent the
+     * user from increasing the volume too much.
+     */
+    MUTED = 1 << 1,
+
+    /**
+     * The station broadcasts traffic information regularly,
+     * but not necessarily right now.
+     */
+    TRAFFIC_PROGRAM = 1 << 2,
+
+    /**
+     * The station is broadcasting traffic information at this very moment.
+     */
+    TRAFFIC_ANNOUNCEMENT = 1 << 3,
+
+    /**
+     * Tuned to a program (not playing static).
+     *
+     * It's the same condition that would stop a scan() operation.
+     */
+    TUNED = 1 << 4,
+
+    /**
+     * Audio stream is MONO if this bit is not set.
+     */
+    STEREO = 1 << 5,
+};
+
+/**
+ * Type of program identifier component.
+ *
+ * Each identifier type corresponds to exactly one radio technology,
+ * e.g. DAB_ENSEMBLE is specifically for DAB.
+ *
+ * VENDOR identifier types must be opaque to the framework.
+ *
+ * The value format for each identifier (except VENDOR_*) is strictly defined
+ * to maintain interoperability between devices made by different vendors.
+ *
+ * All other values are reserved for future use.
+ * Values not matching any enumerated constant must be ignored.
+ */
+enum IdentifierType : uint32_t {
+    /**
+     * Primary/secondary identifier for vendor-specific radio technology.
+     * The value format is determined by a vendor.
+     *
+     * The vendor identifiers have limited serialization capabilities - see
+     * ProgramSelector description.
+     */
+    VENDOR_START = 1000,
+
+    /** See VENDOR_START */
+    VENDOR_END = 1999,
+
+    /**
+     * Primary identifier for analogue (without RDS) AM/FM stations:
+     * frequency in kHz.
+     *
+     * This identifier also contains band information:
+     *  - <500kHz: AM LW;
+     *  - 500kHz - 1705kHz: AM MW;
+     *  - 1.71MHz - 30MHz: AM SW;
+     *  - >60MHz: FM.
+     */
+    AMFM_FREQUENCY = 1,
+
+    /**
+     * 16bit primary identifier for FM RDS station.
+     */
+    RDS_PI,
+
+    /**
+     * 64bit compound primary identifier for HD Radio.
+     *
+     * Consists of (from the LSB):
+     * - 32bit: Station ID number;
+     * - 4bit: HD Radio subchannel;
+     * - 18bit: AMFM_FREQUENCY. // TODO(b/69958777): is it necessary?
+     *
+     * HD Radio subchannel is a value in range 0-7.
+     * This index is 0-based (where 0 is MPS and 1..7 are SPS),
+     * as opposed to HD Radio standard (where it's 1-based).
+     *
+     * The remaining bits should be set to zeros when writing on the chip side
+     * and ignored when read.
+     */
+    HD_STATION_ID_EXT,
+
+    /**
+     * 28bit compound primary identifier for Digital Audio Broadcasting.
+     *
+     * Consists of (from the LSB):
+     * - 16bit: SId;
+     * - 8bit: ECC code;
+     * - 4bit: SCIdS.
+     *
+     * SCIdS (Service Component Identifier within the Service) value
+     * of 0 represents the main service, while 1 and above represents
+     * secondary services.
+     *
+     * The remaining bits should be set to zeros when writing on the chip side
+     * and ignored when read.
+     */
+    DAB_SID_EXT = HD_STATION_ID_EXT + 2,
+
+    /** 16bit */
+    DAB_ENSEMBLE,
+
+    /** 12bit */
+    DAB_SCID,
+
+    /** kHz (see AMFM_FREQUENCY) */
+    DAB_FREQUENCY,
+
+    /**
+     * 24bit primary identifier for Digital Radio Mondiale.
+     */
+    DRMO_SERVICE_ID,
+
+    /** kHz (see AMFM_FREQUENCY) */
+    DRMO_FREQUENCY,
+
+    /**
+     * 32bit primary identifier for SiriusXM Satellite Radio.
+     */
+    SXM_SERVICE_ID = DRMO_FREQUENCY + 2,
+
+    /** 0-999 range */
+    SXM_CHANNEL,
+};
+
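As a worked example of the compound formats above, a sketch of packing the documented bit layouts; the helper names are illustrative and not part of this HAL.

#include <cstdint>

// HD_STATION_ID_EXT, from the LSB: 32-bit station ID, 4-bit subchannel (0-7),
// 18-bit AMFM_FREQUENCY in kHz; remaining bits stay zero.
uint64_t makeHdStationIdExt(uint32_t stationId, uint32_t subChannel, uint32_t freqKhz) {
    return static_cast<uint64_t>(stationId)
         | (static_cast<uint64_t>(subChannel & 0xF) << 32)
         | (static_cast<uint64_t>(freqKhz & 0x3FFFF) << 36);
}

// DAB_SID_EXT, from the LSB: 16-bit SId, 8-bit ECC code, 4-bit SCIdS.
uint64_t makeDabSidExt(uint32_t sid, uint32_t ecc, uint32_t scids) {
    return static_cast<uint64_t>(sid & 0xFFFF)
         | (static_cast<uint64_t>(ecc & 0xFF) << 16)
         | (static_cast<uint64_t>(scids & 0xF) << 24);
}
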
+/**
+ * A single program identifier component, e.g. frequency or channel ID.
+ */
+struct ProgramIdentifier {
+    /**
+     * Maps to IdentifierType enum. The enum may be extended in future versions
+     * of the HAL. Values out of the enum range must not be used when writing
+     * and must be ignored when reading.
+     */
+    uint32_t type;
+
+    /**
+     * The uint64_t value field holds the value in format described in comments
+     * for IdentifierType enum.
+     */
+    uint64_t value;
+};
+
+/**
+ * A set of identifiers necessary to tune to a given station.
+ *
+ * This can hold a combination of various identifiers, like:
+ * - AM/FM frequency,
+ * - HD Radio subchannel,
+ * - DAB service ID.
+ *
+ * The type of radio technology is determined by the primary identifier: if the
+ * primary identifier is for DAB, the program is DAB. However, a program of a
+ * specific radio technology may have additional secondary identifiers for other
+ * technologies, e.g. a satellite program may have an FM fallback frequency
+ * if the station broadcasts both via satellite and FM.
+ *
+ * The identifiers from the VENDOR_START..VENDOR_END range have limited
+ * serialization capabilities: they are serialized locally, but ignored by
+ * cloud services. If a program has a primary id from the vendor range, it's not
+ * synchronized with other devices at all.
+ */
+struct ProgramSelector {
+    /**
+     * Primary program identifier.
+     *
+     * This identifier uniquely identifies a station and can be used for
+     * equality check.
+     *
+     * It can hold only a subset of identifier types, one for each
+     * radio technology:
+     *  - analogue AM/FM: AMFM_FREQUENCY;
+     *  - FM RDS: RDS_PI;
+     *  - HD Radio: HD_STATION_ID_EXT;
+     *  - DAB: DAB_SID_EXT;
+     *  - Digital Radio Mondiale: DRMO_SERVICE_ID;
+     *  - SiriusXM: SXM_SERVICE_ID;
+     *  - vendor-specific: VENDOR_START..VENDOR_END.
+     *
+     * The list may change in future versions, so the implementation must obey it,
+     * but must not rely on it.
+     */
+    ProgramIdentifier primaryId;
+
+    /**
+     * Secondary program identifiers.
+     *
+     * These identifiers are supplementary and can speed up the tuning process,
+     * but the primary ID must be sufficient (e.g. RDS PI is enough to select
+     * a station from the list after a full band scan).
+     *
+     * Two selectors with different secondary IDs but the same primary ID are
+     * considered equal. In particular, the secondary IDs vector may get updated
+     * for an entry on the program list (e.g. when a better frequency for a given
+     * station is found).
+     */
+    vec<ProgramIdentifier> secondaryIds;
+};
+
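For illustration, an FM RDS selector built from these types, as a sketch assuming the generated C++ bindings; the PI code and frequency are example values.

#include <android/hardware/broadcastradio/2.0/types.h>

using namespace android::hardware::broadcastradio::V2_0;

ProgramSelector makeFmRdsSelector() {
    ProgramSelector sel = {};
    // Primary ID: the RDS PI code alone identifies the station.
    sel.primaryId.type = static_cast<uint32_t>(IdentifierType::RDS_PI);
    sel.primaryId.value = 0x1234;  // example PI code
    // Secondary ID: the frequency only speeds up tuning; it does not affect equality.
    sel.secondaryIds.resize(1);
    sel.secondaryIds[0] = {static_cast<uint32_t>(IdentifierType::AMFM_FREQUENCY),
                           101100 /* kHz */};
    return sel;
}
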
+enum MetadataKey : int32_t {
+    /** RDS PS (string) */
+    RDS_PS = 1,
+
+    /** RDS PTY (uint8_t) */
+    RDS_PTY,
+
+    /** RBDS PTY (uint8_t) */
+    RBDS_PTY,
+
+    /** RDS RT (string) */
+    RDS_RT,
+
+    /** Song title (string) */
+    SONG_TITLE,
+
+    /** Artist name (string) */
+    SONG_ARTIST,
+
+    /** Album name (string) */
+    SONG_ALBUM,
+
+    /** Station icon (uint32_t, see IBroadcastRadio::getImage) */
+    STATION_ICON,
+
+    /** Album art (uint32_t, see IBroadcastRadio::getImage) */
+    ALBUM_ART,
+};
+
+/**
+ * An element of metadata vector.
+ *
+ * Contains one of the entries explained in MetadataKey.
+ *
+ * Depending on the type described in the comment for a specific key, either the
+ * intValue or the stringValue field must be populated.
+ */
+struct Metadata {
+    /**
+     * Maps to MetadataKey enum. The enum may be extended in future versions
+     * of the HAL. Values out of the enum range must not be used when writing
+     * and must be ignored when reading.
+     */
+    uint32_t key;
+
+    int64_t intValue;
+    string stringValue;
+};
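
A small sketch of how a client might consume the metadata vector according to the key typing above, assuming the generated C++ bindings; the switch covers only a few keys for brevity.

#include <android/hardware/broadcastradio/2.0/types.h>

using namespace android::hardware::broadcastradio::V2_0;
using android::hardware::hidl_vec;

void dumpMetadata(const hidl_vec<Metadata>& metadata) {
    for (const auto& item : metadata) {
        switch (static_cast<MetadataKey>(item.key)) {
            case MetadataKey::RDS_PS:      // string-typed entries
            case MetadataKey::SONG_TITLE:
                // use item.stringValue
                break;
            case MetadataKey::STATION_ICON:  // uint32_t image id for getImage()
                // use item.intValue
                break;
            default:
                // keys out of the known range must be ignored when reading
                break;
        }
    }
}
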
diff --git a/camera/metadata/3.2/types.hal b/camera/metadata/3.2/types.hal
index 17d1d5e..67b4e44 100644
--- a/camera/metadata/3.2/types.hal
+++ b/camera/metadata/3.2/types.hal
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2016 The Android Open Source Project
+ * Copyright (C) 2017 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,13 +14,17 @@
  * limitations under the License.
  */
 
+/*
+ * Autogenerated from camera metadata definitions in
+ * /system/media/camera/docs/metadata_definitions.xml
+ * *** DO NOT EDIT BY HAND ***
+ */
+
 package android.hardware.camera.metadata@3.2;
 
 /**
  * Top level hierarchy definitions for camera metadata. *_INFO sections are for
 * the static metadata that can be retrieved without opening the camera device.
- * New sections must be added right before ANDROID_SECTION_COUNT to maintain
- * existing enumerations.
  */
 enum CameraMetadataSection : uint32_t {
     ANDROID_COLOR_CORRECTION,
@@ -82,7 +86,7 @@
 };
 
 /**
- * Hierarchy positions in enum space. All vendor extension tags must be
+ * Hierarchy positions in enum space. All vendor extension sections must be
  * defined with tag >= VENDOR_SECTION_START
  */
 enum CameraMetadataSectionStart : uint32_t {
@@ -143,1175 +147,2325 @@
 };
 
 /**
- * Main enum for defining camera metadata tags. New entries must always go
- * before the section _END tag to preserve existing enumeration values. In
- * addition, the name and type of the tag needs to be added to
- * system/media/camera/src/camera_metadata_tag_info.c
+ * Main enumeration for defining camera metadata tags added in this revision
+ *
+ * <p>Partial documentation is included for each tag; for complete documentation, reference
+ * '/system/media/camera/docs/docs.html' in the corresponding Android source tree.</p>
  */
 enum CameraMetadataTag : uint32_t {
+    /** android.colorCorrection.mode [dynamic, enum, public]
+     *
+     * <p>The mode control selects how the image data is converted from the
+     * sensor's native color into linear sRGB color.</p>
+     */
     ANDROID_COLOR_CORRECTION_MODE = CameraMetadataSectionStart:ANDROID_COLOR_CORRECTION_START,
 
+    /** android.colorCorrection.transform [dynamic, rational[], public]
+     *
+     * <p>A color transform matrix to use to transform
+     * from sensor RGB color space to output linear sRGB color space.</p>
+     */
     ANDROID_COLOR_CORRECTION_TRANSFORM,
 
+    /** android.colorCorrection.gains [dynamic, float[], public]
+     *
+     * <p>Gains applying to Bayer raw color channels for
+     * white-balance.</p>
+     */
     ANDROID_COLOR_CORRECTION_GAINS,
 
+    /** android.colorCorrection.aberrationMode [dynamic, enum, public]
+     *
+     * <p>Mode of operation for the chromatic aberration correction algorithm.</p>
+     */
     ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
 
+    /** android.colorCorrection.availableAberrationModes [static, byte[], public]
+     *
+     * <p>List of aberration correction modes for ANDROID_COLOR_CORRECTION_ABERRATION_MODE that are
+     * supported by this camera device.</p>
+     *
+     * @see ANDROID_COLOR_CORRECTION_ABERRATION_MODE
+     */
     ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
 
     ANDROID_COLOR_CORRECTION_END,
 
+    /** android.control.aeAntibandingMode [dynamic, enum, public]
+     *
+     * <p>The desired setting for the camera device's auto-exposure
+     * algorithm's antibanding compensation.</p>
+     */
     ANDROID_CONTROL_AE_ANTIBANDING_MODE = CameraMetadataSectionStart:ANDROID_CONTROL_START,
 
+    /** android.control.aeExposureCompensation [dynamic, int32, public]
+     *
+     * <p>Adjustment to auto-exposure (AE) target image
+     * brightness.</p>
+     */
     ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
 
+    /** android.control.aeLock [dynamic, enum, public]
+     *
+     * <p>Whether auto-exposure (AE) is currently locked to its latest
+     * calculated values.</p>
+     */
     ANDROID_CONTROL_AE_LOCK,
 
+    /** android.control.aeMode [dynamic, enum, public]
+     *
+     * <p>The desired mode for the camera device's
+     * auto-exposure routine.</p>
+     */
     ANDROID_CONTROL_AE_MODE,
 
+    /** android.control.aeRegions [dynamic, int32[], public]
+     *
+     * <p>List of metering areas to use for auto-exposure adjustment.</p>
+     */
     ANDROID_CONTROL_AE_REGIONS,
 
+    /** android.control.aeTargetFpsRange [dynamic, int32[], public]
+     *
+     * <p>Range over which the auto-exposure routine can
+     * adjust the capture frame rate to maintain good
+     * exposure.</p>
+     */
     ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
 
+    /** android.control.aePrecaptureTrigger [dynamic, enum, public]
+     *
+     * <p>Whether the camera device will trigger a precapture
+     * metering sequence when it processes this request.</p>
+     */
     ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
 
+    /** android.control.afMode [dynamic, enum, public]
+     *
+     * <p>Whether auto-focus (AF) is currently enabled, and what
+     * mode it is set to.</p>
+     */
     ANDROID_CONTROL_AF_MODE,
 
+    /** android.control.afRegions [dynamic, int32[], public]
+     *
+     * <p>List of metering areas to use for auto-focus.</p>
+     */
     ANDROID_CONTROL_AF_REGIONS,
 
+    /** android.control.afTrigger [dynamic, enum, public]
+     *
+     * <p>Whether the camera device will trigger autofocus for this request.</p>
+     */
     ANDROID_CONTROL_AF_TRIGGER,
 
+    /** android.control.awbLock [dynamic, enum, public]
+     *
+     * <p>Whether auto-white balance (AWB) is currently locked to its
+     * latest calculated values.</p>
+     */
     ANDROID_CONTROL_AWB_LOCK,
 
+    /** android.control.awbMode [dynamic, enum, public]
+     *
+     * <p>Whether auto-white balance (AWB) is currently setting the color
+     * transform fields, and what its illumination target
+     * is.</p>
+     */
     ANDROID_CONTROL_AWB_MODE,
 
+    /** android.control.awbRegions [dynamic, int32[], public]
+     *
+     * <p>List of metering areas to use for auto-white-balance illuminant
+     * estimation.</p>
+     */
     ANDROID_CONTROL_AWB_REGIONS,
 
+    /** android.control.captureIntent [dynamic, enum, public]
+     *
+     * <p>Information to the camera device 3A (auto-exposure,
+     * auto-focus, auto-white balance) routines about the purpose
+     * of this capture, to help the camera device to decide optimal 3A
+     * strategy.</p>
+     */
     ANDROID_CONTROL_CAPTURE_INTENT,
 
+    /** android.control.effectMode [dynamic, enum, public]
+     *
+     * <p>A special color effect to apply.</p>
+     */
     ANDROID_CONTROL_EFFECT_MODE,
 
+    /** android.control.mode [dynamic, enum, public]
+     *
+     * <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
+     * routines.</p>
+     */
     ANDROID_CONTROL_MODE,
 
+    /** android.control.sceneMode [dynamic, enum, public]
+     *
+     * <p>Control for which scene mode is currently active.</p>
+     */
     ANDROID_CONTROL_SCENE_MODE,
 
+    /** android.control.videoStabilizationMode [dynamic, enum, public]
+     *
+     * <p>Whether video stabilization is
+     * active.</p>
+     */
     ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
 
+    /** android.control.aeAvailableAntibandingModes [static, byte[], public]
+     *
+     * <p>List of auto-exposure antibanding modes for ANDROID_CONTROL_AE_ANTIBANDING_MODE that are
+     * supported by this camera device.</p>
+     *
+     * @see ANDROID_CONTROL_AE_ANTIBANDING_MODE
+     */
     ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
 
+    /** android.control.aeAvailableModes [static, byte[], public]
+     *
+     * <p>List of auto-exposure modes for ANDROID_CONTROL_AE_MODE that are supported by this camera
+     * device.</p>
+     *
+     * @see ANDROID_CONTROL_AE_MODE
+     */
     ANDROID_CONTROL_AE_AVAILABLE_MODES,
 
+    /** android.control.aeAvailableTargetFpsRanges [static, int32[], public]
+     *
+     * <p>List of frame rate ranges for ANDROID_CONTROL_AE_TARGET_FPS_RANGE supported by
+     * this camera device.</p>
+     *
+     * @see ANDROID_CONTROL_AE_TARGET_FPS_RANGE
+     */
     ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
 
+    /** android.control.aeCompensationRange [static, int32[], public]
+     *
+     * <p>Maximum and minimum exposure compensation values for
+     * ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, in counts of ANDROID_CONTROL_AE_COMPENSATION_STEP,
+     * that are supported by this camera device.</p>
+     *
+     * @see ANDROID_CONTROL_AE_COMPENSATION_STEP
+     * @see ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION
+     */
     ANDROID_CONTROL_AE_COMPENSATION_RANGE,
 
+    /** android.control.aeCompensationStep [static, rational, public]
+     *
+     * <p>Smallest step by which the exposure compensation
+     * can be changed.</p>
+     */
     ANDROID_CONTROL_AE_COMPENSATION_STEP,
 
+    /** android.control.afAvailableModes [static, byte[], public]
+     *
+     * <p>List of auto-focus (AF) modes for ANDROID_CONTROL_AF_MODE that are
+     * supported by this camera device.</p>
+     *
+     * @see ANDROID_CONTROL_AF_MODE
+     */
     ANDROID_CONTROL_AF_AVAILABLE_MODES,
 
+    /** android.control.availableEffects [static, byte[], public]
+     *
+     * <p>List of color effects for ANDROID_CONTROL_EFFECT_MODE that are supported by this camera
+     * device.</p>
+     *
+     * @see ANDROID_CONTROL_EFFECT_MODE
+     */
     ANDROID_CONTROL_AVAILABLE_EFFECTS,
 
+    /** android.control.availableSceneModes [static, byte[], public]
+     *
+     * <p>List of scene modes for ANDROID_CONTROL_SCENE_MODE that are supported by this camera
+     * device.</p>
+     *
+     * @see ANDROID_CONTROL_SCENE_MODE
+     */
     ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
 
+    /** android.control.availableVideoStabilizationModes [static, byte[], public]
+     *
+     * <p>List of video stabilization modes for ANDROID_CONTROL_VIDEO_STABILIZATION_MODE
+     * that are supported by this camera device.</p>
+     *
+     * @see ANDROID_CONTROL_VIDEO_STABILIZATION_MODE
+     */
     ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
 
+    /** android.control.awbAvailableModes [static, byte[], public]
+     *
+     * <p>List of auto-white-balance modes for ANDROID_CONTROL_AWB_MODE that are supported by this
+     * camera device.</p>
+     *
+     * @see ANDROID_CONTROL_AWB_MODE
+     */
     ANDROID_CONTROL_AWB_AVAILABLE_MODES,
 
+    /** android.control.maxRegions [static, int32[], ndk_public]
+     *
+     * <p>List of the maximum number of regions that can be used for metering in
+     * auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
+     * this corresponds to the maximum number of elements in
+     * ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AWB_REGIONS,
+     * and ANDROID_CONTROL_AF_REGIONS.</p>
+     *
+     * @see ANDROID_CONTROL_AE_REGIONS
+     * @see ANDROID_CONTROL_AF_REGIONS
+     * @see ANDROID_CONTROL_AWB_REGIONS
+     */
     ANDROID_CONTROL_MAX_REGIONS,
 
+    /** android.control.sceneModeOverrides [static, byte[], system]
+     *
+     * <p>Ordered list of auto-exposure, auto-white balance, and auto-focus
+     * settings to use with each available scene mode.</p>
+     */
     ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
 
+    /** android.control.aePrecaptureId [dynamic, int32, system]
+     *
+     * <p>The ID sent with the latest
+     * CAMERA2_TRIGGER_PRECAPTURE_METERING call</p>
+     */
     ANDROID_CONTROL_AE_PRECAPTURE_ID,
 
+    /** android.control.aeState [dynamic, enum, public]
+     *
+     * <p>Current state of the auto-exposure (AE) algorithm.</p>
+     */
     ANDROID_CONTROL_AE_STATE,
 
+    /** android.control.afState [dynamic, enum, public]
+     *
+     * <p>Current state of auto-focus (AF) algorithm.</p>
+     */
     ANDROID_CONTROL_AF_STATE,
 
+    /** android.control.afTriggerId [dynamic, int32, system]
+     *
+     * <p>The ID sent with the latest
+     * CAMERA2_TRIGGER_AUTOFOCUS call</p>
+     */
     ANDROID_CONTROL_AF_TRIGGER_ID,
 
+    /** android.control.awbState [dynamic, enum, public]
+     *
+     * <p>Current state of auto-white balance (AWB) algorithm.</p>
+     */
     ANDROID_CONTROL_AWB_STATE,
 
+    /** android.control.availableHighSpeedVideoConfigurations [static, int32[], hidden]
+     *
+     * <p>List of available high speed video size, fps range and max batch size configurations
+     * supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).</p>
+     */
     ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
 
+    /** android.control.aeLockAvailable [static, enum, public]
+     *
+     * <p>Whether the camera device supports ANDROID_CONTROL_AE_LOCK</p>
+     *
+     * @see ANDROID_CONTROL_AE_LOCK
+     */
     ANDROID_CONTROL_AE_LOCK_AVAILABLE,
 
+    /** android.control.awbLockAvailable [static, enum, public]
+     *
+     * <p>Whether the camera device supports ANDROID_CONTROL_AWB_LOCK</p>
+     *
+     * @see ANDROID_CONTROL_AWB_LOCK
+     */
     ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
 
+    /** android.control.availableModes [static, byte[], public]
+     *
+     * <p>List of control modes for ANDROID_CONTROL_MODE that are supported by this camera
+     * device.</p>
+     *
+     * @see ANDROID_CONTROL_MODE
+     */
     ANDROID_CONTROL_AVAILABLE_MODES,
 
+    /** android.control.postRawSensitivityBoostRange [static, int32[], public]
+     *
+     * <p>Range of boosts for ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST supported
+     * by this camera device.</p>
+     *
+     * @see ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST
+     */
     ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
 
+    /** android.control.postRawSensitivityBoost [dynamic, int32, public]
+     *
+     * <p>The amount of additional sensitivity boost applied to output images
+     * after RAW sensor data is captured.</p>
+     */
     ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
 
+    /** android.control.enableZsl [dynamic, enum, public]
+     *
+     * <p>Allow camera device to enable zero-shutter-lag mode for requests with
+     * ANDROID_CONTROL_CAPTURE_INTENT == STILL_CAPTURE.</p>
+     *
+     * @see ANDROID_CONTROL_CAPTURE_INTENT
+     */
     ANDROID_CONTROL_ENABLE_ZSL,
 
     ANDROID_CONTROL_END,
 
+    /** android.demosaic.mode [controls, enum, system]
+     *
+     * <p>Controls the quality of the demosaicing
+     * processing.</p>
+     */
     ANDROID_DEMOSAIC_MODE = CameraMetadataSectionStart:ANDROID_DEMOSAIC_START,
 
     ANDROID_DEMOSAIC_END,
 
+    /** android.edge.mode [dynamic, enum, public]
+     *
+     * <p>Operation mode for edge
+     * enhancement.</p>
+     */
     ANDROID_EDGE_MODE = CameraMetadataSectionStart:ANDROID_EDGE_START,
 
+    /** android.edge.strength [controls, byte, system]
+     *
+     * <p>Control the amount of edge enhancement
+     * applied to the images</p>
+     */
     ANDROID_EDGE_STRENGTH,
 
+    /** android.edge.availableEdgeModes [static, byte[], public]
+     *
+     * <p>List of edge enhancement modes for ANDROID_EDGE_MODE that are supported by this camera
+     * device.</p>
+     *
+     * @see ANDROID_EDGE_MODE
+     */
     ANDROID_EDGE_AVAILABLE_EDGE_MODES,
 
     ANDROID_EDGE_END,
 
+    /** android.flash.firingPower [dynamic, byte, system]
+     *
+     * <p>Power for flash firing/torch</p>
+     */
     ANDROID_FLASH_FIRING_POWER = CameraMetadataSectionStart:ANDROID_FLASH_START,
 
+    /** android.flash.firingTime [dynamic, int64, system]
+     *
+     * <p>Firing time of flash relative to start of
+     * exposure</p>
+     */
     ANDROID_FLASH_FIRING_TIME,
 
+    /** android.flash.mode [dynamic, enum, public]
+     *
+     * <p>The desired mode for the camera device's flash control.</p>
+     */
     ANDROID_FLASH_MODE,
 
+    /** android.flash.colorTemperature [static, byte, system]
+     *
+     * <p>The x,y whitepoint of the
+     * flash</p>
+     */
     ANDROID_FLASH_COLOR_TEMPERATURE,
 
+    /** android.flash.maxEnergy [static, byte, system]
+     *
+     * <p>Max energy output of the flash for a full
+     * power single flash</p>
+     */
     ANDROID_FLASH_MAX_ENERGY,
 
+    /** android.flash.state [dynamic, enum, public]
+     *
+     * <p>Current state of the flash
+     * unit.</p>
+     */
     ANDROID_FLASH_STATE,
 
     ANDROID_FLASH_END,
 
+    /** android.flash.info.available [static, enum, public]
+     *
+     * <p>Whether this camera device has a
+     * flash unit.</p>
+     */
     ANDROID_FLASH_INFO_AVAILABLE = CameraMetadataSectionStart:ANDROID_FLASH_INFO_START,
 
+    /** android.flash.info.chargeDuration [static, int64, system]
+     *
+     * <p>Time taken before flash can fire
+     * again</p>
+     */
     ANDROID_FLASH_INFO_CHARGE_DURATION,
 
     ANDROID_FLASH_INFO_END,
 
+    /** android.hotPixel.mode [dynamic, enum, public]
+     *
+     * <p>Operational mode for hot pixel correction.</p>
+     */
     ANDROID_HOT_PIXEL_MODE = CameraMetadataSectionStart:ANDROID_HOT_PIXEL_START,
 
+    /** android.hotPixel.availableHotPixelModes [static, byte[], public]
+     *
+     * <p>List of hot pixel correction modes for ANDROID_HOT_PIXEL_MODE that are supported by this
+     * camera device.</p>
+     *
+     * @see ANDROID_HOT_PIXEL_MODE
+     */
     ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
 
     ANDROID_HOT_PIXEL_END,
 
+    /** android.jpeg.gpsCoordinates [dynamic, double[], ndk_public]
+     *
+     * <p>GPS coordinates to include in output JPEG
+     * EXIF.</p>
+     */
     ANDROID_JPEG_GPS_COORDINATES = CameraMetadataSectionStart:ANDROID_JPEG_START,
 
+    /** android.jpeg.gpsProcessingMethod [dynamic, byte, ndk_public]
+     *
+     * <p>32 characters describing GPS algorithm to
+     * include in EXIF.</p>
+     */
     ANDROID_JPEG_GPS_PROCESSING_METHOD,
 
+    /** android.jpeg.gpsTimestamp [dynamic, int64, ndk_public]
+     *
+     * <p>Time GPS fix was made to include in
+     * EXIF.</p>
+     */
     ANDROID_JPEG_GPS_TIMESTAMP,
 
+    /** android.jpeg.orientation [dynamic, int32, public]
+     *
+     * <p>The orientation for a JPEG image.</p>
+     */
     ANDROID_JPEG_ORIENTATION,
 
+    /** android.jpeg.quality [dynamic, byte, public]
+     *
+     * <p>Compression quality of the final JPEG
+     * image.</p>
+     */
     ANDROID_JPEG_QUALITY,
 
+    /** android.jpeg.thumbnailQuality [dynamic, byte, public]
+     *
+     * <p>Compression quality of JPEG
+     * thumbnail.</p>
+     */
     ANDROID_JPEG_THUMBNAIL_QUALITY,
 
+    /** android.jpeg.thumbnailSize [dynamic, int32[], public]
+     *
+     * <p>Resolution of embedded JPEG thumbnail.</p>
+     */
     ANDROID_JPEG_THUMBNAIL_SIZE,
 
+    /** android.jpeg.availableThumbnailSizes [static, int32[], public]
+     *
+     * <p>List of JPEG thumbnail sizes for ANDROID_JPEG_THUMBNAIL_SIZE supported by this
+     * camera device.</p>
+     *
+     * @see ANDROID_JPEG_THUMBNAIL_SIZE
+     */
     ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
 
+    /** android.jpeg.maxSize [static, int32, system]
+     *
+     * <p>Maximum size in bytes for the compressed
+     * JPEG buffer</p>
+     */
     ANDROID_JPEG_MAX_SIZE,
 
+    /** android.jpeg.size [dynamic, int32, system]
+     *
+     * <p>The size of the compressed JPEG image, in
+     * bytes</p>
+     */
     ANDROID_JPEG_SIZE,
 
     ANDROID_JPEG_END,
 
+    /** android.lens.aperture [dynamic, float, public]
+     *
+     * <p>The desired lens aperture size, as a ratio of lens focal length to the
+     * effective aperture diameter.</p>
+     */
     ANDROID_LENS_APERTURE = CameraMetadataSectionStart:ANDROID_LENS_START,
 
+    /** android.lens.filterDensity [dynamic, float, public]
+     *
+     * <p>The desired setting for the lens neutral density filter(s).</p>
+     */
     ANDROID_LENS_FILTER_DENSITY,
 
+    /** android.lens.focalLength [dynamic, float, public]
+     *
+     * <p>The desired lens focal length; used for optical zoom.</p>
+     */
     ANDROID_LENS_FOCAL_LENGTH,
 
+    /** android.lens.focusDistance [dynamic, float, public]
+     *
+     * <p>Desired distance to plane of sharpest focus,
+     * measured from frontmost surface of the lens.</p>
+     */
     ANDROID_LENS_FOCUS_DISTANCE,
 
+    /** android.lens.opticalStabilizationMode [dynamic, enum, public]
+     *
+     * <p>Sets whether the camera device uses optical image stabilization (OIS)
+     * when capturing images.</p>
+     */
     ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
 
+    /** android.lens.facing [static, enum, public]
+     *
+     * <p>Direction the camera faces relative to
+     * device screen.</p>
+     */
     ANDROID_LENS_FACING,
 
+    /** android.lens.poseRotation [dynamic, float[], public]
+     *
+     * <p>The orientation of the camera relative to the sensor
+     * coordinate system.</p>
+     */
     ANDROID_LENS_POSE_ROTATION,
 
+    /** android.lens.poseTranslation [dynamic, float[], public]
+     *
+     * <p>Position of the camera optical center.</p>
+     */
     ANDROID_LENS_POSE_TRANSLATION,
 
+    /** android.lens.focusRange [dynamic, float[], public]
+     *
+     * <p>The range of scene distances that are in
+     * sharp focus (depth of field).</p>
+     */
     ANDROID_LENS_FOCUS_RANGE,
 
+    /** android.lens.state [dynamic, enum, public]
+     *
+     * <p>Current lens status.</p>
+     */
     ANDROID_LENS_STATE,
 
+    /** android.lens.intrinsicCalibration [dynamic, float[], public]
+     *
+     * <p>The parameters for this camera device's intrinsic
+     * calibration.</p>
+     */
     ANDROID_LENS_INTRINSIC_CALIBRATION,
 
+    /** android.lens.radialDistortion [dynamic, float[], public]
+     *
+     * <p>The correction coefficients to correct for this camera device's
+     * radial and tangential lens distortion.</p>
+     */
     ANDROID_LENS_RADIAL_DISTORTION,
 
     ANDROID_LENS_END,
 
+    /** android.lens.info.availableApertures [static, float[], public]
+     *
+     * <p>List of aperture size values for ANDROID_LENS_APERTURE that are
+     * supported by this camera device.</p>
+     *
+     * @see ANDROID_LENS_APERTURE
+     */
     ANDROID_LENS_INFO_AVAILABLE_APERTURES = CameraMetadataSectionStart:ANDROID_LENS_INFO_START,
 
+    /** android.lens.info.availableFilterDensities [static, float[], public]
+     *
+     * <p>List of neutral density filter values for
+     * ANDROID_LENS_FILTER_DENSITY that are supported by this camera device.</p>
+     *
+     * @see ANDROID_LENS_FILTER_DENSITY
+     */
     ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
 
+    /** android.lens.info.availableFocalLengths [static, float[], public]
+     *
+     * <p>List of focal lengths for ANDROID_LENS_FOCAL_LENGTH that are supported by this camera
+     * device.</p>
+     *
+     * @see ANDROID_LENS_FOCAL_LENGTH
+     */
     ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
 
+    /** android.lens.info.availableOpticalStabilization [static, byte[], public]
+     *
+     * <p>List of optical image stabilization (OIS) modes for
+     * ANDROID_LENS_OPTICAL_STABILIZATION_MODE that are supported by this camera device.</p>
+     *
+     * @see ANDROID_LENS_OPTICAL_STABILIZATION_MODE
+     */
     ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
 
+    /** android.lens.info.hyperfocalDistance [static, float, public]
+     *
+     * <p>Hyperfocal distance for this lens.</p>
+     */
     ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
 
+    /** android.lens.info.minimumFocusDistance [static, float, public]
+     *
+     * <p>Shortest distance from frontmost surface
+     * of the lens that can be brought into sharp focus.</p>
+     */
     ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
 
+    /** android.lens.info.shadingMapSize [static, int32[], ndk_public]
+     *
+     * <p>Dimensions of lens shading map.</p>
+     */
     ANDROID_LENS_INFO_SHADING_MAP_SIZE,
 
+    /** android.lens.info.focusDistanceCalibration [static, enum, public]
+     *
+     * <p>The lens focus distance calibration quality.</p>
+     */
     ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
 
     ANDROID_LENS_INFO_END,
 
+    /** android.noiseReduction.mode [dynamic, enum, public]
+     *
+     * <p>Mode of operation for the noise reduction algorithm.</p>
+     */
     ANDROID_NOISE_REDUCTION_MODE = CameraMetadataSectionStart:ANDROID_NOISE_REDUCTION_START,
 
+    /** android.noiseReduction.strength [controls, byte, system]
+     *
+     * <p>Control the amount of noise reduction
+     * applied to the images</p>
+     */
     ANDROID_NOISE_REDUCTION_STRENGTH,
 
+    /** android.noiseReduction.availableNoiseReductionModes [static, byte[], public]
+     *
+     * <p>List of noise reduction modes for ANDROID_NOISE_REDUCTION_MODE that are supported
+     * by this camera device.</p>
+     *
+     * @see ANDROID_NOISE_REDUCTION_MODE
+     */
     ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
 
     ANDROID_NOISE_REDUCTION_END,
 
+    /** android.quirks.meteringCropRegion [static, byte, system]
+     *
+     * <p>If set to 1, the camera service does not
+     * scale 'normalized' coordinates with respect to the crop
+     * region. This applies to metering input (a{e,f,wb}Region)
+     * and output (face rectangles).</p>
+     */
     ANDROID_QUIRKS_METERING_CROP_REGION = CameraMetadataSectionStart:ANDROID_QUIRKS_START,
 
+    /** android.quirks.triggerAfWithAuto [static, byte, system]
+     *
+     * <p>If set to 1, then the camera service always
+     * switches to FOCUS_MODE_AUTO before issuing an AF
+     * trigger.</p>
+     */
     ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO,
 
+    /** android.quirks.useZslFormat [static, byte, system]
+     *
+     * <p>If set to 1, the camera service uses
+     * CAMERA2_PIXEL_FORMAT_ZSL instead of
+     * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
+     * shutter lag stream</p>
+     */
     ANDROID_QUIRKS_USE_ZSL_FORMAT,
 
+    /** android.quirks.usePartialResult [static, byte, hidden]
+     *
+     * <p>If set to 1, the HAL will always split result
+     * metadata for a single capture into multiple buffers,
+     * returned using multiple process_capture_result calls.</p>
+     */
     ANDROID_QUIRKS_USE_PARTIAL_RESULT,
 
+    /** android.quirks.partialResult [dynamic, enum, hidden]
+     *
+     * <p>Whether a result given to the framework is the
+     * final one for the capture, or only a partial that contains a
+     * subset of the full set of dynamic metadata
+     * values.</p>
+     */
     ANDROID_QUIRKS_PARTIAL_RESULT,
 
     ANDROID_QUIRKS_END,
 
+    /** android.request.frameCount [dynamic, int32, hidden]
+     *
+     * <p>A frame counter set by the framework. This value monotonically
+     * increases with every new result (that is, each new result has a unique
+     * frameCount value).</p>
+     */
     ANDROID_REQUEST_FRAME_COUNT = CameraMetadataSectionStart:ANDROID_REQUEST_START,
 
+    /** android.request.id [dynamic, int32, hidden]
+     *
+     * <p>An application-specified ID for the current
+     * request. Must be maintained unchanged in output
+     * frame</p>
+     */
     ANDROID_REQUEST_ID,
 
+    /** android.request.inputStreams [controls, int32[], system]
+     *
+     * <p>List which camera reprocess stream is used
+     * for the source of reprocessing data.</p>
+     */
     ANDROID_REQUEST_INPUT_STREAMS,
 
+    /** android.request.metadataMode [dynamic, enum, system]
+     *
+     * <p>How much metadata to produce on
+     * output</p>
+     */
     ANDROID_REQUEST_METADATA_MODE,
 
+    /** android.request.outputStreams [dynamic, int32[], system]
+     *
+     * <p>Lists which camera output streams image data
+     * from this capture must be sent to</p>
+     */
     ANDROID_REQUEST_OUTPUT_STREAMS,
 
+    /** android.request.type [controls, enum, system]
+     *
+     * <p>The type of the request; either CAPTURE or
+     * REPROCESS. For legacy HAL3, this tag is redundant.</p>
+     */
     ANDROID_REQUEST_TYPE,
 
+    /** android.request.maxNumOutputStreams [static, int32[], ndk_public]
+     *
+     * <p>The maximum numbers of different types of output streams
+     * that can be configured and used simultaneously by a camera device.</p>
+     */
     ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
 
+    /** android.request.maxNumReprocessStreams [static, int32[], system]
+     *
+     * <p>How many reprocessing streams of any type
+     * can be allocated at the same time.</p>
+     */
     ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS,
 
+    /** android.request.maxNumInputStreams [static, int32, java_public]
+     *
+     * <p>The maximum numbers of any type of input streams
+     * that can be configured and used simultaneously by a camera device.</p>
+     */
     ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
 
+    /** android.request.pipelineDepth [dynamic, byte, public]
+     *
+     * <p>Specifies the number of pipeline stages the frame went
+     * through from when it was exposed to when the final completed result
+     * was available to the framework.</p>
+     */
     ANDROID_REQUEST_PIPELINE_DEPTH,
 
+    /** android.request.pipelineMaxDepth [static, byte, public]
+     *
+     * <p>Specifies the maximum number of pipeline stages a frame
+     * has to go through from when it's exposed to when it's available
+     * to the framework.</p>
+     */
     ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
 
+    /** android.request.partialResultCount [static, int32, public]
+     *
+     * <p>Defines how many sub-components
+     * a result will be composed of.</p>
+     */
     ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
 
+    /** android.request.availableCapabilities [static, enum[], public]
+     *
+     * <p>List of capabilities that this camera device
+     * advertises as fully supporting.</p>
+     */
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
 
+    /** android.request.availableRequestKeys [static, int32[], ndk_public]
+     *
+     * <p>A list of all keys that the camera device has available
+     * to use with {@link ACaptureRequest }.</p>
+     */
     ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
 
+    /** android.request.availableResultKeys [static, int32[], ndk_public]
+     *
+     * <p>A list of all keys that the camera device has available to use with {@link ACameraCaptureSession_captureCallback_result }.</p>
+     */
     ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
 
+    /** android.request.availableCharacteristicsKeys [static, int32[], ndk_public]
+     *
+     * <p>A list of all keys that the camera device has available to use with {@link ACameraManager_getCameraCharacteristics }.</p>
+     */
     ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
 
     ANDROID_REQUEST_END,
 
+    /** android.scaler.cropRegion [dynamic, int32[], public]
+     *
+     * <p>The desired region of the sensor to read out for this capture.</p>
+     */
     ANDROID_SCALER_CROP_REGION = CameraMetadataSectionStart:ANDROID_SCALER_START,
 
+    /** android.scaler.availableFormats [static, enum[], hidden]
+     *
+     * <p>The list of image formats that are supported by this
+     * camera device for output streams.</p>
+     */
     ANDROID_SCALER_AVAILABLE_FORMATS,
 
+    /** android.scaler.availableJpegMinDurations [static, int64[], hidden]
+     *
+     * <p>The minimum frame duration that is supported
+     * for each resolution in ANDROID_SCALER_AVAILABLE_JPEG_SIZES.</p>
+     *
+     * @see ANDROID_SCALER_AVAILABLE_JPEG_SIZES
+     */
     ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
 
+    /** android.scaler.availableJpegSizes [static, int32[], hidden]
+     *
+     * <p>The JPEG resolutions that are supported by this camera device.</p>
+     */
     ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
 
+    /** android.scaler.availableMaxDigitalZoom [static, float, public]
+     *
+     * <p>The maximum ratio between both active area width
+     * and crop region width, and active area height and
+     * crop region height, for ANDROID_SCALER_CROP_REGION.</p>
+     *
+     * @see ANDROID_SCALER_CROP_REGION
+     */
     ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
 
+    /** android.scaler.availableProcessedMinDurations [static, int64[], hidden]
+     *
+     * <p>For each available processed output size (defined in
+     * ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES), this property lists the
+     * minimum supportable frame duration for that size.</p>
+     *
+     * @see ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES
+     */
     ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
 
+    /** android.scaler.availableProcessedSizes [static, int32[], hidden]
+     *
+     * <p>The resolutions available for use with
+     * processed output streams, such as YV12, NV12, and
+     * platform opaque YUV/RGB streams to the GPU or video
+     * encoders.</p>
+     */
     ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
 
+    /** android.scaler.availableRawMinDurations [static, int64[], system]
+     *
+     * <p>For each available raw output size (defined in
+     * ANDROID_SCALER_AVAILABLE_RAW_SIZES), this property lists the minimum
+     * supportable frame duration for that size.</p>
+     *
+     * @see ANDROID_SCALER_AVAILABLE_RAW_SIZES
+     */
     ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
 
+    /** android.scaler.availableRawSizes [static, int32[], system]
+     *
+     * <p>The resolutions available for use with raw
+     * sensor output streams, listed as width,
+     * height</p>
+     */
     ANDROID_SCALER_AVAILABLE_RAW_SIZES,
 
+    /** android.scaler.availableInputOutputFormatsMap [static, int32, hidden]
+     *
+     * <p>The mapping of image formats that are supported by this
+     * camera device for input streams, to their corresponding output formats.</p>
+     */
     ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
 
+    /** android.scaler.availableStreamConfigurations [static, enum[], ndk_public]
+     *
+     * <p>The available stream configurations that this
+     * camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     */
     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
 
+    /** android.scaler.availableMinFrameDurations [static, int64[], ndk_public]
+     *
+     * <p>This lists the minimum frame duration for each
+     * format/size combination.</p>
+     */
     ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
 
+    /** android.scaler.availableStallDurations [static, int64[], ndk_public]
+     *
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination.</p>
+     */
     ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
 
+    /** android.scaler.croppingType [static, enum, public]
+     *
+     * <p>The crop type that this camera device supports.</p>
+     */
     ANDROID_SCALER_CROPPING_TYPE,
 
     ANDROID_SCALER_END,
 
+    /** android.sensor.exposureTime [dynamic, int64, public]
+     *
+     * <p>Duration each pixel is exposed to
+     * light.</p>
+     */
     ANDROID_SENSOR_EXPOSURE_TIME = CameraMetadataSectionStart:ANDROID_SENSOR_START,
 
+    /** android.sensor.frameDuration [dynamic, int64, public]
+     *
+     * <p>Duration from start of frame exposure to
+     * start of next frame exposure.</p>
+     */
     ANDROID_SENSOR_FRAME_DURATION,
 
+    /** android.sensor.sensitivity [dynamic, int32, public]
+     *
+     * <p>The amount of gain applied to sensor data
+     * before processing.</p>
+     */
     ANDROID_SENSOR_SENSITIVITY,
 
+    /** android.sensor.referenceIlluminant1 [static, enum, public]
+     *
+     * <p>The standard reference illuminant used as the scene light source when
+     * calculating the ANDROID_SENSOR_COLOR_TRANSFORM1,
+     * ANDROID_SENSOR_CALIBRATION_TRANSFORM1, and
+     * ANDROID_SENSOR_FORWARD_MATRIX1 matrices.</p>
+     *
+     * @see ANDROID_SENSOR_CALIBRATION_TRANSFORM1
+     * @see ANDROID_SENSOR_COLOR_TRANSFORM1
+     * @see ANDROID_SENSOR_FORWARD_MATRIX1
+     */
     ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
 
+    /** android.sensor.referenceIlluminant2 [static, byte, public]
+     *
+     * <p>The standard reference illuminant used as the scene light source when
+     * calculating the ANDROID_SENSOR_COLOR_TRANSFORM2,
+     * ANDROID_SENSOR_CALIBRATION_TRANSFORM2, and
+     * ANDROID_SENSOR_FORWARD_MATRIX2 matrices.</p>
+     *
+     * @see ANDROID_SENSOR_CALIBRATION_TRANSFORM2
+     * @see ANDROID_SENSOR_COLOR_TRANSFORM2
+     * @see ANDROID_SENSOR_FORWARD_MATRIX2
+     */
     ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
 
+    /** android.sensor.calibrationTransform1 [static, rational[], public]
+     *
+     * <p>A per-device calibration transform matrix that maps from the
+     * reference sensor colorspace to the actual device sensor colorspace.</p>
+     */
     ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
 
+    /** android.sensor.calibrationTransform2 [static, rational[], public]
+     *
+     * <p>A per-device calibration transform matrix that maps from the
+     * reference sensor colorspace to the actual device sensor colorspace
+     * (this is the colorspace of the raw buffer data).</p>
+     */
     ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
 
+    /** android.sensor.colorTransform1 [static, rational[], public]
+     *
+     * <p>A matrix that transforms color values from CIE XYZ color space to
+     * reference sensor color space.</p>
+     */
     ANDROID_SENSOR_COLOR_TRANSFORM1,
 
+    /** android.sensor.colorTransform2 [static, rational[], public]
+     *
+     * <p>A matrix that transforms color values from CIE XYZ color space to
+     * reference sensor color space.</p>
+     */
     ANDROID_SENSOR_COLOR_TRANSFORM2,
 
+    /** android.sensor.forwardMatrix1 [static, rational[], public]
+     *
+     * <p>A matrix that transforms white balanced camera colors from the reference
+     * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
+     */
     ANDROID_SENSOR_FORWARD_MATRIX1,
 
+    /** android.sensor.forwardMatrix2 [static, rational[], public]
+     *
+     * <p>A matrix that transforms white balanced camera colors from the reference
+     * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
+     */
     ANDROID_SENSOR_FORWARD_MATRIX2,
 
+    /** android.sensor.baseGainFactor [static, rational, system]
+     *
+     * <p>Gain factor from electrons to raw units when
+     * ISO=100</p>
+     */
     ANDROID_SENSOR_BASE_GAIN_FACTOR,
 
+    /** android.sensor.blackLevelPattern [static, int32[], public]
+     *
+     * <p>A fixed black level offset for each of the color filter arrangement
+     * (CFA) mosaic channels.</p>
+     */
     ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
 
+    /** android.sensor.maxAnalogSensitivity [static, int32, public]
+     *
+     * <p>Maximum sensitivity that is implemented
+     * purely through analog gain.</p>
+     */
     ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
 
+    /** android.sensor.orientation [static, int32, public]
+     *
+     * <p>Clockwise angle through which the output image needs to be rotated to be
+     * upright on the device screen in its native orientation.</p>
+     */
     ANDROID_SENSOR_ORIENTATION,
 
+    /** android.sensor.profileHueSatMapDimensions [static, int32[], system]
+     *
+     * <p>The number of input samples for each dimension of
+     * ANDROID_SENSOR_PROFILE_HUE_SAT_MAP.</p>
+     *
+     * @see ANDROID_SENSOR_PROFILE_HUE_SAT_MAP
+     */
     ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS,
 
+    /** android.sensor.timestamp [dynamic, int64, public]
+     *
+     * <p>Time at start of exposure of first
+     * row of the image sensor active array, in nanoseconds.</p>
+     */
     ANDROID_SENSOR_TIMESTAMP,
 
+    /** android.sensor.temperature [dynamic, float, system]
+     *
+     * <p>The temperature of the sensor, sampled at the time
+     * exposure began for this frame.</p>
+     * <p>The thermal diode being queried should be inside the sensor PCB, or
+     * somewhere close to it.</p>
+     */
     ANDROID_SENSOR_TEMPERATURE,
 
+    /** android.sensor.neutralColorPoint [dynamic, rational[], public]
+     *
+     * <p>The estimated camera neutral color in the native sensor colorspace at
+     * the time of capture.</p>
+     */
     ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
 
+    /** android.sensor.noiseProfile [dynamic, double[], public]
+     *
+     * <p>Noise model coefficients for each CFA mosaic channel.</p>
+     */
     ANDROID_SENSOR_NOISE_PROFILE,
 
+    /** android.sensor.profileHueSatMap [dynamic, float[], system]
+     *
+     * <p>A mapping containing a hue shift, saturation scale, and value scale
+     * for each pixel.</p>
+     */
     ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,
 
+    /** android.sensor.profileToneCurve [dynamic, float[], system]
+     *
+     * <p>A list of x,y samples defining a tone-mapping curve for gamma adjustment.</p>
+     */
     ANDROID_SENSOR_PROFILE_TONE_CURVE,
 
+    /** android.sensor.greenSplit [dynamic, float, public]
+     *
+     * <p>The worst-case divergence between Bayer green channels.</p>
+     */
     ANDROID_SENSOR_GREEN_SPLIT,
 
+    /** android.sensor.testPatternData [dynamic, int32[], public]
+     *
+     * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
+     * when ANDROID_SENSOR_TEST_PATTERN_MODE is SOLID_COLOR.</p>
+     *
+     * @see ANDROID_SENSOR_TEST_PATTERN_MODE
+     */
     ANDROID_SENSOR_TEST_PATTERN_DATA,
 
+    /** android.sensor.testPatternMode [dynamic, enum, public]
+     *
+     * <p>When enabled, the sensor sends a test pattern instead of
+     * doing a real exposure from the camera.</p>
+     */
     ANDROID_SENSOR_TEST_PATTERN_MODE,
 
+    /** android.sensor.availableTestPatternModes [static, int32[], public]
+     *
+     * <p>List of sensor test pattern modes for ANDROID_SENSOR_TEST_PATTERN_MODE
+     * supported by this camera device.</p>
+     *
+     * @see ANDROID_SENSOR_TEST_PATTERN_MODE
+     */
     ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
 
+    /** android.sensor.rollingShutterSkew [dynamic, int64, public]
+     *
+     * <p>Duration between the start of first row exposure
+     * and the start of last row exposure.</p>
+     */
     ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
 
+    /** android.sensor.opticalBlackRegions [static, int32[], public]
+     *
+     * <p>List of disjoint rectangles indicating the sensor
+     * optically shielded black pixel regions.</p>
+     */
     ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
 
+    /** android.sensor.dynamicBlackLevel [dynamic, float[], public]
+     *
+     * <p>A per-frame dynamic black level offset for each of the color filter
+     * arrangement (CFA) mosaic channels.</p>
+     */
     ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
 
+    /** android.sensor.dynamicWhiteLevel [dynamic, int32, public]
+     *
+     * <p>Maximum raw value output by sensor for this frame.</p>
+     */
     ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
 
+    /** android.sensor.opaqueRawSize [static, int32[], system]
+     *
+     * <p>Size in bytes for all the listed opaque RAW buffer sizes</p>
+     */
     ANDROID_SENSOR_OPAQUE_RAW_SIZE,
 
     ANDROID_SENSOR_END,
 
+    /** android.sensor.info.activeArraySize [static, int32[], public]
+     *
+     * <p>The area of the image sensor which corresponds to active pixels after any geometric
+     * distortion correction has been applied.</p>
+     */
     ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE = CameraMetadataSectionStart:ANDROID_SENSOR_INFO_START,
 
+    /** android.sensor.info.sensitivityRange [static, int32[], public]
+     *
+     * <p>Range of sensitivities for ANDROID_SENSOR_SENSITIVITY supported by this
+     * camera device.</p>
+     *
+     * @see ANDROID_SENSOR_SENSITIVITY
+     */
     ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
 
+    /** android.sensor.info.colorFilterArrangement [static, enum, public]
+     *
+     * <p>The arrangement of color filters on sensor;
+     * represents the colors in the top-left 2x2 section of
+     * the sensor, in reading order.</p>
+     */
     ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
 
+    /** android.sensor.info.exposureTimeRange [static, int64[], public]
+     *
+     * <p>The range of image exposure times for ANDROID_SENSOR_EXPOSURE_TIME supported
+     * by this camera device.</p>
+     *
+     * @see ANDROID_SENSOR_EXPOSURE_TIME
+     */
     ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
 
+    /** android.sensor.info.maxFrameDuration [static, int64, public]
+     *
+     * <p>The maximum possible frame duration (minimum frame rate) for
+     * ANDROID_SENSOR_FRAME_DURATION that is supported by this camera device.</p>
+     *
+     * @see ANDROID_SENSOR_FRAME_DURATION
+     */
     ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
 
+    /** android.sensor.info.physicalSize [static, float[], public]
+     *
+     * <p>The physical dimensions of the full pixel
+     * array.</p>
+     */
     ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
 
+    /** android.sensor.info.pixelArraySize [static, int32[], public]
+     *
+     * <p>Dimensions of the full pixel array, possibly
+     * including black calibration pixels.</p>
+     */
     ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
 
+    /** android.sensor.info.whiteLevel [static, int32, public]
+     *
+     * <p>Maximum raw value output by sensor.</p>
+     */
     ANDROID_SENSOR_INFO_WHITE_LEVEL,
 
+    /** android.sensor.info.timestampSource [static, enum, public]
+     *
+     * <p>The time base source for sensor capture start timestamps.</p>
+     */
     ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
 
+    /** android.sensor.info.lensShadingApplied [static, enum, public]
+     *
+     * <p>Whether the RAW images output from this camera device are subject to
+     * lens shading correction.</p>
+     */
     ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED,
 
+    /** android.sensor.info.preCorrectionActiveArraySize [static, int32[], public]
+     *
+     * <p>The area of the image sensor which corresponds to active pixels prior to the
+     * application of any geometric distortion correction.</p>
+     */
     ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
 
     ANDROID_SENSOR_INFO_END,
 
+    /** android.shading.mode [dynamic, enum, public]
+     *
+     * <p>Quality of lens shading correction applied
+     * to the image data.</p>
+     */
     ANDROID_SHADING_MODE = CameraMetadataSectionStart:ANDROID_SHADING_START,
 
+    /** android.shading.strength [controls, byte, system]
+     *
+     * <p>Control the amount of shading correction
+     * applied to the images</p>
+     */
     ANDROID_SHADING_STRENGTH,
 
+    /** android.shading.availableModes [static, byte[], public]
+     *
+     * <p>List of lens shading modes for ANDROID_SHADING_MODE that are supported by this camera device.</p>
+     *
+     * @see ANDROID_SHADING_MODE
+     */
     ANDROID_SHADING_AVAILABLE_MODES,
 
     ANDROID_SHADING_END,
 
+    /** android.statistics.faceDetectMode [dynamic, enum, public]
+     *
+     * <p>Operating mode for the face detector
+     * unit.</p>
+     */
     ANDROID_STATISTICS_FACE_DETECT_MODE = CameraMetadataSectionStart:ANDROID_STATISTICS_START,
 
+    /** android.statistics.histogramMode [dynamic, enum, system]
+     *
+     * <p>Operating mode for histogram
+     * generation</p>
+     */
     ANDROID_STATISTICS_HISTOGRAM_MODE,
 
+    /** android.statistics.sharpnessMapMode [dynamic, enum, system]
+     *
+     * <p>Operating mode for sharpness map
+     * generation</p>
+     */
     ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
 
+    /** android.statistics.hotPixelMapMode [dynamic, enum, public]
+     *
+     * <p>Operating mode for hot pixel map generation.</p>
+     */
     ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
 
+    /** android.statistics.faceIds [dynamic, int32[], ndk_public]
+     *
+     * <p>List of unique IDs for detected faces.</p>
+     */
     ANDROID_STATISTICS_FACE_IDS,
 
+    /** android.statistics.faceLandmarks [dynamic, int32[], ndk_public]
+     *
+     * <p>List of landmarks for detected
+     * faces.</p>
+     */
     ANDROID_STATISTICS_FACE_LANDMARKS,
 
+    /** android.statistics.faceRectangles [dynamic, int32[], ndk_public]
+     *
+     * <p>List of the bounding rectangles for detected
+     * faces.</p>
+     */
     ANDROID_STATISTICS_FACE_RECTANGLES,
 
+    /** android.statistics.faceScores [dynamic, byte[], ndk_public]
+     *
+     * <p>List of the face confidence scores for
+     * detected faces</p>
+     */
     ANDROID_STATISTICS_FACE_SCORES,
 
+    /** android.statistics.histogram [dynamic, int32[], system]
+     *
+     * <p>A 3-channel histogram based on the raw
+     * sensor data</p>
+     */
     ANDROID_STATISTICS_HISTOGRAM,
 
+    /** android.statistics.sharpnessMap [dynamic, int32[], system]
+     *
+     * <p>A 3-channel sharpness map, based on the raw
+     * sensor data</p>
+     */
     ANDROID_STATISTICS_SHARPNESS_MAP,
 
+    /** android.statistics.lensShadingCorrectionMap [dynamic, byte, java_public]
+     *
+     * <p>The shading map is a low-resolution floating-point map
+     * that lists the coefficients used to correct for vignetting, for each
+     * Bayer color channel.</p>
+     */
     ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP,
 
+    /** android.statistics.lensShadingMap [dynamic, float[], ndk_public]
+     *
+     * <p>The shading map is a low-resolution floating-point map
+     * that lists the coefficients used to correct for vignetting and color shading,
+     * for each Bayer color channel of RAW image data.</p>
+     */
     ANDROID_STATISTICS_LENS_SHADING_MAP,
 
+    /** android.statistics.predictedColorGains [dynamic, float[], hidden]
+     *
+     * <p>The best-fit color channel gains calculated
+     * by the camera device's statistics units for the current output frame.</p>
+     */
     ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
 
+    /** android.statistics.predictedColorTransform [dynamic, rational[], hidden]
+     *
+     * <p>The best-fit color transform matrix estimate
+     * calculated by the camera device's statistics units for the current
+     * output frame.</p>
+     */
     ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
 
+    /** android.statistics.sceneFlicker [dynamic, enum, public]
+     *
+     * <p>The camera device estimated scene illumination lighting
+     * frequency.</p>
+     */
     ANDROID_STATISTICS_SCENE_FLICKER,
 
+    /** android.statistics.hotPixelMap [dynamic, int32[], public]
+     *
+     * <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
+     */
     ANDROID_STATISTICS_HOT_PIXEL_MAP,
 
+    /** android.statistics.lensShadingMapMode [dynamic, enum, public]
+     *
+     * <p>Whether the camera device will output the lens
+     * shading map in output result metadata.</p>
+     */
     ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
 
     ANDROID_STATISTICS_END,
 
-    ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES =
-            CameraMetadataSectionStart:ANDROID_STATISTICS_INFO_START,
+    /** android.statistics.info.availableFaceDetectModes [static, byte[], public]
+     *
+     * <p>List of face detection modes for ANDROID_STATISTICS_FACE_DETECT_MODE that are
+     * supported by this camera device.</p>
+     *
+     * @see ANDROID_STATISTICS_FACE_DETECT_MODE
+     */
+    ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES = CameraMetadataSectionStart:ANDROID_STATISTICS_INFO_START,
 
+    /** android.statistics.info.histogramBucketCount [static, int32, system]
+     *
+     * <p>Number of histogram buckets
+     * supported</p>
+     */
     ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
 
+    /** android.statistics.info.maxFaceCount [static, int32, public]
+     *
+     * <p>The maximum number of simultaneously detectable
+     * faces.</p>
+     */
     ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
 
+    /** android.statistics.info.maxHistogramCount [static, int32, system]
+     *
+     * <p>Maximum value possible for a histogram
+     * bucket</p>
+     */
     ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
 
+    /** android.statistics.info.maxSharpnessMapValue [static, int32, system]
+     *
+     * <p>Maximum value possible for a sharpness map
+     * region.</p>
+     */
     ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
 
+    /** android.statistics.info.sharpnessMapSize [static, int32[], system]
+     *
+     * <p>Dimensions of the sharpness
+     * map</p>
+     */
     ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
 
+    /** android.statistics.info.availableHotPixelMapModes [static, byte[], public]
+     *
+     * <p>List of hot pixel map output modes for ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE that are
+     * supported by this camera device.</p>
+     *
+     * @see ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE
+     */
     ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
 
+    /** android.statistics.info.availableLensShadingMapModes [static, byte[], public]
+     *
+     * <p>List of lens shading map output modes for ANDROID_STATISTICS_LENS_SHADING_MAP_MODE that
+     * are supported by this camera device.</p>
+     *
+     * @see ANDROID_STATISTICS_LENS_SHADING_MAP_MODE
+     */
     ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
 
     ANDROID_STATISTICS_INFO_END,
 
+    /** android.tonemap.curveBlue [dynamic, float[], ndk_public]
+     *
+     * <p>Tonemapping / contrast / gamma curve for the blue
+     * channel, to use when ANDROID_TONEMAP_MODE is
+     * CONTRAST_CURVE.</p>
+     *
+     * @see ANDROID_TONEMAP_MODE
+     */
     ANDROID_TONEMAP_CURVE_BLUE = CameraMetadataSectionStart:ANDROID_TONEMAP_START,
 
+    /** android.tonemap.curveGreen [dynamic, float[], ndk_public]
+     *
+     * <p>Tonemapping / contrast / gamma curve for the green
+     * channel, to use when ANDROID_TONEMAP_MODE is
+     * CONTRAST_CURVE.</p>
+     *
+     * @see ANDROID_TONEMAP_MODE
+     */
     ANDROID_TONEMAP_CURVE_GREEN,
 
+    /** android.tonemap.curveRed [dynamic, float[], ndk_public]
+     *
+     * <p>Tonemapping / contrast / gamma curve for the red
+     * channel, to use when ANDROID_TONEMAP_MODE is
+     * CONTRAST_CURVE.</p>
+     *
+     * @see ANDROID_TONEMAP_MODE
+     */
     ANDROID_TONEMAP_CURVE_RED,
 
+    /** android.tonemap.mode [dynamic, enum, public]
+     *
+     * <p>High-level global contrast/gamma/tonemapping control.</p>
+     */
     ANDROID_TONEMAP_MODE,
 
+    /** android.tonemap.maxCurvePoints [static, int32, public]
+     *
+     * <p>Maximum number of supported points in the
+     * tonemap curve that can be used for ANDROID_TONEMAP_CURVE.</p>
+     *
+     * @see ANDROID_TONEMAP_CURVE
+     */
     ANDROID_TONEMAP_MAX_CURVE_POINTS,
 
+    /** android.tonemap.availableToneMapModes [static, byte[], public]
+     *
+     * <p>List of tonemapping modes for ANDROID_TONEMAP_MODE that are supported by this camera
+     * device.</p>
+     *
+     * @see ANDROID_TONEMAP_MODE
+     */
     ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
 
+    /** android.tonemap.gamma [dynamic, float, public]
+     *
+     * <p>Tonemapping curve to use when ANDROID_TONEMAP_MODE is
+     * GAMMA_VALUE</p>
+     *
+     * @see ANDROID_TONEMAP_MODE
+     */
     ANDROID_TONEMAP_GAMMA,
 
+    /** android.tonemap.presetCurve [dynamic, enum, public]
+     *
+     * <p>Tonemapping curve to use when ANDROID_TONEMAP_MODE is
+     * PRESET_CURVE</p>
+     *
+     * @see ANDROID_TONEMAP_MODE
+     */
     ANDROID_TONEMAP_PRESET_CURVE,
 
     ANDROID_TONEMAP_END,
 
+    /** android.led.transmit [dynamic, enum, hidden]
+     *
+     * <p>This LED is nominally used to indicate to the user
+     * that the camera is powered on and may be streaming images back to the
+     * Application Processor. In certain rare circumstances, the OS may
+     * disable this when video is processed locally and not transmitted to
+     * any untrusted applications.</p>
+     * <p>In particular, the LED <em>must</em> always be on when the data could be
+     * transmitted off the device. The LED <em>should</em> always be on whenever
+     * data is stored locally on the device.</p>
+     * <p>The LED <em>may</em> be off if a trusted application is using the data that
+     * doesn't violate the above rules.</p>
+     */
     ANDROID_LED_TRANSMIT = CameraMetadataSectionStart:ANDROID_LED_START,
 
+    /** android.led.availableLeds [static, enum[], hidden]
+     *
+     * <p>A list of camera LEDs that are available on this system.</p>
+     */
     ANDROID_LED_AVAILABLE_LEDS,
 
     ANDROID_LED_END,
 
+    /** android.info.supportedHardwareLevel [static, enum, public]
+     *
+     * <p>Generally classifies the overall set of the camera device functionality.</p>
+     */
     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL = CameraMetadataSectionStart:ANDROID_INFO_START,
 
     ANDROID_INFO_END,
 
+    /** android.blackLevel.lock [dynamic, enum, public]
+     *
+     * <p>Whether black-level compensation is locked
+     * to its current values, or is free to vary.</p>
+     */
     ANDROID_BLACK_LEVEL_LOCK = CameraMetadataSectionStart:ANDROID_BLACK_LEVEL_START,
 
     ANDROID_BLACK_LEVEL_END,
 
+    /** android.sync.frameNumber [dynamic, enum, ndk_public]
+     *
+     * <p>The frame number corresponding to the last request
+     * with which the output result (metadata + buffers) has been fully
+     * synchronized.</p>
+     */
     ANDROID_SYNC_FRAME_NUMBER = CameraMetadataSectionStart:ANDROID_SYNC_START,
 
+    /** android.sync.maxLatency [static, enum, public]
+     *
+     * <p>The maximum number of frames that can occur after a request
+     * (different than the previous) has been submitted, and before the
+     * result's state becomes synchronized.</p>
+     */
     ANDROID_SYNC_MAX_LATENCY,
 
     ANDROID_SYNC_END,
 
+    /** android.reprocess.effectiveExposureFactor [dynamic, float, java_public]
+     *
+     * <p>The exposure time increase factor applied to the original output
+     * frame by the application's processing before sending it for reprocessing.</p>
+     */
     ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR = CameraMetadataSectionStart:ANDROID_REPROCESS_START,
 
+    /** android.reprocess.maxCaptureStall [static, int32, java_public]
+     *
+     * <p>The maximal camera capture pipeline stall (in units of frame count) introduced by a
+     * reprocess capture request.</p>
+     */
     ANDROID_REPROCESS_MAX_CAPTURE_STALL,
 
     ANDROID_REPROCESS_END,
 
+    /** android.depth.maxDepthSamples [static, int32, system]
+     *
+     * <p>Maximum number of points that a depth point cloud may contain.</p>
+     */
     ANDROID_DEPTH_MAX_DEPTH_SAMPLES = CameraMetadataSectionStart:ANDROID_DEPTH_START,
 
+    /** android.depth.availableDepthStreamConfigurations [static, enum[], ndk_public]
+     *
+     * <p>The available depth dataspace stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     */
     ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
 
+    /** android.depth.availableDepthMinFrameDurations [static, int64[], ndk_public]
+     *
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for depth output formats.</p>
+     */
     ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
 
+    /** android.depth.availableDepthStallDurations [static, int64[], ndk_public]
+     *
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for depth streams.</p>
+     */
     ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
 
+    /** android.depth.depthIsExclusive [static, enum, public]
+     *
+     * <p>Indicates whether a capture request may target both a
+     * DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
+     * YUV_420_888, JPEG, or RAW) simultaneously.</p>
+     */
     ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
 
     ANDROID_DEPTH_END,
 
 };
 
-/**
+/*
  * Enumeration definitions for the various entries that need them
  */
+
+/** android.colorCorrection.mode enumeration values
+ * @see ANDROID_COLOR_CORRECTION_MODE
+ */
 enum CameraMetadataEnumAndroidColorCorrectionMode : uint32_t {
     ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX,
-
     ANDROID_COLOR_CORRECTION_MODE_FAST,
-
     ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY,
-
 };
 
+/** android.colorCorrection.aberrationMode enumeration values
+ * @see ANDROID_COLOR_CORRECTION_ABERRATION_MODE
+ */
 enum CameraMetadataEnumAndroidColorCorrectionAberrationMode : uint32_t {
     ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
-
     ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
-
     ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
-
 };
 
+/** android.control.aeAntibandingMode enumeration values
+ * @see ANDROID_CONTROL_AE_ANTIBANDING_MODE
+ */
 enum CameraMetadataEnumAndroidControlAeAntibandingMode : uint32_t {
     ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
-
     ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
-
     ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
-
     ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
-
 };
 
+/** android.control.aeLock enumeration values
+ * @see ANDROID_CONTROL_AE_LOCK
+ */
 enum CameraMetadataEnumAndroidControlAeLock : uint32_t {
     ANDROID_CONTROL_AE_LOCK_OFF,
-
     ANDROID_CONTROL_AE_LOCK_ON,
-
 };
 
+/** android.control.aeMode enumeration values
+ * @see ANDROID_CONTROL_AE_MODE
+ */
 enum CameraMetadataEnumAndroidControlAeMode : uint32_t {
     ANDROID_CONTROL_AE_MODE_OFF,
-
     ANDROID_CONTROL_AE_MODE_ON,
-
     ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
-
     ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,
-
     ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE,
-
 };
 
+/** android.control.aePrecaptureTrigger enumeration values
+ * @see ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER
+ */
 enum CameraMetadataEnumAndroidControlAePrecaptureTrigger : uint32_t {
     ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE,
-
     ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START,
-
     ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL,
-
 };
 
+/** android.control.afMode enumeration values
+ * @see ANDROID_CONTROL_AF_MODE
+ */
 enum CameraMetadataEnumAndroidControlAfMode : uint32_t {
     ANDROID_CONTROL_AF_MODE_OFF,
-
     ANDROID_CONTROL_AF_MODE_AUTO,
-
     ANDROID_CONTROL_AF_MODE_MACRO,
-
     ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
-
     ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
-
     ANDROID_CONTROL_AF_MODE_EDOF,
-
 };
 
+/** android.control.afTrigger enumeration values
+ * @see ANDROID_CONTROL_AF_TRIGGER
+ */
 enum CameraMetadataEnumAndroidControlAfTrigger : uint32_t {
     ANDROID_CONTROL_AF_TRIGGER_IDLE,
-
     ANDROID_CONTROL_AF_TRIGGER_START,
-
     ANDROID_CONTROL_AF_TRIGGER_CANCEL,
-
 };
 
+/** android.control.awbLock enumeration values
+ * @see ANDROID_CONTROL_AWB_LOCK
+ */
 enum CameraMetadataEnumAndroidControlAwbLock : uint32_t {
     ANDROID_CONTROL_AWB_LOCK_OFF,
-
     ANDROID_CONTROL_AWB_LOCK_ON,
-
 };
 
+/** android.control.awbMode enumeration values
+ * @see ANDROID_CONTROL_AWB_MODE
+ */
 enum CameraMetadataEnumAndroidControlAwbMode : uint32_t {
     ANDROID_CONTROL_AWB_MODE_OFF,
-
     ANDROID_CONTROL_AWB_MODE_AUTO,
-
     ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
-
     ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
-
     ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,
-
     ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
-
     ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT,
-
     ANDROID_CONTROL_AWB_MODE_TWILIGHT,
-
     ANDROID_CONTROL_AWB_MODE_SHADE,
-
 };
 
+/** android.control.captureIntent enumeration values
+ * @see ANDROID_CONTROL_CAPTURE_INTENT
+ */
 enum CameraMetadataEnumAndroidControlCaptureIntent : uint32_t {
     ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM,
-
     ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW,
-
     ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE,
-
     ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD,
-
     ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT,
-
     ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG,
-
     ANDROID_CONTROL_CAPTURE_INTENT_MANUAL,
-
 };
 
+/** android.control.effectMode enumeration values
+ * @see ANDROID_CONTROL_EFFECT_MODE
+ */
 enum CameraMetadataEnumAndroidControlEffectMode : uint32_t {
     ANDROID_CONTROL_EFFECT_MODE_OFF,
-
     ANDROID_CONTROL_EFFECT_MODE_MONO,
-
     ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,
-
     ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,
-
     ANDROID_CONTROL_EFFECT_MODE_SEPIA,
-
     ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,
-
     ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD,
-
     ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD,
-
     ANDROID_CONTROL_EFFECT_MODE_AQUA,
-
 };
 
+/** android.control.mode enumeration values
+ * @see ANDROID_CONTROL_MODE
+ */
 enum CameraMetadataEnumAndroidControlMode : uint32_t {
     ANDROID_CONTROL_MODE_OFF,
-
     ANDROID_CONTROL_MODE_AUTO,
-
     ANDROID_CONTROL_MODE_USE_SCENE_MODE,
-
     ANDROID_CONTROL_MODE_OFF_KEEP_STATE,
-
 };
 
+/** android.control.sceneMode enumeration values
+ * @see ANDROID_CONTROL_SCENE_MODE
+ */
 enum CameraMetadataEnumAndroidControlSceneMode : uint32_t {
-    ANDROID_CONTROL_SCENE_MODE_DISABLED = 0,
-
+    ANDROID_CONTROL_SCENE_MODE_DISABLED                         = 0,
     ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,
-
     ANDROID_CONTROL_SCENE_MODE_ACTION,
-
     ANDROID_CONTROL_SCENE_MODE_PORTRAIT,
-
     ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,
-
     ANDROID_CONTROL_SCENE_MODE_NIGHT,
-
     ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT,
-
     ANDROID_CONTROL_SCENE_MODE_THEATRE,
-
     ANDROID_CONTROL_SCENE_MODE_BEACH,
-
     ANDROID_CONTROL_SCENE_MODE_SNOW,
-
     ANDROID_CONTROL_SCENE_MODE_SUNSET,
-
     ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,
-
     ANDROID_CONTROL_SCENE_MODE_FIREWORKS,
-
     ANDROID_CONTROL_SCENE_MODE_SPORTS,
-
     ANDROID_CONTROL_SCENE_MODE_PARTY,
-
     ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,
-
     ANDROID_CONTROL_SCENE_MODE_BARCODE,
-
     ANDROID_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO,
-
     ANDROID_CONTROL_SCENE_MODE_HDR,
-
     ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT,
-
-    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START = 100,
-
-    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END = 127,
-
+    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START              = 100,
+    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END                = 127,
 };
 
+/** android.control.videoStabilizationMode enumeration values
+ * @see ANDROID_CONTROL_VIDEO_STABILIZATION_MODE
+ */
 enum CameraMetadataEnumAndroidControlVideoStabilizationMode : uint32_t {
     ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
-
     ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON,
-
 };
 
+/** android.control.aeState enumeration values
+ * @see ANDROID_CONTROL_AE_STATE
+ */
 enum CameraMetadataEnumAndroidControlAeState : uint32_t {
     ANDROID_CONTROL_AE_STATE_INACTIVE,
-
     ANDROID_CONTROL_AE_STATE_SEARCHING,
-
     ANDROID_CONTROL_AE_STATE_CONVERGED,
-
     ANDROID_CONTROL_AE_STATE_LOCKED,
-
     ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED,
-
     ANDROID_CONTROL_AE_STATE_PRECAPTURE,
-
 };
 
+/** android.control.afState enumeration values
+ * @see ANDROID_CONTROL_AF_STATE
+ */
 enum CameraMetadataEnumAndroidControlAfState : uint32_t {
     ANDROID_CONTROL_AF_STATE_INACTIVE,
-
     ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN,
-
     ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED,
-
     ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN,
-
     ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED,
-
     ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED,
-
     ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED,
-
 };
 
+/** android.control.awbState enumeration values
+ * @see ANDROID_CONTROL_AWB_STATE
+ */
 enum CameraMetadataEnumAndroidControlAwbState : uint32_t {
     ANDROID_CONTROL_AWB_STATE_INACTIVE,
-
     ANDROID_CONTROL_AWB_STATE_SEARCHING,
-
     ANDROID_CONTROL_AWB_STATE_CONVERGED,
-
     ANDROID_CONTROL_AWB_STATE_LOCKED,
-
 };
 
+/** android.control.aeLockAvailable enumeration values
+ * @see ANDROID_CONTROL_AE_LOCK_AVAILABLE
+ */
 enum CameraMetadataEnumAndroidControlAeLockAvailable : uint32_t {
     ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE,
-
     ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE,
-
 };
 
+/** android.control.awbLockAvailable enumeration values
+ * @see ANDROID_CONTROL_AWB_LOCK_AVAILABLE
+ */
 enum CameraMetadataEnumAndroidControlAwbLockAvailable : uint32_t {
     ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE,
-
     ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE,
-
 };
 
+/** android.control.enableZsl enumeration values
+ * @see ANDROID_CONTROL_ENABLE_ZSL
+ */
 enum CameraMetadataEnumAndroidControlEnableZsl : uint32_t {
     ANDROID_CONTROL_ENABLE_ZSL_FALSE,
-
     ANDROID_CONTROL_ENABLE_ZSL_TRUE,
-
 };
 
+/** android.demosaic.mode enumeration values
+ * @see ANDROID_DEMOSAIC_MODE
+ */
 enum CameraMetadataEnumAndroidDemosaicMode : uint32_t {
     ANDROID_DEMOSAIC_MODE_FAST,
-
     ANDROID_DEMOSAIC_MODE_HIGH_QUALITY,
-
 };
 
+/** android.edge.mode enumeration values
+ * @see ANDROID_EDGE_MODE
+ */
 enum CameraMetadataEnumAndroidEdgeMode : uint32_t {
     ANDROID_EDGE_MODE_OFF,
-
     ANDROID_EDGE_MODE_FAST,
-
     ANDROID_EDGE_MODE_HIGH_QUALITY,
-
     ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG,
-
 };
 
+/** android.flash.mode enumeration values
+ * @see ANDROID_FLASH_MODE
+ */
 enum CameraMetadataEnumAndroidFlashMode : uint32_t {
     ANDROID_FLASH_MODE_OFF,
-
     ANDROID_FLASH_MODE_SINGLE,
-
     ANDROID_FLASH_MODE_TORCH,
-
 };
 
+/** android.flash.state enumeration values
+ * @see ANDROID_FLASH_STATE
+ */
 enum CameraMetadataEnumAndroidFlashState : uint32_t {
     ANDROID_FLASH_STATE_UNAVAILABLE,
-
     ANDROID_FLASH_STATE_CHARGING,
-
     ANDROID_FLASH_STATE_READY,
-
     ANDROID_FLASH_STATE_FIRED,
-
     ANDROID_FLASH_STATE_PARTIAL,
-
 };
 
+/** android.flash.info.available enumeration values
+ * @see ANDROID_FLASH_INFO_AVAILABLE
+ */
 enum CameraMetadataEnumAndroidFlashInfoAvailable : uint32_t {
     ANDROID_FLASH_INFO_AVAILABLE_FALSE,
-
     ANDROID_FLASH_INFO_AVAILABLE_TRUE,
-
 };
 
+/** android.hotPixel.mode enumeration values
+ * @see ANDROID_HOT_PIXEL_MODE
+ */
 enum CameraMetadataEnumAndroidHotPixelMode : uint32_t {
     ANDROID_HOT_PIXEL_MODE_OFF,
-
     ANDROID_HOT_PIXEL_MODE_FAST,
-
     ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY,
-
 };
 
+/** android.lens.opticalStabilizationMode enumeration values
+ * @see ANDROID_LENS_OPTICAL_STABILIZATION_MODE
+ */
 enum CameraMetadataEnumAndroidLensOpticalStabilizationMode : uint32_t {
     ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
-
     ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON,
-
 };
 
+/** android.lens.facing enumeration values
+ * @see ANDROID_LENS_FACING
+ */
 enum CameraMetadataEnumAndroidLensFacing : uint32_t {
     ANDROID_LENS_FACING_FRONT,
-
     ANDROID_LENS_FACING_BACK,
-
     ANDROID_LENS_FACING_EXTERNAL,
-
 };
 
+/** android.lens.state enumeration values
+ * @see ANDROID_LENS_STATE
+ */
 enum CameraMetadataEnumAndroidLensState : uint32_t {
     ANDROID_LENS_STATE_STATIONARY,
-
     ANDROID_LENS_STATE_MOVING,
-
 };
 
+/** android.lens.info.focusDistanceCalibration enumeration values
+ * @see ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ */
 enum CameraMetadataEnumAndroidLensInfoFocusDistanceCalibration : uint32_t {
     ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
-
     ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
-
     ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
-
 };
 
+/** android.noiseReduction.mode enumeration values
+ * @see ANDROID_NOISE_REDUCTION_MODE
+ */
 enum CameraMetadataEnumAndroidNoiseReductionMode : uint32_t {
     ANDROID_NOISE_REDUCTION_MODE_OFF,
-
     ANDROID_NOISE_REDUCTION_MODE_FAST,
-
     ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
-
     ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
-
     ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG,
-
 };
 
+/** android.quirks.partialResult enumeration values
+ * @see ANDROID_QUIRKS_PARTIAL_RESULT
+ */
 enum CameraMetadataEnumAndroidQuirksPartialResult : uint32_t {
     ANDROID_QUIRKS_PARTIAL_RESULT_FINAL,
-
     ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL,
-
 };
 
+/** android.request.metadataMode enumeration values
+ * @see ANDROID_REQUEST_METADATA_MODE
+ */
 enum CameraMetadataEnumAndroidRequestMetadataMode : uint32_t {
     ANDROID_REQUEST_METADATA_MODE_NONE,
-
     ANDROID_REQUEST_METADATA_MODE_FULL,
-
 };
 
+/** android.request.type enumeration values
+ * @see ANDROID_REQUEST_TYPE
+ */
 enum CameraMetadataEnumAndroidRequestType : uint32_t {
     ANDROID_REQUEST_TYPE_CAPTURE,
-
     ANDROID_REQUEST_TYPE_REPROCESS,
-
 };
 
+/** android.request.availableCapabilities enumeration values
+ * @see ANDROID_REQUEST_AVAILABLE_CAPABILITIES
+ */
 enum CameraMetadataEnumAndroidRequestAvailableCapabilities : uint32_t {
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
-
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
-
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING,
-
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW,
-
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING,
-
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS,
-
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE,
-
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING,
-
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT,
-
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO,
-
 };
 
+/** android.scaler.availableFormats enumeration values
+ * @see ANDROID_SCALER_AVAILABLE_FORMATS
+ */
 enum CameraMetadataEnumAndroidScalerAvailableFormats : uint32_t {
-    ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 = 0x20,
-
-    ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE = 0x24,
-
-    ANDROID_SCALER_AVAILABLE_FORMATS_YV12 = 0x32315659,
-
-    ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP = 0x11,
-
-    ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED = 0x22,
-
-    ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888 = 0x23,
-
-    ANDROID_SCALER_AVAILABLE_FORMATS_BLOB = 0x21,
-
+    ANDROID_SCALER_AVAILABLE_FORMATS_RAW16                      = 0x20,
+    ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE                 = 0x24,
+    ANDROID_SCALER_AVAILABLE_FORMATS_YV12                       = 0x32315659,
+    ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP               = 0x11,
+    ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED     = 0x22,
+    ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888              = 0x23,
+    ANDROID_SCALER_AVAILABLE_FORMATS_BLOB                       = 0x21,
 };
 
+/** android.scaler.availableStreamConfigurations enumeration values
+ * @see ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ */
 enum CameraMetadataEnumAndroidScalerAvailableStreamConfigurations : uint32_t {
     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
-
     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT,
-
 };
 
+/** android.scaler.croppingType enumeration values
+ * @see ANDROID_SCALER_CROPPING_TYPE
+ */
 enum CameraMetadataEnumAndroidScalerCroppingType : uint32_t {
     ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY,
-
     ANDROID_SCALER_CROPPING_TYPE_FREEFORM,
-
 };
 
+/** android.sensor.referenceIlluminant1 enumeration values
+ * @see ANDROID_SENSOR_REFERENCE_ILLUMINANT1
+ */
 enum CameraMetadataEnumAndroidSensorReferenceIlluminant1 : uint32_t {
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT = 1,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13,
-
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT               = 1,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT            = 2,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN               = 3,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH                  = 4,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER           = 9,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER         = 10,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE                  = 11,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT   = 12,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT  = 13,
     ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A = 17,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B = 18,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C = 19,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55 = 20,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65 = 21,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75 = 22,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50 = 23,
-
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24,
-
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT      = 15,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A             = 17,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B             = 18,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C             = 19,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55                    = 20,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65                    = 21,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75                    = 22,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50                    = 23,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN    = 24,
 };
 
+/** android.sensor.testPatternMode enumeration values
+ * @see ANDROID_SENSOR_TEST_PATTERN_MODE
+ */
 enum CameraMetadataEnumAndroidSensorTestPatternMode : uint32_t {
     ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
-
     ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,
-
     ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,
-
     ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY,
-
     ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,
-
-    ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256,
-
+    ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1                    = 256,
 };
 
+/** android.sensor.info.colorFilterArrangement enumeration values
+ * @see ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ */
 enum CameraMetadataEnumAndroidSensorInfoColorFilterArrangement : uint32_t {
     ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB,
-
     ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG,
-
     ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG,
-
     ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR,
-
     ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB,
-
 };
 
+/** android.sensor.info.timestampSource enumeration values
+ * @see ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE
+ */
 enum CameraMetadataEnumAndroidSensorInfoTimestampSource : uint32_t {
     ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN,
-
     ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME,
-
 };
 
+/** android.sensor.info.lensShadingApplied enumeration values
+ * @see ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED
+ */
 enum CameraMetadataEnumAndroidSensorInfoLensShadingApplied : uint32_t {
     ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE,
-
     ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE,
-
 };
 
+/** android.shading.mode enumeration values
+ * @see ANDROID_SHADING_MODE
+ */
 enum CameraMetadataEnumAndroidShadingMode : uint32_t {
     ANDROID_SHADING_MODE_OFF,
-
     ANDROID_SHADING_MODE_FAST,
-
     ANDROID_SHADING_MODE_HIGH_QUALITY,
-
 };
 
+/** android.statistics.faceDetectMode enumeration values
+ * @see ANDROID_STATISTICS_FACE_DETECT_MODE
+ */
 enum CameraMetadataEnumAndroidStatisticsFaceDetectMode : uint32_t {
     ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
-
     ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
-
     ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,
-
 };
 
+/** android.statistics.histogramMode enumeration values
+ * @see ANDROID_STATISTICS_HISTOGRAM_MODE
+ */
 enum CameraMetadataEnumAndroidStatisticsHistogramMode : uint32_t {
     ANDROID_STATISTICS_HISTOGRAM_MODE_OFF,
-
     ANDROID_STATISTICS_HISTOGRAM_MODE_ON,
-
 };
 
+/** android.statistics.sharpnessMapMode enumeration values
+ * @see ANDROID_STATISTICS_SHARPNESS_MAP_MODE
+ */
 enum CameraMetadataEnumAndroidStatisticsSharpnessMapMode : uint32_t {
     ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF,
-
     ANDROID_STATISTICS_SHARPNESS_MAP_MODE_ON,
-
 };
 
+/** android.statistics.hotPixelMapMode enumeration values
+ * @see ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE
+ */
 enum CameraMetadataEnumAndroidStatisticsHotPixelMapMode : uint32_t {
     ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF,
-
     ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_ON,
-
 };
 
+/** android.statistics.sceneFlicker enumeration values
+ * @see ANDROID_STATISTICS_SCENE_FLICKER
+ */
 enum CameraMetadataEnumAndroidStatisticsSceneFlicker : uint32_t {
     ANDROID_STATISTICS_SCENE_FLICKER_NONE,
-
     ANDROID_STATISTICS_SCENE_FLICKER_50HZ,
-
     ANDROID_STATISTICS_SCENE_FLICKER_60HZ,
-
 };
 
+/** android.statistics.lensShadingMapMode enumeration values
+ * @see ANDROID_STATISTICS_LENS_SHADING_MAP_MODE
+ */
 enum CameraMetadataEnumAndroidStatisticsLensShadingMapMode : uint32_t {
     ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
-
     ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON,
-
 };
 
+/** android.tonemap.mode enumeration values
+ * @see ANDROID_TONEMAP_MODE
+ */
 enum CameraMetadataEnumAndroidTonemapMode : uint32_t {
     ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
-
     ANDROID_TONEMAP_MODE_FAST,
-
     ANDROID_TONEMAP_MODE_HIGH_QUALITY,
-
     ANDROID_TONEMAP_MODE_GAMMA_VALUE,
-
     ANDROID_TONEMAP_MODE_PRESET_CURVE,
-
 };
 
+/** android.tonemap.presetCurve enumeration values
+ * @see ANDROID_TONEMAP_PRESET_CURVE
+ */
 enum CameraMetadataEnumAndroidTonemapPresetCurve : uint32_t {
     ANDROID_TONEMAP_PRESET_CURVE_SRGB,
-
     ANDROID_TONEMAP_PRESET_CURVE_REC709,
-
 };
 
+/** android.led.transmit enumeration values
+ * @see ANDROID_LED_TRANSMIT
+ */
 enum CameraMetadataEnumAndroidLedTransmit : uint32_t {
     ANDROID_LED_TRANSMIT_OFF,
-
     ANDROID_LED_TRANSMIT_ON,
-
 };
 
+/** android.led.availableLeds enumeration values
+ * @see ANDROID_LED_AVAILABLE_LEDS
+ */
 enum CameraMetadataEnumAndroidLedAvailableLeds : uint32_t {
     ANDROID_LED_AVAILABLE_LEDS_TRANSMIT,
-
 };
 
+/** android.info.supportedHardwareLevel enumeration values
+ * @see ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL
+ */
 enum CameraMetadataEnumAndroidInfoSupportedHardwareLevel : uint32_t {
     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
-
     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
-
     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
-
     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3,
-
 };
 
+/** android.blackLevel.lock enumeration values
+ * @see ANDROID_BLACK_LEVEL_LOCK
+ */
 enum CameraMetadataEnumAndroidBlackLevelLock : uint32_t {
     ANDROID_BLACK_LEVEL_LOCK_OFF,
-
     ANDROID_BLACK_LEVEL_LOCK_ON,
-
 };
 
+/** android.sync.frameNumber enumeration values
+ * @see ANDROID_SYNC_FRAME_NUMBER
+ */
 enum CameraMetadataEnumAndroidSyncFrameNumber : uint32_t {
-    ANDROID_SYNC_FRAME_NUMBER_CONVERGING = -1,
-
-    ANDROID_SYNC_FRAME_NUMBER_UNKNOWN = -2,
-
+    ANDROID_SYNC_FRAME_NUMBER_CONVERGING                        = -1,
+    ANDROID_SYNC_FRAME_NUMBER_UNKNOWN                           = -2,
 };
 
+/** android.sync.maxLatency enumeration values
+ * @see ANDROID_SYNC_MAX_LATENCY
+ */
 enum CameraMetadataEnumAndroidSyncMaxLatency : uint32_t {
-    ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0,
-
-    ANDROID_SYNC_MAX_LATENCY_UNKNOWN = -1,
-
+    ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL                  = 0,
+    ANDROID_SYNC_MAX_LATENCY_UNKNOWN                            = -1,
 };
 
+/** android.depth.availableDepthStreamConfigurations enumeration values
+ * @see ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS
+ */
 enum CameraMetadataEnumAndroidDepthAvailableDepthStreamConfigurations : uint32_t {
     ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
-
     ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT,
-
 };
 
+/** android.depth.depthIsExclusive enumeration values
+ * @see ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
+ */
 enum CameraMetadataEnumAndroidDepthDepthIsExclusive : uint32_t {
     ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE,
-
     ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE,
-
 };
diff --git a/cas/1.0/default/Android.bp b/cas/1.0/default/Android.bp
index 544162b..6da5cc4 100644
--- a/cas/1.0/default/Android.bp
+++ b/cas/1.0/default/Android.bp
@@ -12,6 +12,12 @@
       "TypeConvert.cpp",
     ],
 
+    product_variables: {
+        treble: {
+            cflags: ["-DUSE_VNDBINDER"],
+        },
+    },
+
     compile_multilib: "32",
     init_rc: ["android.hardware.cas@1.0-service.rc"],
 
diff --git a/cas/1.0/default/service.cpp b/cas/1.0/default/service.cpp
index 04a8ad9..3f1df5a 100644
--- a/cas/1.0/default/service.cpp
+++ b/cas/1.0/default/service.cpp
@@ -31,9 +31,11 @@
 int main() {
     ALOGD("android.hardware.cas@1.0-service starting...");
 
+#ifdef USE_VNDBINDER
     // The CAS HAL may communicate to other vendor components via
     // /dev/vndbinder
     android::ProcessState::initWithDriver("/dev/vndbinder");
+#endif // USE_VNDBINDER
 
     configureRpcThreadpool(8, true /* callerWillJoin */);
 
diff --git a/cas/1.0/vts/functional/VtsHalCasV1_0TargetTest.cpp b/cas/1.0/vts/functional/VtsHalCasV1_0TargetTest.cpp
index d3b0f1d..193253a 100644
--- a/cas/1.0/vts/functional/VtsHalCasV1_0TargetTest.cpp
+++ b/cas/1.0/vts/functional/VtsHalCasV1_0TargetTest.cpp
@@ -223,12 +223,26 @@
     sp<ICas> mMediaCas;
     sp<IDescramblerBase> mDescramblerBase;
     sp<MediaCasListener> mCasListener;
+    typedef struct _OobInputTestParams {
+        const SubSample* subSamples;
+        uint32_t numSubSamples;
+        size_t imemSizeActual;
+        uint64_t imemOffset;
+        uint64_t imemSize;
+        uint64_t srcOffset;
+        uint64_t dstOffset;
+    } OobInputTestParams;
 
     ::testing::AssertionResult createCasPlugin(int32_t caSystemId);
     ::testing::AssertionResult openCasSession(std::vector<uint8_t>* sessionId);
-    ::testing::AssertionResult descrambleTestInputBuffer(const sp<IDescrambler>& descrambler,
-                                                         Status* descrambleStatus,
-                                                         sp<IMemory>* hidlInMemory);
+    ::testing::AssertionResult descrambleTestInputBuffer(
+            const sp<IDescrambler>& descrambler,
+            Status* descrambleStatus,
+            sp<IMemory>* hidlInMemory);
+    ::testing::AssertionResult descrambleTestOobInput(
+            const sp<IDescrambler>& descrambler,
+            Status* descrambleStatus,
+            const OobInputTestParams& params);
 };
 
 ::testing::AssertionResult MediaCasHidlTest::createCasPlugin(int32_t caSystemId) {
@@ -332,6 +346,72 @@
     return ::testing::AssertionResult(returnVoid.isOk());
 }
 
+::testing::AssertionResult MediaCasHidlTest::descrambleTestOobInput(
+        const sp<IDescrambler>& descrambler,
+        Status* descrambleStatus,
+        const OobInputTestParams& params) {
+    hidl_vec<SubSample> hidlSubSamples;
+    hidlSubSamples.setToExternal(
+            const_cast<SubSample*>(params.subSamples), params.numSubSamples, false /*own*/);
+
+    sp<MemoryDealer> dealer = new MemoryDealer(params.imemSizeActual, "vts-cas");
+    if (nullptr == dealer.get()) {
+        ALOGE("couldn't get MemoryDealer!");
+        return ::testing::AssertionFailure();
+    }
+
+    sp<IMemory> mem = dealer->allocate(params.imemSizeActual);
+    if (nullptr == mem.get()) {
+        ALOGE("couldn't allocate IMemory!");
+        return ::testing::AssertionFailure();
+    }
+
+    // build hidl_memory from memory heap
+    ssize_t offset;
+    size_t size;
+    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+    if (nullptr == heap.get()) {
+        ALOGE("couldn't get memory heap!");
+        return ::testing::AssertionFailure();
+    }
+
+    native_handle_t* nativeHandle = native_handle_create(1, 0);
+    if (!nativeHandle) {
+        ALOGE("failed to create native handle!");
+        return ::testing::AssertionFailure();
+    }
+    nativeHandle->data[0] = heap->getHeapID();
+
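+    // Use the caller-supplied offset and size verbatim (they may be out of bounds) so the HAL's input validation is exercised.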
+    SharedBuffer srcBuffer = {
+            .heapBase = hidl_memory("ashmem", hidl_handle(nativeHandle), heap->getSize()),
+            .offset = (uint64_t) offset + params.imemOffset,
+            .size = (uint64_t) params.imemSize,
+    };
+
+    DestinationBuffer dstBuffer;
+    dstBuffer.type = BufferType::SHARED_MEMORY;
+    dstBuffer.nonsecureMemory = srcBuffer;
+
+    uint32_t outBytes = 0;  // initialized in case the transaction fails before the callback runs
+    hidl_string detailedError;
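+    // Issue the descramble request; the results arrive through the synchronous callback below.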
+    auto returnVoid = descrambler->descramble(
+        ScramblingControl::EVENKEY /*2*/, hidlSubSamples,
+        srcBuffer,
+        params.srcOffset,
+        dstBuffer,
+        params.dstOffset,
+        [&](Status status, uint32_t bytesWritten, const hidl_string& detailedErr) {
+            *descrambleStatus = status;
+            outBytes = bytesWritten;
+            detailedError = detailedErr;
+        });
+    if (!returnVoid.isOk() || *descrambleStatus != Status::OK) {
+        ALOGI("descramble failed, trans=%s, status=%d, outBytes=%u, error=%s",
+              returnVoid.description().c_str(), *descrambleStatus, outBytes, detailedError.c_str());
+    }
+    return ::testing::AssertionResult(returnVoid.isOk());
+}
+
 TEST_F(MediaCasHidlTest, EnumeratePlugins) {
     description("Test enumerate plugins");
     hidl_vec<HidlCasPluginDescriptor> descriptors;
@@ -613,6 +693,153 @@
     EXPECT_FALSE(mDescramblerBase->requiresSecureDecoderComponent("bad"));
 }
 
+TEST_F(MediaCasHidlTest, TestClearKeyOobFails) {
+    description("Test that oob descramble request fails with expected error");
+
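+    // Provision the ClearKey plugin, open a session and load an ECM, then verify that descramble
+    // requests carrying out-of-range buffer parameters are rejected with BAD_VALUE.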
+    ASSERT_TRUE(createCasPlugin(CLEAR_KEY_SYSTEM_ID));
+
+    auto returnStatus = mMediaCas->provision(hidl_string(PROVISION_STR));
+    EXPECT_TRUE(returnStatus.isOk());
+    EXPECT_EQ(Status::OK, returnStatus);
+
+    std::vector<uint8_t> sessionId;
+    ASSERT_TRUE(openCasSession(&sessionId));
+
+    returnStatus = mDescramblerBase->setMediaCasSession(sessionId);
+    EXPECT_TRUE(returnStatus.isOk());
+    EXPECT_EQ(Status::OK, returnStatus);
+
+    hidl_vec<uint8_t> hidlEcm;
+    hidlEcm.setToExternal(const_cast<uint8_t*>(kEcmBinaryBuffer), sizeof(kEcmBinaryBuffer));
+    returnStatus = mMediaCas->processEcm(sessionId, hidlEcm);
+    EXPECT_TRUE(returnStatus.isOk());
+    EXPECT_EQ(Status::OK, returnStatus);
+
+    sp<IDescrambler> descrambler = IDescrambler::castFrom(mDescramblerBase);
+    ASSERT_NE(nullptr, descrambler.get());
+
+    Status descrambleStatus = Status::OK;
+
+    // test invalid src buffer offset
+    ASSERT_TRUE(descrambleTestOobInput(
+            descrambler,
+            &descrambleStatus,
+            {
+                .subSamples     = kSubSamples,
+                .numSubSamples  = sizeof(kSubSamples)/sizeof(SubSample),
+                .imemSizeActual = sizeof(kInBinaryBuffer),
+                .imemOffset     = 0xcccccc,
+                .imemSize       = sizeof(kInBinaryBuffer),
+                .srcOffset      = 0,
+                .dstOffset      = 0
+            }));
+    EXPECT_EQ(Status::BAD_VALUE, descrambleStatus);
+
+    // test invalid src buffer size
+    ASSERT_TRUE(descrambleTestOobInput(
+            descrambler,
+            &descrambleStatus,
+            {
+                .subSamples     = kSubSamples,
+                .numSubSamples  = sizeof(kSubSamples)/sizeof(SubSample),
+                .imemSizeActual = sizeof(kInBinaryBuffer),
+                .imemOffset     = 0,
+                .imemSize       = 0xcccccc,
+                .srcOffset      = 0,
+                .dstOffset      = 0
+            }));
+    EXPECT_EQ(Status::BAD_VALUE, descrambleStatus);
+
+    // test src buffer size that overflows when added to the offset
+    ASSERT_TRUE(descrambleTestOobInput(
+            descrambler,
+            &descrambleStatus,
+            {
+                .subSamples     = kSubSamples,
+                .numSubSamples  = sizeof(kSubSamples)/sizeof(SubSample),
+                .imemSizeActual = sizeof(kInBinaryBuffer),
+                .imemOffset     = 1,
+                .imemSize       = (uint64_t)-1,
+                .srcOffset      = 0,
+                .dstOffset      = 0
+            }));
+    EXPECT_EQ(Status::BAD_VALUE, descrambleStatus);
+
+    // test invalid srcOffset
+    ASSERT_TRUE(descrambleTestOobInput(
+            descrambler,
+            &descrambleStatus,
+            {
+                .subSamples     = kSubSamples,
+                .numSubSamples  = sizeof(kSubSamples)/sizeof(SubSample),
+                .imemSizeActual = sizeof(kInBinaryBuffer),
+                .imemOffset     = 0,
+                .imemSize       = sizeof(kInBinaryBuffer),
+                .srcOffset      = 0xcccccc,
+                .dstOffset      = 0
+            }));
+    EXPECT_EQ(Status::BAD_VALUE, descrambleStatus);
+
+    // test invalid dstOffset
+    ASSERT_TRUE(descrambleTestOobInput(
+            descrambler,
+            &descrambleStatus,
+            {
+                .subSamples     = kSubSamples,
+                .numSubSamples  = sizeof(kSubSamples)/sizeof(SubSample),
+                .imemSizeActual = sizeof(kInBinaryBuffer),
+                .imemOffset     = 0,
+                .imemSize       = sizeof(kInBinaryBuffer),
+                .srcOffset      = 0,
+                .dstOffset      = 0xcccccc
+            }));
+    EXPECT_EQ(Status::BAD_VALUE, descrambleStatus);
+
+    // test detection of oob subsample sizes
+    const SubSample invalidSubSamples1[] =
+        {{162, 0}, {0, 184}, {0, 0xdddddd}};
+
+    ASSERT_TRUE(descrambleTestOobInput(
+            descrambler,
+            &descrambleStatus,
+            {
+                .subSamples     = invalidSubSamples1,
+                .numSubSamples  = sizeof(invalidSubSamples1)/sizeof(SubSample),
+                .imemSizeActual = sizeof(kInBinaryBuffer),
+                .imemOffset     = 0,
+                .imemSize       = sizeof(kInBinaryBuffer),
+                .srcOffset      = 0,
+                .dstOffset      = 0
+            }));
+    EXPECT_EQ(Status::BAD_VALUE, descrambleStatus);
+
+    // test detection of overflowing subsample sizes
+    const SubSample invalidSubSamples2[] =
+        {{162, 0}, {0, 184}, {2, (uint32_t)-1}};
+
+    ASSERT_TRUE(descrambleTestOobInput(
+            descrambler,
+            &descrambleStatus,
+            {
+                .subSamples     = invalidSubSamples2,
+                .numSubSamples  = sizeof(invalidSubSamples2)/sizeof(SubSample),
+                .imemSizeActual = sizeof(kInBinaryBuffer),
+                .imemOffset     = 0,
+                .imemSize       = sizeof(kInBinaryBuffer),
+                .srcOffset      = 0,
+                .dstOffset      = 0
+            }));
+    EXPECT_EQ(Status::BAD_VALUE, descrambleStatus);
+
+    returnStatus = mDescramblerBase->release();
+    EXPECT_TRUE(returnStatus.isOk());
+    EXPECT_EQ(Status::OK, returnStatus);
+
+    returnStatus = mMediaCas->release();
+    EXPECT_TRUE(returnStatus.isOk());
+    EXPECT_EQ(Status::OK, returnStatus);
+}
+
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
diff --git a/contexthub/1.0/default/Contexthub.cpp b/contexthub/1.0/default/Contexthub.cpp
index 8d10c75..5f83a22 100644
--- a/contexthub/1.0/default/Contexthub.cpp
+++ b/contexthub/1.0/default/Contexthub.cpp
@@ -281,11 +281,11 @@
                 result = TransactionResult::FAILURE;
             }
 
+            // Clear the pending flag before invoking the callback, since the
+            // callback may immediately start a new transaction.
+            mIsTransactionPending = false;
             if (cb != nullptr) {
                 cb->handleTxnResult(mTransactionId, result);
             }
             retVal = 0;
-            mIsTransactionPending = false;
             break;
         }
 
@@ -383,6 +383,7 @@
 
         msg.appName = rxMsg->app_name.id;
         msg.msgType = rxMsg->message_type;
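+        // The default HAL does not track individual host clients, so deliver nanoapp messages to the broadcast endpoint.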
+        msg.hostEndPoint = static_cast<uint16_t>(HostEndPoint::BROADCAST);
         msg.msg = std::vector<uint8_t>(static_cast<const uint8_t *>(rxMsg->message),
                                        static_cast<const uint8_t *>(rxMsg->message) +
                                        rxMsg->message_len);
diff --git a/current.txt b/current.txt
index 67d5ee0..4a943b9 100644
--- a/current.txt
+++ b/current.txt
@@ -262,3 +262,4 @@
 # ABI preserving changes to HALs during Android P
 fb92e2b40f8e9d494e8fd3b4ac18499a3216342e7cff160714c3bbf3660b6e79 android.hardware.gnss@1.0::IGnssConfiguration
 d4c10cb28318dba8efb22231a8c23e86ad8853f85775187c40b42a878a5ef4d5 android.hardware.automotive.vehicle@2.0::types
+cf72ff5a52bfa4d08e9e1000cf3ab5952a2d280c7f13cdad5ab7905c08050766 android.hardware.camera.metadata@3.2::types
diff --git a/keymaster/4.0/Android.bp b/keymaster/4.0/Android.bp
index 34997d2..378204a 100644
--- a/keymaster/4.0/Android.bp
+++ b/keymaster/4.0/Android.bp
@@ -16,12 +16,10 @@
     ],
     types: [
         "HardwareAuthToken",
-        "HardwareAuthTokenMacMethod",
         "KeyCharacteristics",
         "KeyParameter",
         "KeyPurpose",
         "Tag",
-        "TagType",
     ],
     gen_java: false,
 }
diff --git a/tests/memory/1.0/Android.bp b/tests/memory/1.0/Android.bp
index 5038664..cbee247 100644
--- a/tests/memory/1.0/Android.bp
+++ b/tests/memory/1.0/Android.bp
@@ -7,9 +7,9 @@
         "IMemoryTest.hal",
     ],
     interfaces: [
-        "android.hidl.memory.token@1.0",
-        "android.hidl.memory.block@1.0",
         "android.hidl.base@1.0",
+        "android.hidl.memory.block@1.0",
+        "android.hidl.memory.token@1.0",
     ],
     gen_java: false,
 }