am 1d0c7e05: am 6e5236ee: am 5c8f84a1: am 3a18bccc: (-s ours) am 431cd416: (-s ours) Reconcile with jb-mr1-release - do not merge

* commit '1d0c7e05d80ab18c1a859782313448dc6a31bab9':
diff --git a/include/hardware/audio_effect.h b/include/hardware/audio_effect.h
index 46e323d..2940b1a 100644
--- a/include/hardware/audio_effect.h
+++ b/include/hardware/audio_effect.h
@@ -61,8 +61,9 @@
 #define EFFECT_UUID_INITIALIZER { 0xec7178ec, 0xe5e1, 0x4432, 0xa3f4, \
                                   { 0x46, 0x57, 0xe6, 0x79, 0x52, 0x10 } }
 static const effect_uuid_t EFFECT_UUID_NULL_ = EFFECT_UUID_INITIALIZER;
-const effect_uuid_t * const EFFECT_UUID_NULL = &EFFECT_UUID_NULL_;
-const char * const EFFECT_UUID_NULL_STR = "ec7178ec-e5e1-4432-a3f4-4657e6795210";
+static const effect_uuid_t * const EFFECT_UUID_NULL = &EFFECT_UUID_NULL_;
+static const char * const EFFECT_UUID_NULL_STR = "ec7178ec-e5e1-4432-a3f4-4657e6795210";
+
 
 // The effect descriptor contains necessary information to facilitate the enumeration of the effect
 // engines present in a library.
@@ -873,8 +874,10 @@
 //      Effect library interface
 /////////////////////////////////////////////////
 
-// Effect library interface version 2.0
-#define EFFECT_LIBRARY_API_VERSION EFFECT_MAKE_API_VERSION(2,0)
+// Effect library interface version 3.0
+// Note that EffectsFactory.c only checks the major version component, so changes to the minor
+// number can only be used for fully backwards-compatible changes.
+#define EFFECT_LIBRARY_API_VERSION EFFECT_MAKE_API_VERSION(3,0)
 
 #define AUDIO_EFFECT_LIBRARY_TAG ((('A') << 24) | (('E') << 16) | (('L') << 8) | ('T'))
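
A minimal compatibility-check sketch for the version note above (not part of the patch): only the major component of EFFECT_LIBRARY_API_VERSION is significant, and the major-in-upper-16-bits packing of EFFECT_MAKE_API_VERSION is assumed here.

#include <stdbool.h>
#include <stdint.h>
#include <hardware/audio_effect.h>

/* Illustrative helper: a library is usable only if its major API version
 * matches the major component of the framework's EFFECT_LIBRARY_API_VERSION. */
static bool effect_library_version_compatible(uint32_t library_version)
{
    return (library_version >> 16) == (((uint32_t)EFFECT_LIBRARY_API_VERSION) >> 16);
}
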
 
@@ -893,57 +896,6 @@
 
     ////////////////////////////////////////////////////////////////////////////////
     //
-    //    Function:        query_num_effects
-    //
-    //    Description:    Returns the number of different effects exposed by the
-    //          library. Each effect must have a unique effect uuid (see
-    //          effect_descriptor_t). This function together with EffectQueryEffect()
-    //          is used to enumerate all effects present in the library.
-    //
-    //    Input/Output:
-    //          pNumEffects:    address where the number of effects should be returned.
-    //
-    //    Output:
-    //        returned value:    0          successful operation.
-    //                          -ENODEV     library failed to initialize
-    //                          -EINVAL     invalid pNumEffects
-    //        *pNumEffects:     updated with number of effects in library
-    //
-    ////////////////////////////////////////////////////////////////////////////////
-    int32_t (*query_num_effects)(uint32_t *pNumEffects);
-
-    ////////////////////////////////////////////////////////////////////////////////
-    //
-    //    Function:        query_effect
-    //
-    //    Description:    Returns the descriptor of the effect engine which index is
-    //          given as argument.
-    //          See effect_descriptor_t for details on effect descriptors.
-    //          This function together with EffectQueryNumberEffects() is used to enumerate all
-    //          effects present in the library. The enumeration sequence is:
-    //              EffectQueryNumberEffects(&num_effects);
-    //              for (i = 0; i < num_effects; i++)
-    //                  EffectQueryEffect(i,...);
-    //
-    //    Input/Output:
-    //          index:          index of the effect
-    //          pDescriptor:    address where to return the effect descriptor.
-    //
-    //    Output:
-    //        returned value:    0          successful operation.
-    //                          -ENODEV     library failed to initialize
-    //                          -EINVAL     invalid pDescriptor or index
-    //                          -ENOSYS     effect list has changed since last execution of
-    //                                      EffectQueryNumberEffects()
-    //                          -ENOENT     no more effect available
-    //        *pDescriptor:     updated with the effect descriptor.
-    //
-    ////////////////////////////////////////////////////////////////////////////////
-    int32_t (*query_effect)(uint32_t index,
-                            effect_descriptor_t *pDescriptor);
-
-    ////////////////////////////////////////////////////////////////////////////////
-    //
     //    Function:        create_effect
     //
     //    Description:    Creates an effect engine of the specified implementation uuid and
diff --git a/include/hardware/audio_policy.h b/include/hardware/audio_policy.h
index 9e145f8..c635ebf 100644
--- a/include/hardware/audio_policy.h
+++ b/include/hardware/audio_policy.h
@@ -229,11 +229,15 @@
     int (*set_effect_enabled)(struct audio_policy *pol, int id, bool enabled);
 
     bool (*is_stream_active)(const struct audio_policy *pol,
-                             audio_stream_type_t stream,
-                             uint32_t in_past_ms);
+            audio_stream_type_t stream,
+            uint32_t in_past_ms);
+
+    bool (*is_stream_active_remotely)(const struct audio_policy *pol,
+            audio_stream_type_t stream,
+            uint32_t in_past_ms);
 
     bool (*is_source_active)(const struct audio_policy *pol,
-                             audio_source_t source);
+            audio_source_t source);
 
     /* dump state */
     int (*dump)(const struct audio_policy *pol, int fd);
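
A hedged usage sketch for the new is_stream_active_remotely() entry point (not part of the patch): pol is assumed to be a fully initialized audio policy instance, and AUDIO_STREAM_MUSIC comes from system/audio.h.

#include <stdbool.h>
#include <system/audio.h>
#include <hardware/audio_policy.h>

static bool music_active_remotely(const struct audio_policy *pol)
{
    /* true if MUSIC was active on a remote device within the last 5 seconds */
    return pol->is_stream_active_remotely(pol, AUDIO_STREAM_MUSIC, 5000);
}
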
diff --git a/include/hardware/bluetooth.h b/include/hardware/bluetooth.h
index be7f0b1..8aaecad 100644
--- a/include/hardware/bluetooth.h
+++ b/include/hardware/bluetooth.h
@@ -43,6 +43,7 @@
 #define BT_PROFILE_HIDHOST_ID "hidhost"
 #define BT_PROFILE_PAN_ID "pan"
 
+#define BT_PROFILE_GATT_ID "gatt"
 
 /** Bluetooth Address */
 typedef struct {
@@ -51,7 +52,7 @@
 
 /** Bluetooth Device Name */
 typedef struct {
-    uint8_t name[248];
+    uint8_t name[249];
 } __attribute__((packed))bt_bdname_t;
 
 /** Bluetooth Adapter Visibility Modes*/
@@ -115,6 +116,15 @@
    char name[256]; // what's the maximum length
 } bt_service_record_t;
 
+
+/** Bluetooth Remote Version info */
+typedef struct
+{
+   int version;
+   int sub_ver;
+   int manufacturer;
+} bt_remote_version_t;
+
 /* Bluetooth Adapter and Remote Device property types */
 typedef enum {
     /* Properties common to both adapter and remote device */
@@ -189,6 +199,13 @@
      * Data type   - int32_t.
      */
     BT_PROPERTY_REMOTE_RSSI,
+    /**
+     * Description - Remote version info
+     * Access mode - SET/GET.
+     * Data type   - bt_remote_version_t.
+     */
+    BT_PROPERTY_REMOTE_VERSION_INFO,
 
     BT_PROPERTY_REMOTE_DEVICE_TIMESTAMP = 0xFF,
 } bt_property_type_t;
@@ -299,6 +316,10 @@
 /* Receive any HCI event from controller. Must be in DUT Mode for this callback to be received */
 typedef void (*dut_mode_recv_callback)(uint16_t opcode, uint8_t *buf, uint8_t len);
 
+/* LE Test mode callbacks
+ * This callback shall be invoked whenever le_tx_test, le_rx_test or le_test_end is invoked.
+ * The num_packets field is valid only for the le_test_end command. */
+typedef void (*le_test_mode_callback)(bt_status_t status, uint16_t num_packets);
 /** TODO: Add callbacks for Link Up/Down and other generic
   *  notifications/callbacks */
 
@@ -317,6 +338,7 @@
     acl_state_changed_callback acl_state_changed_cb;
     callback_thread_event thread_evt_cb;
     dut_mode_recv_callback dut_mode_recv_cb;
+    le_test_mode_callback le_test_mode_cb;
 } bt_callbacks_t;
 
 /** NOTE: By default, no profiles are initialized at the time of init/enable.
@@ -424,6 +446,10 @@
 
     /* Send any test HCI (vendor-specific) command to the controller. Must be in DUT Mode */
     int (*dut_mode_send)(uint16_t opcode, uint8_t *buf, uint8_t len);
+    /** BLE Test Mode APIs */
+    /* opcode MUST be one of: LE_Receiver_Test, LE_Transmitter_Test, LE_Test_End */
+    int (*le_test_mode)(uint16_t opcode, uint8_t *buf, uint8_t len);
+
 } bt_interface_t;
 
 /** TODO: Need to add APIs for Service Discovery, Service authorization and
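
A hedged sketch of registering the new LE test mode callback (not part of the patch): bt_callbacks_t members not shown in this hunk are left zeroed, the leading size field is assumed from the stock header, and bt_if stands for a bt_interface_t obtained from the Bluetooth hardware module elsewhere.

#include <hardware/bluetooth.h>

static void le_test_mode_cb(bt_status_t status, uint16_t num_packets)
{
    /* num_packets is only meaningful after an LE_Test_End command */
    (void)status;
    (void)num_packets;
}

static bt_callbacks_t callbacks = {
    .size            = sizeof(bt_callbacks_t),   /* assumed first member */
    .le_test_mode_cb = le_test_mode_cb,
    /* remaining callbacks omitted for brevity */
};

void example_register_callbacks(const bt_interface_t *bt_if)
{
    bt_if->init(&callbacks);
}
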
diff --git a/include/hardware/bt_gatt.h b/include/hardware/bt_gatt.h
new file mode 100644
index 0000000..42e14c2
--- /dev/null
+++ b/include/hardware/bt_gatt.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_INCLUDE_BT_GATT_H
+#define ANDROID_INCLUDE_BT_GATT_H
+
+#include <stdint.h>
+#include "bt_gatt_client.h"
+#include "bt_gatt_server.h"
+
+__BEGIN_DECLS
+
+/** BT-GATT callbacks */
+typedef struct {
+    /** Set to sizeof(btgatt_callbacks_t) */
+    size_t size;
+
+    /** GATT Client callbacks */
+    const btgatt_client_callbacks_t* client;
+
+    /** GATT Server callbacks */
+    const btgatt_server_callbacks_t* server;
+} btgatt_callbacks_t;
+
+/** Represents the standard Bluetooth GATT interface. */
+typedef struct {
+    /** Set to sizeof(btgatt_interface_t) */
+    size_t          size;
+
+    /**
+     * Initializes the interface and provides callback routines
+     */
+    bt_status_t (*init)( const btgatt_callbacks_t* callbacks );
+
+    /** Closes the interface */
+    void (*cleanup)( void );
+
+    /** Pointer to the GATT client interface methods.*/
+    const btgatt_client_interface_t* client;
+
+    /** Pointer to the GATT server interface methods.*/
+    const btgatt_server_interface_t* server;
+} btgatt_interface_t;
+
+__END_DECLS
+
+#endif /* ANDROID_INCLUDE_BT_GATT_H */
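
A hedged sketch of wiring client and server callback tables into the combined GATT interface (not part of the patch): the callback tables are assumed to be defined elsewhere, and the interface pointer is fetched through bt_interface_t's get_profile_interface() using the BT_PROFILE_GATT_ID added above.

#include <hardware/bluetooth.h>
#include <hardware/bt_gatt.h>

extern const btgatt_client_callbacks_t client_callbacks;   /* defined elsewhere */
extern const btgatt_server_callbacks_t server_callbacks;   /* defined elsewhere */

static const btgatt_callbacks_t gatt_callbacks = {
    .size   = sizeof(btgatt_callbacks_t),
    .client = &client_callbacks,
    .server = &server_callbacks,
};

bt_status_t example_init_gatt(const bt_interface_t *bt_if)
{
    const btgatt_interface_t *gatt =
            (const btgatt_interface_t *)bt_if->get_profile_interface(BT_PROFILE_GATT_ID);
    return gatt->init(&gatt_callbacks);
}
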
diff --git a/include/hardware/bt_gatt_client.h b/include/hardware/bt_gatt_client.h
new file mode 100644
index 0000000..8b49f59
--- /dev/null
+++ b/include/hardware/bt_gatt_client.h
@@ -0,0 +1,275 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_INCLUDE_BT_GATT_CLIENT_H
+#define ANDROID_INCLUDE_BT_GATT_CLIENT_H
+
+#include <stdint.h>
+#include "bt_gatt_types.h"
+
+__BEGIN_DECLS
+
+/**
+ * Buffer sizes for maximum attribute length and maximum read/write
+ * operation buffer size.
+ */
+#define BTGATT_MAX_ATTR_LEN 600
+
+/** Buffer type for unformatted reads/writes */
+typedef struct
+{
+    uint8_t             value[BTGATT_MAX_ATTR_LEN];
+    uint16_t            len;
+} btgatt_unformatted_value_t;
+
+/** Parameters for GATT read operations */
+typedef struct
+{
+    btgatt_srvc_id_t    srvc_id;
+    btgatt_char_id_t    char_id;
+    bt_uuid_t           descr_id;
+    btgatt_unformatted_value_t value;
+    uint16_t            value_type;
+    uint8_t             status;
+} btgatt_read_params_t;
+
+/** Parameters for GATT write operations */
+typedef struct
+{
+    btgatt_srvc_id_t    srvc_id;
+    btgatt_char_id_t    char_id;
+    bt_uuid_t           descr_id;
+    uint8_t             status;
+} btgatt_write_params_t;
+
+/** Attribute change notification parameters */
+typedef struct
+{
+    uint8_t             value[BTGATT_MAX_ATTR_LEN];
+    bt_bdaddr_t         bda;
+    btgatt_srvc_id_t    srvc_id;
+    btgatt_char_id_t    char_id;
+    uint16_t            len;
+    uint8_t             is_notify;
+} btgatt_notify_params_t;
+
+typedef struct
+{
+    bt_bdaddr_t        *bda1;
+    bt_uuid_t          *uuid1;
+    uint16_t            u1;
+    uint16_t            u2;
+    uint16_t            u3;
+    uint16_t            u4;
+    uint16_t            u5;
+} btgatt_test_params_t;
+
+/** BT-GATT Client callback structure. */
+
+/** Callback invoked in response to register_client */
+typedef void (*register_client_callback)(int status, int client_if,
+                bt_uuid_t *app_uuid);
+
+/** Callback for scan results */
+typedef void (*scan_result_callback)(bt_bdaddr_t* bda, int rssi, uint8_t* adv_data);
+
+/** GATT open callback invoked in response to open */
+typedef void (*connect_callback)(int conn_id, int status, int client_if, bt_bdaddr_t* bda);
+
+/** Callback invoked in response to close */
+typedef void (*disconnect_callback)(int conn_id, int status,
+                int client_if, bt_bdaddr_t* bda);
+
+/**
+ * Invoked in response to search_service when the GATT service search
+ * has been completed.
+ */
+typedef void (*search_complete_callback)(int conn_id, int status);
+
+/** Reports GATT services on a remote device */
+typedef void (*search_result_callback)( int conn_id, btgatt_srvc_id_t *srvc_id);
+
+/** GATT characteristic enumeration result callback */
+typedef void (*get_characteristic_callback)(int conn_id, int status,
+                btgatt_srvc_id_t *srvc_id, btgatt_char_id_t *char_id,
+                int char_prop);
+
+/** GATT descriptor enumeration result callback */
+typedef void (*get_descriptor_callback)(int conn_id, int status,
+                btgatt_srvc_id_t *srvc_id, btgatt_char_id_t *char_id,
+                bt_uuid_t *descr_id);
+
+/** GATT included service enumeration result callback */
+typedef void (*get_included_service_callback)(int conn_id, int status,
+                btgatt_srvc_id_t *srvc_id, btgatt_srvc_id_t *incl_srvc_id);
+
+/** Callback invoked in response to [de]register_for_notification */
+typedef void (*register_for_notification_callback)(int conn_id,
+                int registered, int status, btgatt_srvc_id_t *srvc_id,
+                btgatt_char_id_t *char_id);
+
+/**
+ * Remote device notification callback, invoked when a remote device sends
+ * a notification or indication that a client has registered for.
+ */
+typedef void (*notify_callback)(int conn_id, btgatt_notify_params_t *p_data);
+
+/** Reports result of a GATT read operation */
+typedef void (*read_characteristic_callback)(int conn_id, int status,
+                btgatt_read_params_t *p_data);
+
+/** GATT write characteristic operation callback */
+typedef void (*write_characteristic_callback)(int conn_id, int status,
+                btgatt_write_params_t *p_data);
+
+/** GATT execute prepared write callback */
+typedef void (*execute_write_callback)(int conn_id, int status);
+
+/** Callback invoked in response to read_descriptor */
+typedef void (*read_descriptor_callback)(int conn_id, int status,
+                btgatt_read_params_t *p_data);
+
+/** Callback invoked in response to write_descriptor */
+typedef void (*write_descriptor_callback)(int conn_id, int status,
+                btgatt_write_params_t *p_data);
+
+/** Callback triggered in response to read_remote_rssi */
+typedef void (*read_remote_rssi_callback)(int client_if, bt_bdaddr_t* bda,
+                                          int rssi, int status);
+
+typedef struct {
+    register_client_callback            register_client_cb;
+    scan_result_callback                scan_result_cb;
+    connect_callback                    open_cb;
+    disconnect_callback                 close_cb;
+    search_complete_callback            search_complete_cb;
+    search_result_callback              search_result_cb;
+    get_characteristic_callback         get_characteristic_cb;
+    get_descriptor_callback             get_descriptor_cb;
+    get_included_service_callback       get_included_service_cb;
+    register_for_notification_callback  register_for_notification_cb;
+    notify_callback                     notify_cb;
+    read_characteristic_callback        read_characteristic_cb;
+    write_characteristic_callback       write_characteristic_cb;
+    read_descriptor_callback            read_descriptor_cb;
+    write_descriptor_callback           write_descriptor_cb;
+    execute_write_callback              execute_write_cb;
+    read_remote_rssi_callback           read_remote_rssi_cb;
+} btgatt_client_callbacks_t;
+
+/** Represents the standard BT-GATT client interface. */
+
+typedef struct {
+    /** Registers a GATT client application with the stack */
+    bt_status_t (*register_client)( bt_uuid_t *uuid );
+
+    /** Unregister a client application from the stack */
+    bt_status_t (*unregister_client)(int client_if );
+
+    /** Start or stop LE device scanning */
+    bt_status_t (*scan)( int client_if, bool start );
+
+    /** Create a connection to a remote LE or dual-mode device */
+    bt_status_t (*connect)( int client_if, const bt_bdaddr_t *bd_addr,
+                         bool is_direct );
+
+    /** Disconnect a remote device or cancel a pending connection */
+    bt_status_t (*disconnect)( int client_if, const bt_bdaddr_t *bd_addr,
+                    int conn_id);
+
+    /** Clear the attribute cache for a given device */
+    bt_status_t (*refresh)( int client_if, const bt_bdaddr_t *bd_addr );
+
+    /**
+     * Enumerate all GATT services on a connected device.
+     * Optionally, the results can be filtered for a given UUID.
+     */
+    bt_status_t (*search_service)(int conn_id, bt_uuid_t *filter_uuid );
+
+    /**
+     * Enumerate included services for a given service.
+     * Set start_incl_srvc_id to NULL to get the first included service.
+     */
+    bt_status_t (*get_included_service)( int conn_id, btgatt_srvc_id_t *srvc_id,
+                                         btgatt_srvc_id_t *start_incl_srvc_id);
+
+    /**
+     * Enumerate characteristics for a given service.
+     * Set start_char_id to NULL to get the first characteristic.
+     */
+    bt_status_t (*get_characteristic)( int conn_id,
+                    btgatt_srvc_id_t *srvc_id, btgatt_char_id_t *start_char_id);
+
+    /**
+     * Enumerate descriptors for a given characteristic.
+     * Set start_descr_id to NULL to get the first descriptor.
+     */
+    bt_status_t (*get_descriptor)( int conn_id,
+                    btgatt_srvc_id_t *srvc_id, btgatt_char_id_t *char_id,
+                    bt_uuid_t *start_descr_id);
+
+    /** Read a characteristic on a remote device */
+    bt_status_t (*read_characteristic)( int conn_id,
+                    btgatt_srvc_id_t *srvc_id, btgatt_char_id_t *char_id,
+                    int auth_req );
+
+    /** Write a remote characteristic */
+    bt_status_t (*write_characteristic)(int conn_id,
+                    btgatt_srvc_id_t *srvc_id, btgatt_char_id_t *char_id,
+                    int write_type, int len, int auth_req,
+                    char* p_value);
+
+    /** Read the descriptor for a given characteristic */
+    bt_status_t (*read_descriptor)(int conn_id,
+                    btgatt_srvc_id_t *srvc_id, btgatt_char_id_t *char_id,
+                    bt_uuid_t *descr_id, int auth_req);
+
+    /** Write a remote descriptor for a given characteristic */
+    bt_status_t (*write_descriptor)( int conn_id,
+                    btgatt_srvc_id_t *srvc_id, btgatt_char_id_t *char_id,
+                    bt_uuid_t *descr_id, int write_type, int len,
+                    int auth_req, char* p_value);
+
+    /** Execute a prepared write operation */
+    bt_status_t (*execute_write)(int conn_id, int execute);
+
+    /**
+     * Register to receive notifications or indications for a given
+     * characteristic
+     */
+    bt_status_t (*register_for_notification)( int client_if,
+                    const bt_bdaddr_t *bd_addr, btgatt_srvc_id_t *srvc_id,
+                    btgatt_char_id_t *char_id);
+
+    /** Deregister a previous request for notifications/indications */
+    bt_status_t (*deregister_for_notification)( int client_if,
+                    const bt_bdaddr_t *bd_addr, btgatt_srvc_id_t *srvc_id,
+                    btgatt_char_id_t *char_id);
+
+    /** Request RSSI for a given remote device */
+    bt_status_t (*read_remote_rssi)( int client_if, const bt_bdaddr_t *bd_addr);
+
+    /** Determine the type of the remote device (LE, BR/EDR, Dual-mode) */
+    int (*get_device_type)( const bt_bdaddr_t *bd_addr );
+
+    /** Test mode interface */
+    bt_status_t (*test_command)( int command, btgatt_test_params_t* params);
+} btgatt_client_interface_t;
+
+__END_DECLS
+
+#endif /* ANDROID_INCLUDE_BT_GATT_CLIENT_H */
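
A hedged sketch of the callback-driven characteristic enumeration described above (not part of the patch): gatt_client stands in for however the client interface pointer was obtained, and only the two callbacks involved in the enumeration are shown.

#include <stddef.h>
#include <hardware/bluetooth.h>
#include <hardware/bt_gatt.h>

extern const btgatt_client_interface_t *gatt_client;   /* obtained elsewhere */

/* Service discovery result: start enumeration with a NULL start_char_id. */
static void search_result_cb(int conn_id, btgatt_srvc_id_t *srvc_id)
{
    gatt_client->get_characteristic(conn_id, srvc_id, NULL);
}

/* Each successful callback delivers one characteristic; passing it back as
 * start_char_id requests the next one, until a non-zero status ends the loop. */
static void get_characteristic_cb(int conn_id, int status,
        btgatt_srvc_id_t *srvc_id, btgatt_char_id_t *char_id, int char_prop)
{
    (void)char_prop;
    if (status == 0)
        gatt_client->get_characteristic(conn_id, srvc_id, char_id);
}
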
diff --git a/include/hardware/bt_gatt_server.h b/include/hardware/bt_gatt_server.h
new file mode 100644
index 0000000..0a5b23b
--- /dev/null
+++ b/include/hardware/bt_gatt_server.h
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_INCLUDE_BT_GATT_SERVER_H
+#define ANDROID_INCLUDE_BT_GATT_SERVER_H
+
+#include <stdint.h>
+
+#include "bt_gatt_types.h"
+
+__BEGIN_DECLS
+
+/** GATT value type used in response to remote read requests */
+typedef struct
+{
+    uint8_t           value[BTGATT_MAX_ATTR_LEN];
+    uint16_t          handle;
+    uint16_t          offset;
+    uint16_t          len;
+    uint8_t           auth_req;
+} btgatt_value_t;
+
+/** GATT remote read request response type */
+typedef union
+{
+    btgatt_value_t attr_value;
+    uint16_t            handle;
+} btgatt_response_t;
+
+/** BT-GATT Server callback structure. */
+
+/** Callback invoked in response to register_server */
+typedef void (*register_server_callback)(int status, int server_if,
+                bt_uuid_t *app_uuid);
+
+/** Callback indicating that a remote device has connected or been disconnected */
+typedef void (*connection_callback)(int conn_id, int connected,
+                                    bt_bdaddr_t *bda);
+
+/** Callback invoked in response to create_service */
+typedef void (*service_added_callback)(int status, int server_if,
+                btgatt_srvc_id_t *srvc_id, int srvc_handle);
+
+/** Callback indicating that an included service has been added to a service */
+typedef void (*included_service_added_callback)(int status, int server_if,
+                int srvc_handle, int incl_srvc_handle);
+
+/** Callback invoked when a characteristic has been added to a service */
+typedef void (*characteristic_added_callback)(int status, int server_if,
+                bt_uuid_t *uuid, int srvc_handle, int char_handle);
+
+/** Callback invoked when a descriptor has been added to a characteristic */
+typedef void (*descriptor_added_callback)(int status, int server_if,
+                bt_uuid_t *uuid, int srvc_handle, int descr_handle);
+
+/** Callback invoked in response to start_service */
+typedef void (*service_started_callback)(int status, int server_if,
+                                         int srvc_handle);
+
+/** Callback invoked in response to stop_service */
+typedef void (*service_stopped_callback)(int status, int server_if,
+                                         int srvc_handle);
+
+/** Callback triggered when a service has been deleted */
+typedef void (*service_deleted_callback)(int status, int server_if,
+                                         int srvc_handle);
+
+/**
+ * Callback invoked when a remote device has requested to read a characteristic
+ * or descriptor. The application must respond by calling send_response
+ */
+typedef void (*request_read_callback)(int conn_id, int trans_id, bt_bdaddr_t *bda,
+                                      int attr_handle, int offset, bool is_long);
+
+/**
+ * Callback invoked when a remote device has requested to write to a
+ * characteristic or descriptor.
+ */
+typedef void (*request_write_callback)(int conn_id, int trans_id, bt_bdaddr_t *bda,
+                                       int attr_handle, int offset, int length,
+                                       bool need_rsp, bool is_prep, uint8_t* value);
+
+/** Callback invoked when a previously prepared write is to be executed */
+typedef void (*request_exec_write_callback)(int conn_id, int trans_id,
+                                            bt_bdaddr_t *bda, int exec_write);
+
+/**
+ * Callback triggered in response to send_response if the remote device
+ * sends a confirmation.
+ */
+typedef void (*response_confirmation_callback)(int status, int handle);
+
+typedef struct {
+    register_server_callback        register_server_cb;
+    connection_callback             connection_cb;
+    service_added_callback          service_added_cb;
+    included_service_added_callback included_service_added_cb;
+    characteristic_added_callback   characteristic_added_cb;
+    descriptor_added_callback       descriptor_added_cb;
+    service_started_callback        service_started_cb;
+    service_stopped_callback        service_stopped_cb;
+    service_deleted_callback        service_deleted_cb;
+    request_read_callback           request_read_cb;
+    request_write_callback          request_write_cb;
+    request_exec_write_callback     request_exec_write_cb;
+    response_confirmation_callback  response_confirmation_cb;
+} btgatt_server_callbacks_t;
+
+/** Represents the standard BT-GATT server interface. */
+typedef struct {
+    /** Registers a GATT server application with the stack */
+    bt_status_t (*register_server)( bt_uuid_t *uuid );
+
+    /** Unregister a server application from the stack */
+    bt_status_t (*unregister_server)(int server_if );
+
+    /** Create a connection to a remote peripheral */
+    bt_status_t (*connect)(int server_if, const bt_bdaddr_t *bd_addr, bool is_direct );
+
+    /** Disconnect an established connection or cancel a pending one */
+    bt_status_t (*disconnect)(int server_if, const bt_bdaddr_t *bd_addr,
+                    int conn_id );
+
+    /** Create a new service */
+    bt_status_t (*add_service)( int server_if, btgatt_srvc_id_t *srvc_id, int num_handles);
+
+    /** Assign an included service to its parent service */
+    bt_status_t (*add_included_service)( int server_if, int service_handle, int included_handle);
+
+    /** Add a characteristic to a service */
+    bt_status_t (*add_characteristic)( int server_if,
+                    int service_handle, bt_uuid_t *uuid,
+                    int properties, int permissions);
+
+    /** Add a descriptor to a given service */
+    bt_status_t (*add_descriptor)(int server_if, int service_handle,
+                                  bt_uuid_t *uuid, int permissions);
+
+    /** Starts a local service */
+    bt_status_t (*start_service)(int server_if, int service_handle,
+                                 int transport);
+
+    /** Stops a local service */
+    bt_status_t (*stop_service)(int server_if, int service_handle);
+
+    /** Delete a local service */
+    bt_status_t (*delete_service)(int server_if, int service_handle);
+
+    /** Send value indication to a remote device */
+    bt_status_t (*send_indication)(int server_if, int attribute_handle,
+                                   int conn_id, int len, int confirm,
+                                   char* p_value);
+
+    /** Send a response to a read/write operation */
+    bt_status_t (*send_response)(int conn_id, int trans_id,
+                                 int status, btgatt_response_t *response);
+} btgatt_server_interface_t;
+
+__END_DECLS
+
+#endif /* ANDROID_INCLUDE_BT_GATT_SERVER_H */
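
A hedged sketch of the create/populate/start sequence for a local service, driven from the server callbacks (not part of the patch): gatt_server, the UUIDs, the handle count, and the property/permission/transport values are illustrative assumptions.

#include <hardware/bluetooth.h>
#include <hardware/bt_gatt.h>

extern const btgatt_server_interface_t *gatt_server;   /* obtained elsewhere */
extern bt_uuid_t service_uuid, char_uuid;              /* illustrative UUIDs */

static void register_server_cb(int status, int server_if, bt_uuid_t *app_uuid)
{
    (void)app_uuid;
    if (status != 0)
        return;
    btgatt_srvc_id_t srvc_id = {
        .id         = { .uuid = service_uuid, .inst_id = 0 },
        .is_primary = 1,                    /* primary service */
    };
    gatt_server->add_service(server_if, &srvc_id, 4 /* handle count, illustrative */);
}

static void service_added_cb(int status, int server_if,
                             btgatt_srvc_id_t *srvc_id, int srvc_handle)
{
    (void)srvc_id;
    if (status != 0)
        return;
    /* properties/permissions are stack-defined bit flags; values illustrative */
    gatt_server->add_characteristic(server_if, srvc_handle, &char_uuid,
                                    0x02 /* read */, 0x01 /* read permission */);
}

static void characteristic_added_cb(int status, int server_if, bt_uuid_t *uuid,
                                    int srvc_handle, int char_handle)
{
    (void)uuid;
    (void)char_handle;
    if (status == 0)
        gatt_server->start_service(server_if, srvc_handle, 0 /* transport, stack-specific */);
}
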
diff --git a/include/hardware/bt_gatt_types.h b/include/hardware/bt_gatt_types.h
new file mode 100644
index 0000000..fee9bb5
--- /dev/null
+++ b/include/hardware/bt_gatt_types.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_INCLUDE_BT_GATT_TYPES_H
+#define ANDROID_INCLUDE_BT_GATT_TYPES_H
+
+#include <stdint.h>
+#include <stdbool.h>
+
+__BEGIN_DECLS
+
+/**
+ * GATT Service types
+ */
+#define BTGATT_SERVICE_TYPE_PRIMARY 0
+#define BTGATT_SERVICE_TYPE_SECONDARY 1
+
+/** GATT Characteristic ID, adding instance ID tracking to the UUID */
+typedef struct
+{
+    bt_uuid_t           uuid;
+    uint8_t             inst_id;
+} btgatt_char_id_t;
+
+/** GATT Service ID also identifies the service type (primary/secondary) */
+typedef struct
+{
+    btgatt_char_id_t    id;
+    uint8_t             is_primary;
+} btgatt_srvc_id_t;
+
+__END_DECLS
+
+#endif /* ANDROID_INCLUDE_BT_GATT_TYPES_H */
diff --git a/include/hardware/camera2.h b/include/hardware/camera2.h
index 5d45325..5b182a8 100644
--- a/include/hardware/camera2.h
+++ b/include/hardware/camera2.h
@@ -21,23 +21,33 @@
 #include "system/camera_metadata.h"
 
 /**
- * Camera device HAL 2.0 [ CAMERA_DEVICE_API_VERSION_2_0 ]
+ * Camera device HAL 2.1 [ CAMERA_DEVICE_API_VERSION_2_0, CAMERA_DEVICE_API_VERSION_2_1 ]
  *
  * EXPERIMENTAL.
  *
- * Supports both the android.hardware.ProCamera and
- * android.hardware.Camera APIs.
+ * Supports the android.hardware.Camera APIs.
  *
  * Camera devices that support this version of the HAL must return
- * CAMERA_DEVICE_API_VERSION_2_0 in camera_device_t.common.version and in
+ * CAMERA_DEVICE_API_VERSION_2_1 in camera_device_t.common.version and in
  * camera_info_t.device_version (from camera_module_t.get_camera_info).
  *
- * Camera modules that may contain version 2.0 devices must implement at least
+ * Camera modules that may contain version 2.x devices must implement at least
  * version 2.0 of the camera module interface (as defined by
  * camera_module_t.common.module_api_version).
  *
  * See camera_common.h for more versioning details.
  *
+ * Version history:
+ *
+ * 2.0: CAMERA_DEVICE_API_VERSION_2_0. Initial release (Android 4.2):
+ *      - Sufficient for implementing existing android.hardware.Camera API.
+ *      - Allows for ZSL queue in camera service layer
+ *      - Not tested for any new features such as manual capture control,
+ *        Bayer RAW capture, reprocessing of RAW data.
+ *
+ * 2.1: CAMERA_DEVICE_API_VERSION_2_1. Support per-device static metadata:
+ *      - Add get_instance_metadata() method to retrieve metadata that is fixed
+ *        after device open, but may be variable between open() calls.
  */
 
 __BEGIN_DECLS
@@ -786,6 +796,26 @@
      */
     int (*dump)(const struct camera2_device *, int fd);
 
+    /**
+     * Get device-instance-specific metadata. This metadata must be constant for
+     * a single instance of the camera device, but may be different between
+     * open() calls. The returned camera_metadata pointer must be valid until
+     * the device close() method is called.
+     *
+     * Version information:
+     *
+     * CAMERA_DEVICE_API_VERSION_2_0:
+     *
+     *   Not available. Framework may not access this function pointer.
+     *
+     * CAMERA_DEVICE_API_VERSION_2_1:
+     *
+     *   Valid. Can be called by the framework.
+     *
+     */
+    int (*get_instance_metadata)(const struct camera2_device *,
+            camera_metadata **instance_metadata);
+
 } camera2_device_ops_t;
 
 /**********************************************************************
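
A hedged framework-side sketch of the version guard for the new get_instance_metadata() entry point (not part of the patch): dev is assumed to come from a successful open() on a camera2 module.

#include <stddef.h>
#include <hardware/camera2.h>

int example_fetch_instance_metadata(camera2_device_t *dev,
                                    camera_metadata_t **instance_metadata)
{
    *instance_metadata = NULL;
    /* Only CAMERA_DEVICE_API_VERSION_2_1 devices provide this entry point;
     * the framework must not touch the pointer on 2.0 devices. */
    if (dev->common.version < CAMERA_DEVICE_API_VERSION_2_1)
        return 0;
    return dev->ops->get_instance_metadata(dev, instance_metadata);
}
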
diff --git a/include/hardware/camera3.h b/include/hardware/camera3.h
new file mode 100644
index 0000000..be0154f
--- /dev/null
+++ b/include/hardware/camera3.h
@@ -0,0 +1,1343 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_INCLUDE_CAMERA3_H
+#define ANDROID_INCLUDE_CAMERA3_H
+
+#include <system/camera_metadata.h>
+#include "camera_common.h"
+
+/**
+ * Camera device HAL 3.0 [ CAMERA_DEVICE_API_VERSION_3_0 ]
+ *
+ * EXPERIMENTAL.
+ *
+ * Supports the android.hardware.Camera API.
+ *
+ * Camera devices that support this version of the HAL must return
+ * CAMERA_DEVICE_API_VERSION_3_0 in camera_device_t.common.version and in
+ * camera_info_t.device_version (from camera_module_t.get_camera_info).
+ *
+ * Camera modules that may contain version 3.0 devices must implement at least
+ * version 2.0 of the camera module interface (as defined by
+ * camera_module_t.common.module_api_version).
+ *
+ * See camera_common.h for more versioning details.
+ *
+ * Version history:
+ *
+ * 1.0: Initial Android camera HAL (Android 4.0) [camera.h]:
+ *
+ *   - Converted from C++ CameraHardwareInterface abstraction layer.
+ *
+ *   - Supports android.hardware.Camera API.
+ *
+ * 2.0: Initial release of expanded-capability HAL (Android 4.2) [camera2.h]:
+ *
+ *   - Sufficient for implementing existing android.hardware.Camera API.
+ *
+ *   - Allows for ZSL queue in camera service layer
+ *
+ *   - Not tested for any new features such as manual capture control, Bayer RAW
+ *     capture, reprocessing of RAW data.
+ *
+ * 3.0: First revision of expanded-capability HAL:
+ *
+ *   - Major version change since the ABI is completely different. No change to
+ *     the required hardware capabilities or operational model from 2.0.
+ *
+ *   - Reworked input request and stream queue interfaces: Framework calls into
+ *     HAL with next request and stream buffers already dequeued. Sync framework
+ *     support is included, necessary for efficient implementations.
+ *
+ *   - Moved triggers into requests, most notifications into results.
+ *
+ *   - Consolidated all callbacks into framework into one structure, and all
+ *     setup methods into a single initialize() call.
+ *
+ *   - Made stream configuration into a single call to simplify stream
+ *     management. Bidirectional streams replace STREAM_FROM_STREAM construct.
+ *
+ *   - Limited mode semantics for older/limited hardware devices.
+ */
+
+/**
+ * Startup and general expected operation sequence:
+ *
+ * 1. Framework calls camera_module_t->common.open(), which returns a
+ *    hardware_device_t structure.
+ *
+ * 2. Framework inspects the hardware_device_t->version field, and instantiates
+ *    the appropriate handler for that version of the camera hardware device. In
+ *    case the version is CAMERA_DEVICE_API_VERSION_3_0, the device is cast to
+ *    a camera3_device_t.
+ *
+ * 3. Framework calls camera3_device_t->ops->initialize() with the framework
+ *    callback function pointers. This will only be called this one time after
+ *    open(), before any other functions in the ops structure are called.
+ *
+ * 4. The framework calls camera3_device_t->ops->configure_streams() with a list
+ *    of input/output streams to the HAL device.
+ *
+ * 5. The framework allocates gralloc buffers and calls
+ *    camera3_device_t->ops->register_stream_buffers() for at least one of the
+ *    output streams listed in configure_streams. The same stream is registered
+ *    only once.
+ *
+ * 6. The framework requests default settings for some number of use cases with
+ *    calls to camera3_device_t->ops->construct_default_request_settings(). This
+ *    may occur any time after step 3.
+ *
+ * 7. The framework constructs and sends the first capture request to the HAL,
+ *    with settings based on one of the sets of default settings, and with at
+ *    least one output stream, which has been registered earlier by the
+ *    framework. This is sent to the HAL with
+ *    camera3_device_t->ops->process_capture_request(). The HAL must block the
+ *    return of this call until it is ready for the next request to be sent.
+ *
+ * 8. The framework continues to submit requests, and possibly call
+ *    register_stream_buffers() for not-yet-registered streams, and call
+ *    construct_default_request_settings to get default settings buffers for
+ *    other use cases.
+ *
+ * 9. When the capture of a request begins (sensor starts exposing for the
+ *    capture), the HAL calls camera3_callback_ops_t->notify() with the SHUTTER
+ *    event, including the frame number and the timestamp for start of exposure.
+ *
+ * 10. After some pipeline delay, the HAL begins to return completed captures to
+ *    the framework with camera3_callback_ops_t->process_capture_result(). These
+ *    are returned in the same order as the requests were submitted. Multiple
+ *    requests can be in flight at once, depending on the pipeline depth of the
+ *    camera HAL device.
+ *
+ * 11. After some time, the framework may stop submitting new requests, wait for
+ *    the existing captures to complete (all buffers filled, all results
+ *    returned), and then call configure_streams() again. This resets the camera
+ *    hardware and pipeline for a new set of input/output streams. Some streams
+ *    may be reused from the previous configuration; if these streams' buffers
+ *    had already been registered with the HAL, they will not be registered
+ *    again. The framework then continues from step 7, if at least one
+ *    registered output stream remains (otherwise, step 5 is required first).
+ *
+ * 12. Alternatively, the framework may call camera3_device_t->common->close()
+ *    to end the camera session. This may be called at any time, although the
+ *    call may block until all in-flight captures have completed (all results
+ *    returned, all buffers filled). After the close call returns, no more calls
+ *    to the camera3_callback_ops_t functions are allowed from the HAL. The
+ *    framework may not call any methods on the HAL device interface again after
+ *    close() returns.
+ *
+ * 13. In case of an error or other asynchronous event, the HAL must call
+ *    camera3_callback_ops_t->notify() with the appropriate error/event
+ *    message. After returning from a fatal device-wide error notification, the
+ *    HAL should act as if close() had been called on it. However, the HAL must
+ *    either cancel or complete all outstanding captures before calling
+ *    notify(), so that once notify() is called with a fatal error, the
+ *    framework will not receive further callbacks from the device. Methods
+ *    besides close() should return -ENODEV or NULL after the notify() method
+ *    returns from a fatal error message.
+ */
+
+/**
+ * Operational modes:
+ *
+ * The camera 3 HAL device can implement one of two possible operational modes;
+ * limited and full. Full support is expected from new higher-end
+ * devices. Limited mode has hardware requirements roughly in line with those
+ * for a camera HAL device v1 implementation, and is expected from older or
+ * inexpensive devices. Full is a strict superset of limited, and they share the
+ * same essential operational flow, as documented above.
+ *
+ * The HAL must indicate its level of support with the
+ * android.info.supportedHardwareLevel static metadata entry, with 0 indicating
+ * limited mode, and 1 indicating full mode support.
+ *
+ * Roughly speaking, limited-mode devices do not allow for application control
+ * of capture settings (3A control only), high-rate capture of high-resolution
+ *   images, raw sensor readout, or support for YUV output streams above maximum
+ * recording resolution (JPEG only for large images).
+ *
+ * ** Details of limited mode behavior:
+ *
+ * - Limited-mode devices do not need to implement accurate synchronization
+ *   between capture request settings and the actual image data
+ *   captured. Instead, changes to settings may take effect some time in the
+ *   future, and possibly not for the same output frame for each settings
+ *   entry. Rapid changes in settings may result in some settings never being
+ *   used for a capture. However, captures that include high-resolution output
+ *   buffers ( > 1080p ) have to use the settings as specified (but see below
+ *   for processing rate).
+ *
+ * - Limited-mode devices do not need to support most of the
+ *   settings/result/static info metadata. Full-mode devices must support all
+ *   metadata fields listed in TODO. Specifically, only the following settings
+ *   are expected to be consumed or produced by a limited-mode HAL device:
+ *
+ *   android.control.aeAntibandingMode (controls)
+ *   android.control.aeExposureCompensation (controls)
+ *   android.control.aeLock (controls)
+ *   android.control.aeMode (controls)
+ *       [OFF means ON_FLASH_TORCH - TODO]
+ *   android.control.aeRegions (controls)
+ *   android.control.aeTargetFpsRange (controls)
+ *   android.control.afMode (controls)
+ *       [OFF means infinity focus]
+ *   android.control.afRegions (controls)
+ *   android.control.awbLock (controls)
+ *   android.control.awbMode (controls)
+ *       [OFF not supported]
+ *   android.control.awbRegions (controls)
+ *   android.control.captureIntent (controls)
+ *   android.control.effectMode (controls)
+ *   android.control.mode (controls)
+ *       [OFF not supported]
+ *   android.control.sceneMode (controls)
+ *   android.control.videoStabilizationMode (controls)
+ *   android.control.aeAvailableAntibandingModes (static)
+ *   android.control.aeAvailableModes (static)
+ *   android.control.aeAvailableTargetFpsRanges (static)
+ *   android.control.aeCompensationRange (static)
+ *   android.control.aeCompensationStep (static)
+ *   android.control.afAvailableModes (static)
+ *   android.control.availableEffects (static)
+ *   android.control.availableSceneModes (static)
+ *   android.control.availableVideoStabilizationModes (static)
+ *   android.control.awbAvailableModes (static)
+ *   android.control.maxRegions (static)
+ *   android.control.sceneModeOverrides (static)
+ *   android.control.aeRegions (dynamic)
+ *   android.control.aeState (dynamic)
+ *   android.control.afMode (dynamic)
+ *   android.control.afRegions (dynamic)
+ *   android.control.afState (dynamic)
+ *   android.control.awbMode (dynamic)
+ *   android.control.awbRegions (dynamic)
+ *   android.control.awbState (dynamic)
+ *   android.control.mode (dynamic)
+ *
+ *   android.flash.info.available (static)
+ *
+ *   android.info.supportedHardwareLevel (static)
+ *
+ *   android.jpeg.gpsCoordinates (controls)
+ *   android.jpeg.gpsProcessingMethod (controls)
+ *   android.jpeg.gpsTimestamp (controls)
+ *   android.jpeg.orientation (controls)
+ *   android.jpeg.quality (controls)
+ *   android.jpeg.thumbnailQuality (controls)
+ *   android.jpeg.thumbnailSize (controls)
+ *   android.jpeg.availableThumbnailSizes (static)
+ *   android.jpeg.maxSize (static)
+ *   android.jpeg.gpsCoordinates (dynamic)
+ *   android.jpeg.gpsProcessingMethod (dynamic)
+ *   android.jpeg.gpsTimestamp (dynamic)
+ *   android.jpeg.orientation (dynamic)
+ *   android.jpeg.quality (dynamic)
+ *   android.jpeg.size (dynamic)
+ *   android.jpeg.thumbnailQuality (dynamic)
+ *   android.jpeg.thumbnailSize (dynamic)
+ *
+ *   android.lens.info.minimumFocusDistance (static)
+ *
+ *   android.request.id (controls)
+ *   android.request.id (dynamic)
+ *
+ *   android.scaler.cropRegion (controls)
+ *       [ignores (x,y), assumes center-zoom]
+ *   android.scaler.availableFormats (static)
+ *       [RAW not supported]
+ *   android.scaler.availableJpegMinDurations (static)
+ *   android.scaler.availableJpegSizes (static)
+ *   android.scaler.availableMaxDigitalZoom (static)
+ *   android.scaler.availableProcessedMinDurations (static)
+ *   android.scaler.availableProcessedSizes (static)
+ *       [full resolution not supported]
+ *   android.scaler.maxDigitalZoom (static)
+ *   android.scaler.cropRegion (dynamic)
+ *
+ *   android.sensor.orientation (static)
+ *   android.sensor.timestamp (dynamic)
+ *
+ *   android.statistics.faceDetectMode (controls)
+ *   android.statistics.info.availableFaceDetectModes (static)
+ *   android.statistics.faceDetectMode (dynamic)
+ *   android.statistics.faceIds (dynamic)
+ *   android.statistics.faceLandmarks (dynamic)
+ *   android.statistics.faceRectangles (dynamic)
+ *   android.statistics.faceScores (dynamic)
+ *
+ * - Captures in limited mode that include high-resolution (> 1080p) output
+ *   buffers may block in process_capture_request() until all the output buffers
+ *   have been filled. A full-mode HAL device must process sequences of
+ *   high-resolution requests at the rate indicated in the static metadata for
+ *   that pixel format. The HAL must still call process_capture_result() to
+ *   provide the output; the framework must simply be prepared for
+ *   process_capture_request() to block until after process_capture_result() for
+ *   that request completes for high-resolution captures for limited-mode
+ *   devices.
+ *
+ */
+
+/**
+ * Error management:
+ *
+ * Camera HAL device ops functions that have a return value will all return
+ * -ENODEV / NULL in case of a serious error. This means the device cannot
+ * continue operation, and must be closed by the framework. Once this error is
+ * returned by some method, or if notify() is called with ERROR_DEVICE, only
+ * the close() method can be called successfully. All other methods will return
+ * -ENODEV / NULL.
+ *
+ * Transient errors in image capture must be reported through notify() as follows:
+ *
+ * - The failure of an entire capture to occur must be reported by the HAL by
+ *   calling notify() with ERROR_REQUEST. Individual errors for the result
+ *   metadata or the output buffers must not be reported in this case.
+ *
+ * - If the metadata for a capture cannot be produced, but some image buffers
+ *   were filled, the HAL must call notify() with ERROR_RESULT.
+ *
+ * - If an output image buffer could not be filled, but either the metadata was
+ *   produced or some other buffers were filled, the HAL must call notify() with
+ *   ERROR_BUFFER for each failed buffer.
+ *
+ * In each of these transient failure cases, the HAL must still call
+ * process_capture_result, with valid output buffer_handle_t. If the result
+ * metadata could not be produced, it should be NULL. If some buffers could not
+ * be filled, their sync fences must be set to the error state.
+ *
+ * Invalid input arguments result in -EINVAL from the appropriate methods. In
+ * that case, the framework should act as if that call had never been made.
+ *
+ */
+
+__BEGIN_DECLS
+
+struct camera3_device;
+
+/**********************************************************************
+ *
+ * Camera3 stream and stream buffer definitions.
+ *
+ * These structs and enums define the handles and contents of the input and
+ * output streams connecting the HAL to various framework and application buffer
+ * consumers. Each stream is backed by a gralloc buffer queue.
+ *
+ */
+
+/**
+ * camera3_stream_type_t:
+ *
+ * The type of the camera stream, which defines whether the camera HAL device is
+ * the producer or the consumer for that stream, and how the buffers of the
+ * stream relate to the other streams.
+ */
+typedef enum camera3_stream_type {
+    /**
+     * This stream is an output stream; the camera HAL device will be
+     * responsible for filling buffers from this stream with newly captured or
+     * reprocessed image data.
+     */
+    CAMERA3_STREAM_OUTPUT = 0,
+
+    /**
+     * This stream is an input stream; the camera HAL device will be responsible
+     * for reading buffers from this stream and sending them through the camera
+     * processing pipeline, as if the buffer was a newly captured image from the
+     * imager.
+     */
+    CAMERA3_STREAM_INPUT = 1,
+
+    /**
+     * This stream can be used for input and output. Typically, the stream is
+     * used as an output stream, but occasionally one already-filled buffer may
+     * be sent back to the HAL device for reprocessing.
+     *
+     * This kind of stream is meant generally for zero-shutter-lag features,
+     * where copying the captured image from the output buffer to the
+     * reprocessing input buffer would be expensive. The stream will be used by
+     * the framework as follows:
+     *
+     * 1. The framework includes a buffer from this stream as output buffer in a
+     *    request as normal.
+     *
+     * 2. Once the HAL device returns a filled output buffer to the framework,
+     *    the framework may do one of two things with the filled buffer:
+     *
+     * 2. a. The framework uses the filled data, and returns the now-used buffer
+     *       to the stream queue for reuse. This behavior exactly matches the
+     *       OUTPUT type of stream.
+     *
+     * 2. b. The framework wants to reprocess the filled data, and uses the
+     *       buffer as an input buffer for a request. Once the HAL device has
+     *       used the reprocessing buffer, it then returns it to the
+     *       framework. The framework then returns the now-used buffer to the
+     *       stream queue for reuse.
+     *
+     * 3. The HAL device will be given the buffer again as an output buffer for
+     *    a request at some future point.
+     *
+     * Note that the HAL will always be reprocessing data it produced.
+     *
+     */
+    CAMERA3_STREAM_BIDIRECTIONAL = 2,
+
+    /**
+     * Total number of framework-defined stream types
+     */
+    CAMERA3_NUM_STREAM_TYPES
+
+} camera3_stream_type_t;
+
+/**
+ * camera3_stream_t:
+ *
+ * A handle to a single camera input or output stream. A stream is defined by
+ * the framework by its buffer resolution and format, and additionally by the
+ * HAL with the gralloc usage flags and the maximum in-flight buffer count.
+ *
+ * The stream structures are owned by the framework, but pointers to a
+ * camera3_stream passed into the HAL by configure_streams() are valid until the
+ * end of the first subsequent configure_streams() call that _does not_ include
+ * that camera3_stream as an argument, or until the end of the close() call.
+ *
+ * All camera3_stream framework-controlled members are immutable once the
+ * camera3_stream is passed into configure_streams().  The HAL may only change
+ * the HAL-controlled parameters during a configure_streams() call, except for
+ * the contents of the private pointer.
+ *
+ * If a configure_streams() call returns a non-fatal error, all active streams
+ * remain valid as if configure_streams() had not been called.
+ *
+ * The endpoint of the stream is not visible to the camera HAL device.
+ */
+typedef struct camera3_stream {
+
+    /*****
+     * Set by framework before configure_streams()
+     */
+
+    /**
+     * The type of the stream, one of the camera3_stream_type_t values.
+     */
+    int stream_type;
+
+    /**
+     * The width in pixels of the buffers in this stream
+     */
+    uint32_t width;
+
+    /**
+     * The height in pixels of the buffers in this stream
+     */
+    uint32_t height;
+
+    /**
+     * The pixel format for the buffers in this stream. Format is a value from
+     * the HAL_PIXEL_FORMAT_* list in system/core/include/system/graphics.h, or
+     * from device-specific headers.
+     *
+     * If HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform
+     * gralloc module will select a format based on the usage flags provided by
+     * the camera device and the other endpoint of the stream.
+     *
+     * The camera HAL device must inspect the buffers handed to it in the
+     * subsequent register_stream_buffers() call to obtain the
+     * implementation-specific format details, if necessary.
+     */
+    int format;
+
+    /*****
+     * Set by HAL during configure_streams().
+     */
+
+    /**
+     * The gralloc usage flags for this stream, as needed by the HAL. The usage
+     * flags are defined in gralloc.h (GRALLOC_USAGE_*), or in device-specific
+     * headers.
+     *
+     * For output streams, these are the HAL's producer usage flags. For input
+     * streams, these are the HAL's consumer usage flags. The usage flags from
+     * the producer and the consumer will be combined together and then passed
+     * to the platform gralloc HAL module for allocating the gralloc buffers for
+     * each stream.
+     */
+    uint32_t usage;
+
+    /**
+     * The maximum number of buffers the HAL device may need to have dequeued at
+     * the same time. The HAL device may not have more buffers in-flight from
+     * this stream than this value.
+     */
+    uint32_t max_buffers;
+
+    /**
+     * A handle to HAL-private information for the stream. Will not be inspected
+     * by the framework code.
+     */
+    void *priv;
+
+} camera3_stream_t;
+
+/**
+ * camera3_stream_configuration_t:
+ *
+ * A structure of stream definitions, used by configure_streams(). This
+ * structure defines all the output streams and the reprocessing input
+ * stream for the current camera use case.
+ */
+typedef struct camera3_stream_configuration {
+    /**
+     * The total number of streams requested by the framework.  This includes
+     * both input and output streams. The number of streams will be at least 1,
+     * and there will be at least one output-capable stream.
+     */
+    uint32_t num_streams;
+
+    /**
+     * An array of camera stream pointers, defining the input/output
+     * configuration for the camera HAL device.
+     *
+     * At most one input-capable stream may be defined (INPUT or BIDIRECTIONAL)
+     * in a single configuration.
+     *
+     * At least one output-capable stream must be defined (OUTPUT or
+     * BIDIRECTIONAL).
+     */
+    camera3_stream_t **streams;
+
+} camera3_stream_configuration_t;
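
A hedged HAL-side sketch of claiming the streams handed over in a configuration (not part of the patch): it only touches the HAL-controlled fields defined above, and the usage flags and buffer counts are illustrative placeholders rather than recommendations.

#include <stddef.h>
#include <stdint.h>
#include <hardware/gralloc.h>
#include <hardware/camera3.h>

static void example_claim_streams(camera3_stream_configuration_t *config)
{
    for (uint32_t i = 0; i < config->num_streams; i++) {
        camera3_stream_t *stream = config->streams[i];
        if (stream->stream_type == CAMERA3_STREAM_INPUT) {
            stream->usage = GRALLOC_USAGE_SW_READ_OFTEN;    /* HAL consumes */
        } else {
            stream->usage = GRALLOC_USAGE_SW_WRITE_OFTEN;   /* HAL produces */
        }
        stream->max_buffers = 4;    /* illustrative in-flight limit */
        stream->priv = NULL;        /* HAL-private per-stream state */
    }
}
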
+
+/**
+ * camera3_stream_buffer_t:
+ *
+ * A single buffer from a camera3 stream. It includes a handle to its parent
+ * stream, the handle to the gralloc buffer itself, and sync fences
+ *
+ * The buffer does not specify whether it is to be used for input or output;
+ * that is determined by its parent stream type and how the buffer is passed to
+ * the HAL device.
+ */
+typedef struct camera3_stream_buffer {
+    /**
+     * The handle of the stream this buffer is associated with
+     */
+    camera3_stream_t *stream;
+
+    /**
+     * The native handle to the buffer
+     */
+    buffer_handle_t *buffer;
+
+    /**
+     * The acquire sync fence for this buffer. The HAL must wait on this fence
+     * fd before attempting to read from or write to this buffer.
+     *
+     * The framework may set this to -1 to indicate that no waiting is necessary
+     * for this buffer. This field should not be changed by the HAL.
+     */
+     int acquire_fence;
+
+    /**
+     * The release sync fence for this buffer. The HAL must set this fence when
+     * returning buffers to the framework, or write -1 to indicate that no
+     * waiting is required for this buffer.
+     *
+     * For the input buffer, the release fence must be set by the
+     * process_capture_request() call. For the output buffers, the fences must
+     * be set in the output_buffers array passed to process_capture_result().
+     */
+    int release_fence;
+
+} camera3_stream_buffer_t;
+
+/**
+ * camera3_stream_buffer_set_t:
+ *
+ * The complete set of gralloc buffers for a stream. This structure is given to
+ * register_stream_buffers() to allow the camera HAL device to register/map/etc
+ * newly allocated stream buffers.
+ */
+typedef struct camera3_stream_buffer_set {
+    /**
+     * The stream handle for the stream these buffers belong to
+     */
+    camera3_stream_t *stream;
+
+    /**
+     * The number of buffers in this stream. It is guaranteed to be at least
+     * stream->max_buffers.
+     */
+    uint32_t num_buffers;
+
+    /**
+     * The array of gralloc buffer handles for this stream. If the stream format
+     * is set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the camera HAL device
+     * should inspect the passed-in buffers to determine any platform-private
+     * pixel format information.
+     */
+    buffer_handle_t **buffers;
+
+} camera3_stream_buffer_set_t;
+
+/**
+ * camera3_jpeg_blob:
+ *
+ * Transport header for compressed JPEG buffers in output streams.
+ *
+ * To capture JPEG images, a stream is created using the pixel format
+ * HAL_PIXEL_FORMAT_BLOB, and the static metadata field android.jpeg.maxSize is
+ * used as the buffer size. Since compressed JPEG images are of variable size,
+ * the HAL needs to include the final size of the compressed image using this
+ * structure inside the output stream buffer. The JPEG blob ID field must be set
+ * to CAMERA3_JPEG_BLOB_ID.
+ *
+ * Transport header should be at the end of the JPEG output stream buffer.  That
+ * means the jpeg_blob_id must start at byte[android.jpeg.maxSize -
+ * sizeof(camera3_jpeg_blob)].  Any HAL using this transport header must
+ * account for it in android.jpeg.maxSize.  The JPEG data itself starts at
+ * the beginning of the buffer and should be jpeg_size bytes long.
+ */
+typedef struct camera3_jpeg_blob {
+    uint16_t jpeg_blob_id;
+    uint32_t jpeg_size;
+} camera3_jpeg_blob_t;
+
+enum {
+    CAMERA3_JPEG_BLOB_ID = 0x00FF
+};
+
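For illustration, a HAL might append the transport header to a filled BLOB buffer roughly as follows, assuming the gralloc buffer has already been locked to a CPU-accessible pointer; write_jpeg_blob_footer() and its parameters are hypothetical:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* data:      start of the mapped HAL_PIXEL_FORMAT_BLOB buffer
     * max_size:  android.jpeg.maxSize used when the stream was configured
     * jpeg_size: number of JPEG bytes actually written at the buffer start */
    static void write_jpeg_blob_footer(uint8_t *data, size_t max_size,
                                       uint32_t jpeg_size)
    {
        camera3_jpeg_blob_t blob;
        blob.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
        blob.jpeg_size = jpeg_size;
        /* The header occupies the final sizeof(camera3_jpeg_blob) bytes of the
         * buffer; memcpy avoids any unaligned store. */
        memcpy(data + max_size - sizeof(blob), &blob, sizeof(blob));
    }
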
+/**********************************************************************
+ *
+ * Message definitions for the HAL notify() callback.
+ *
+ * These definitions are used for the HAL notify callback, to signal
+ * asynchronous events from the HAL device to the Android framework.
+ *
+ */
+
+/**
+ * camera3_msg_type:
+ *
+ * Indicates the type of message sent, which specifies which member of the
+ * message union is valid.
+ *
+ */
+typedef enum camera3_msg_type {
+    /**
+     * An error has occurred. camera3_notify_msg.message.error contains the
+     * error information.
+     */
+    CAMERA3_MSG_ERROR = 1,
+
+    /**
+     * The exposure of a given request has begun.
+     * camera3_notify_msg.message.shutter contains the information for
+     * the capture.
+     */
+    CAMERA3_MSG_SHUTTER = 2,
+
+    /**
+     * Number of framework message types
+     */
+    CAMERA3_NUM_MESSAGES
+
+} camera3_msg_type_t;
+
+/**
+ * Defined error codes for CAMERA3_MSG_ERROR
+ */
+typedef enum camera3_error_msg_code {
+    /**
+     * A serious failure occurred. No further frames or buffer streams will
+     * be produced by the device. Device should be treated as closed. The
+     * client must reopen the device to use it again. The frame_number field
+     * is unused.
+     */
+    CAMERA3_MSG_ERROR_DEVICE = 1,
+
+    /**
+     * An error has occurred in processing a request. No output (metadata or
+     * buffers) will be produced for this request. The frame_number field
+     * specifies which request has been dropped. Subsequent requests are
+     * unaffected, and the device remains operational.
+     */
+    CAMERA3_MSG_ERROR_REQUEST = 2,
+
+    /**
+     * An error has occurred in producing an output result metadata buffer
+     * for a request, but output stream buffers for it will still be
+     * available. Subsequent requests are unaffected, and the device remains
+     * operational.  The frame_number field specifies the request for which
+     * result metadata won't be available.
+     */
+    CAMERA3_MSG_ERROR_RESULT = 3,
+
+    /**
+     * An error has occurred in placing an output buffer into a stream for a
+     * request. The frame metadata and other buffers may still be
+     * available. Subsequent requests are unaffected, and the device remains
+     * operational. The frame_number field specifies the request for which the
+     * buffer was dropped, and error_stream contains a pointer to the stream
+     * that dropped the frame.
+     */
+    CAMERA3_MSG_ERROR_BUFFER = 4,
+
+    /**
+     * Number of error types
+     */
+    CAMERA3_MSG_NUM_ERRORS
+
+} camera3_error_msg_code_t;
+
+/**
+ * camera3_error_msg_t:
+ *
+ * Message contents for CAMERA3_MSG_ERROR
+ */
+typedef struct camera3_error_msg {
+    /**
+     * Frame number of the request the error applies to. 0 if the frame number
+     * isn't applicable to the error.
+     */
+    uint32_t frame_number;
+
+    /**
+     * Pointer to the stream that had a failure. NULL if the stream isn't
+     * applicable to the error.
+     */
+    camera3_stream_t *error_stream;
+
+    /**
+     * The code for this error; one of the camera3_error_msg_code_t enum values.
+     */
+    int error_code;
+
+} camera3_error_msg_t;
+
+/**
+ * camera3_shutter_msg_t:
+ *
+ * Message contents for CAMERA3_MSG_SHUTTER
+ */
+typedef struct camera3_shutter_msg {
+    /**
+     * Frame number of the request that has begun exposure
+     */
+    uint32_t frame_number;
+
+    /**
+     * Timestamp for the start of capture. This must match the capture result
+     * metadata's sensor exposure start timestamp.
+     */
+    uint64_t timestamp;
+
+} camera3_shutter_msg_t;
+
+/**
+ * camera3_notify_msg_t:
+ *
+ * The message structure sent to camera3_callback_ops_t.notify()
+ */
+typedef struct camera3_notify_msg {
+
+    /**
+     * The message type. One of camera3_msg_type_t, or a private extension.
+     */
+    int type;
+
+    union {
+        /**
+         * Error message contents. Valid if type is CAMERA3_MSG_ERROR
+         */
+        camera3_error_msg_t error;
+
+        /**
+         * Shutter message contents. Valid if type is CAMERA3_MSG_SHUTTER
+         */
+        camera3_shutter_msg_t shutter;
+
+        /**
+         * Generic message contents. Used to ensure a minimum size for custom
+         * message types.
+         */
+        uint8_t generic[32];
+    } message;
+
+} camera3_notify_msg_t;
+
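A sketch of how a HAL might deliver a shutter notification, assuming callbacks is the camera3_callback_ops_t pointer saved during initialize(); hal_notify_shutter() is a hypothetical name:

    #include <string.h>

    static void hal_notify_shutter(const camera3_callback_ops_t *callbacks,
                                   uint32_t frame_number, uint64_t timestamp_ns)
    {
        camera3_notify_msg_t msg;
        memset(&msg, 0, sizeof(msg));
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = frame_number;
        msg.message.shutter.timestamp = timestamp_ns;
        /* msg only needs to stay valid for the duration of this call. */
        callbacks->notify(callbacks, &msg);
    }
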
+/**********************************************************************
+ *
+ * Capture request/result definitions for the HAL process_capture_request()
+ * method, and the process_capture_result() callback.
+ *
+ */
+
+/**
+ * camera3_request_template_t:
+ *
+ * Available template types for
+ * camera3_device_ops.construct_default_request_settings()
+ */
+typedef enum camera3_request_template {
+    /**
+     * Standard camera preview operation with 3A on auto.
+     */
+    CAMERA3_TEMPLATE_PREVIEW = 1,
+
+    /**
+     * Standard camera high-quality still capture with 3A and flash on auto.
+     */
+    CAMERA3_TEMPLATE_STILL_CAPTURE = 2,
+
+    /**
+     * Standard video recording plus preview with 3A on auto, torch off.
+     */
+    CAMERA3_TEMPLATE_VIDEO_RECORD = 3,
+
+    /**
+     * High-quality still capture while recording video. Application will
+     * include preview, video record, and full-resolution YUV or JPEG streams in
+     * request. Must not cause stuttering on video stream. 3A on auto.
+     */
+    CAMERA3_TEMPLATE_VIDEO_SNAPSHOT = 4,
+
+    /**
+     * Zero-shutter-lag mode. Application will request preview and
+     * full-resolution data for each frame, and reprocess it to JPEG when a
+     * still image is requested by user. Settings should provide highest-quality
+     * full-resolution images without compromising preview frame rate. 3A on
+     * auto.
+     */
+    CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG = 5,
+
+    /* Total number of templates */
+    CAMERA3_TEMPLATE_COUNT,
+
+    /**
+     * First value for vendor-defined request templates
+     */
+    CAMERA3_VENDOR_TEMPLATE_START = 0x40000000
+
+} camera3_request_template_t;
+
+/**
+ * camera3_capture_request_t:
+ *
+ * A single request for image capture/buffer reprocessing, sent to the Camera
+ * HAL device by the framework in process_capture_request().
+ *
+ * The request contains the settings to be used for this capture, and the set of
+ * output buffers to write the resulting image data in. It may optionally
+ * contain an input buffer, in which case the request is for reprocessing that
+ * input buffer instead of capturing a new image with the camera sensor. The
+ * capture is identified by the frame_number.
+ *
+ * In response, the camera HAL device must send a camera3_capture_result
+ * structure asynchronously to the framework, using the process_capture_result()
+ * callback.
+ */
+typedef struct camera3_capture_request {
+    /**
+     * The frame number is an incrementing integer set by the framework to
+     * uniquely identify this capture. It needs to be returned in the result
+     * call, and is also used to identify the request in asynchronous
+     * notifications sent to camera3_callback_ops_t.notify().
+     */
+    uint32_t frame_number;
+
+    /**
+     * The settings buffer contains the capture and processing parameters for
+     * the request. As a special case, a NULL settings buffer indicates that the
+     * settings are identical to the most-recently submitted capture request. A
+     * NULL buffer cannot be used as the first submitted request after a
+     * configure_streams() call.
+     */
+    const camera_metadata_t *settings;
+
+    /**
+     * The input stream buffer to use for this request, if any.
+     *
+     * If input_buffer is NULL, then the request is for a new capture from the
+     * imager. If input_buffer is valid, the request is for reprocessing the
+     * image contained in input_buffer.
+     *
+     * In the latter case, the HAL must set the release_fence of the
+     * input_buffer to a valid sync fence, or to -1 if the HAL does not support
+     * sync, before process_capture_request() returns.
+     *
+     * The HAL is required to wait on the acquire sync fence of the input buffer
+     * before accessing it.
+     *
+     * Any input buffer included here will have been registered with the HAL
+     * through register_stream_buffers() before its inclusion in a request.
+     */
+    camera3_stream_buffer_t *input_buffer;
+
+    /**
+     * The number of output buffers for this capture request. Must be at least
+     * 1.
+     */
+    uint32_t num_output_buffers;
+
+    /**
+     * An array of num_output_buffers stream buffers, to be filled with image
+     * data from this capture/reprocess. The HAL must wait on the acquire fences
+     * of each stream buffer before writing to them. All the buffers included
+     * here will have been registered with the HAL through
+     * register_stream_buffers() before their inclusion in a request.
+     *
+     * The HAL takes ownership of the actual buffer_handle_t entries in
+     * output_buffers; the framework does not access them until they are
+     * returned in a camera3_capture_result_t.
+     */
+    const camera3_stream_buffer_t *output_buffers;
+
+} camera3_capture_request_t;
+
+/**
+ * camera3_capture_result_t:
+ *
+ * The result of a single capture/reprocess by the camera HAL device. This is
+ * sent to the framework asynchronously with process_capture_result(), in
+ * response to a single capture request sent to the HAL with
+ * process_capture_request().
+ *
+ * The result structure contains the output metadata from this capture, and the
+ * set of output buffers that have been/will be filled for this capture. Each
+ * output buffer may come with a release sync fence that the framework will wait
+ * on before reading, in case the buffer has not yet been filled by the HAL.
+ *
+ */
+typedef struct camera3_capture_result {
+    /**
+     * The frame number is an incrementing integer set by the framework in the
+     * submitted request to uniquely identify this capture. It is also used to
+     * identify the request in asynchronous notifications sent to
+     * camera3_callback_ops_t.notify().
+     */
+    uint32_t frame_number;
+
+    /**
+     * The result metadata for this capture. This contains information about the
+     * final capture parameters, the state of the capture and post-processing
+     * hardware, the state of the 3A algorithms, if enabled, and the output of
+     * any enabled statistics units.
+     */
+    const camera_metadata_t *result;
+
+    /**
+     * The number of output buffers used for this capture. Must equal the
+     * matching capture request's count.
+     */
+    uint32_t num_output_buffers;
+
+    /**
+     * The handles for the output stream buffers for this capture. They may not
+     * yet be filled at the time the HAL calls process_capture_result(); the
+     * framework will wait on the release sync fences provided by the HAL before
+     * reading the buffers.
+     *
+     * The HAL must set the stream buffer's release sync fence to a valid sync
+     * fd, or to -1 if the buffer has already been filled.
+     */
+    const camera3_stream_buffer_t *output_buffers;
+
+} camera3_capture_result_t;
+
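A sketch of how a HAL might hand a completed capture back through the callback interface defined below, assuming the result metadata and the buffer release fences have been prepared as described above; hal_send_result() is a hypothetical name:

    static void hal_send_result(const camera3_callback_ops_t *callbacks,
                                uint32_t frame_number,
                                const camera_metadata_t *metadata,
                                const camera3_stream_buffer_t *buffers,
                                uint32_t num_buffers)
    {
        camera3_capture_result_t result;
        result.frame_number = frame_number;
        result.result = metadata;              /* NULL only for failed results */
        result.num_output_buffers = num_buffers;
        result.output_buffers = buffers;
        /* The framework copies whatever it needs before this call returns. */
        callbacks->process_capture_result(callbacks, &result);
    }
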
+/**********************************************************************
+ *
+ * Callback methods for the HAL to call into the framework.
+ *
+ * These methods are used to return metadata and image buffers for completed
+ * or failed captures, and to notify the framework of asynchronous events such
+ * as errors.
+ *
+ * The framework will not call back into the HAL from within these callbacks,
+ * and these calls will not block for extended periods.
+ *
+ */
+typedef struct camera3_callback_ops {
+
+    /**
+     * process_capture_result:
+     *
+     * Send a completed capture result metadata buffer to the framework, along
+     * with the possibly completed output stream buffers.
+     *
+     * Captures must be processed in-order, so that the Nth request submitted
+     * will match with the Nth result returned. Only one call to
+     * process_capture_request() should be made at a time to ensure correct
+     * ordering.
+     *
+     * The HAL retains ownership of the result structure, which only needs to be
+     * valid to access during this call. The framework will copy whatever it
+     * needs before this call returns.
+     *
+     * The output buffers do not need to be filled yet; the framework will wait
+     * on the stream buffer release sync fence before reading the buffer
+     * data. Therefore, this method must be called by the HAL as soon as the
+     * result metadata is available, even if some or all of the output buffers
+     * are still in processing. The HAL must include valid release sync fences
+     * into each output_buffers stream buffer entry, or -1 if it does not
+     * support sync fences or if that stream buffer is already filled.
+     *
+     * If the result buffer cannot be constructed for a request, the HAL should
+     * return a NULL buffer here, but still provide the output buffers and their
+     * sync fences. In addition, notify() must be called with an ERROR_RESULT
+     * message.
+     *
+     * If an output buffer cannot be filled, a sync error should be produced by
+     * the HAL for that buffer; this method should still be called with a valid
+     * (possibly in an error state) sync fence, if the HAL is using them. In
+     * addition, notify() must be called with an ERROR_BUFFER message.
+     *
+     * If the entire capture has failed, then this method still needs to be
+     * called to return the output buffers to the framework. All the sync
+     * fences, if used, should be in the error state, and the result metadata
+     * should be NULL. In addition, notify() must be called with an ERROR_REQUEST
+     * message. In this case, individual ERROR_RESULT/ERROR_BUFFER messages
+     * should not be sent.
+     *
+     */
+    void (*process_capture_result)(const struct camera3_callback_ops *,
+            const camera3_capture_result_t *result);
+
+    /**
+     * notify:
+     *
+     * Asynchronous notification callback from the HAL, fired for various
+     * reasons. Only for information independent of frame capture, or that
+     * require specific timing. The ownership of the message structure remains
+     * with the HAL, and the msg only needs to be valid for the duration of this
+     * call.
+     *
+     * Multiple threads may call notify() simultaneously.
+     */
+    void (*notify)(const struct camera3_callback_ops *,
+            const camera3_notify_msg_t *msg);
+
+} camera3_callback_ops_t;
+
+/**********************************************************************
+ *
+ * Camera device operations
+ *
+ */
+typedef struct camera3_device_ops {
+
+    /**
+     * initialize:
+     *
+     * One-time initialization to pass framework callback function pointers to
+     * the HAL. Will be called once after a successful open() call, before any
+     * other functions are called on the camera3_device_ops structure.
+     *
+     * Return values:
+     *
+     *  0:     On successful initialization
+     *
+     * -ENODEV: If initialization fails. Only close() can be called successfully
+     *          by the framework after this.
+     */
+    int (*initialize)(const struct camera3_device *,
+            const camera3_callback_ops_t *callback_ops);
+
+    /**********************************************************************
+     * Stream management
+     */
+
+    /**
+     * configure_streams:
+     *
+     * Reset the HAL camera device processing pipeline and set up new input and
+     * output streams. This call replaces any existing stream configuration with
+     * the streams defined in the stream_list. This method will be called at
+     * least once after initialize() before a request is submitted with
+     * process_capture_request().
+     *
+     * The stream_list must contain at least one output-capable stream, and may
+     * not contain more than one input-capable stream.
+     *
+     * The stream_list may contain streams that are also in the currently-active
+     * set of streams (from the previous call to configure_streams()). These
+     * streams will already have valid values for usage, max_buffers, and the
+     * private pointer. If such a stream has already had its buffers registered,
+     * register_stream_buffers() will not be called again for the stream, and
+     * buffers from the stream can be immediately included in input requests.
+     *
+     * If the HAL needs to change the stream configuration for an existing
+     * stream due to the new configuration, it may rewrite the values of usage
+     * and/or max_buffers during the configure call. The framework will detect
+     * such a change, and will then reallocate the stream buffers, and call
+     * register_stream_buffers() again before using buffers from that stream in
+     * a request.
+     *
+     * If a currently-active stream is not included in stream_list, the HAL may
+     * safely remove any references to that stream. It will not be reused in a
+     * later configure_streams() call by the framework, and all the gralloc buffers for
+     * it will be freed after the configure_streams() call returns.
+     *
+     * The stream_list structure is owned by the framework, and may not be
+     * accessed once this call completes. The address of an individual
+     * camera3_stream_t structure will remain valid for access by the HAL until
+     * the end of the first configure_streams() call which no longer includes
+     * that camera3_stream_t in the stream_list argument. The HAL may not change
+     * values in the stream structure outside of the private pointer, except for
+     * the usage and max_buffers members during the configure_streams() call
+     * itself.
+     *
+     * If the stream is new, the usage, max_buffers, and private pointer fields
+     * of the stream structure will all be set to 0. The HAL device must set
+     * these fields before the configure_streams() call returns. These fields
+     * are then used by the framework and the platform gralloc module to
+     * allocate the gralloc buffers for each stream.
+     *
+     * Before such a new stream can have its buffers included in a capture
+     * request, the framework will call register_stream_buffers() with that
+     * stream. However, the framework is not required to register buffers for
+     * _all_ streams before submitting a request. This allows for quick startup
+     * of (for example) a preview stream, with allocation for other streams
+     * happening later or concurrently.
+     *
+     * Preconditions:
+     *
+     * The framework will only call this method when no captures are being
+     * processed. That is, all results have been returned to the framework, and
+     * all in-flight input and output buffers have been returned and their
+     * release sync fences have been signaled by the HAL. The framework will not
+     * submit new requests for capture while the configure_streams() call is
+     * underway.
+     *
+     * Postconditions:
+     *
+     * The HAL device must configure itself to provide maximum possible output
+     * frame rate given the sizes and formats of the output streams, as
+     * documented in the camera device's static metadata.
+     *
+     * Performance expectations:
+     *
+     * This call is expected to be heavyweight and possibly take several hundred
+     * milliseconds to complete, since it may require resetting and
+     * reconfiguring the image sensor and the camera processing pipeline.
+     * Nevertheless, the HAL device should attempt to minimize the
+     * reconfiguration delay to minimize the user-visible pauses during
+     * application operational mode changes (such as switching from still
+     * capture to video recording).
+     *
+     * Return values:
+     *
+     *  0:      On successful stream configuration
+     *
+     * -EINVAL: If the requested stream configuration is invalid. Some examples
+     *          of invalid stream configurations include:
+     *
+     *          - Including more than 1 input-capable stream (INPUT or
+     *            BIDIRECTIONAL)
+     *
+     *          - Not including any output-capable streams (OUTPUT or
+     *            BIDIRECTIONAL)
+     *
+     *          - Including streams with unsupported formats, or an unsupported
+     *            size for that format.
+     *
+     *          - Including too many output streams of a certain format.
+     *
+     *          Note that the framework submitting an invalid stream
+     *          configuration is not normal operation, since stream
+     *          configurations are checked before configure. An invalid
+     *          configuration means that a bug exists in the framework code, or
+     *          there is a mismatch between the HAL's static metadata and the
+     *          requirements on streams.
+     *
+     * -ENODEV: If there has been a fatal error and the device is no longer
+     *          operational. Only close() can be called successfully by the
+     *          framework after this error is returned.
+     */
+    int (*configure_streams)(const struct camera3_device *,
+            camera3_stream_configuration_t *stream_list);
+
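An abbreviated configure_streams() sketch covering only the per-stream bookkeeping described above. The stream_type field and the CAMERA3_STREAM_* constants come from earlier in this header; the usage flags and buffer count chosen here are placeholders for a CPU-only pipeline, not a recommendation:

    #include <stdint.h>
    #include <hardware/gralloc.h>

    static int hal_configure_streams(const struct camera3_device *dev,
                                     camera3_stream_configuration_t *stream_list)
    {
        (void)dev;
        for (uint32_t i = 0; i < stream_list->num_streams; i++) {
            camera3_stream_t *s = stream_list->streams[i];
            /* New streams arrive with usage, max_buffers and priv set to 0 and
             * must be filled in before returning; rewriting them for an
             * existing stream is also allowed and causes re-registration. */
            s->usage |= (s->stream_type == CAMERA3_STREAM_OUTPUT)
                    ? GRALLOC_USAGE_SW_WRITE_OFTEN   /* placeholder choice */
                    : GRALLOC_USAGE_SW_READ_OFTEN;
            s->max_buffers = 4;                      /* placeholder depth */
            /* s->priv would normally point at HAL-private per-stream state. */
        }
        return 0;
    }
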
+    /**
+     * register_stream_buffers:
+     *
+     * Register buffers for a given stream with the HAL device. This method is
+     * called by the framework after a new stream is defined by
+     * configure_streams, and before buffers from that stream are included in a
+     * capture request. If the same stream is listed in a subsequent
+     * configure_streams() call, register_stream_buffers will _not_ be called
+     * again for that stream.
+     *
+     * The framework does not need to register buffers for all configured
+     * streams before it submits the first capture request. This allows quick
+     * startup for preview (or similar use cases) while other streams are still
+     * being allocated.
+     *
+     * This method is intended to allow the HAL device to map or otherwise
+     * prepare the buffers for later use. The buffers passed in will already be
+     * locked for use. At the end of the call, all the buffers must be ready to
+     * be returned to the stream.  The buffer_set argument is only valid for the
+     * duration of this call.
+     *
+     * If the stream format was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+     * the camera HAL should inspect the passed-in buffers here to determine any
+     * platform-private pixel format information.
+     *
+     * Return values:
+     *
+     *  0:      On successful registration of the new stream buffers
+     *
+     * -EINVAL: If the stream_buffer_set does not refer to a valid active
+     *          stream, or if the buffers array is invalid.
+     *
+     * -ENOMEM: If there was a failure in registering the buffers. The framework
+     *          must consider all the stream buffers to be unregistered, and can
+     *          try to register again later.
+     *
+     * -ENODEV: If there is a fatal error, and the device is no longer
+     *          operational. Only close() can be called successfully by the
+     *          framework after this error is returned.
+     */
+    int (*register_stream_buffers)(const struct camera3_device *,
+            const camera3_stream_buffer_set_t *buffer_set);
+
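A minimal register_stream_buffers() sketch that just copies the handle pointers into hypothetical HAL-private state hung off stream->priv; a real implementation would typically also map or pre-register the buffers with its imaging hardware here:

    #include <errno.h>
    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical per-stream state attached to camera3_stream_t.priv. */
    typedef struct hal_stream_state {
        uint32_t num_buffers;
        buffer_handle_t **buffers;
    } hal_stream_state_t;

    static int hal_register_stream_buffers(const struct camera3_device *dev,
            const camera3_stream_buffer_set_t *buffer_set)
    {
        (void)dev;
        hal_stream_state_t *state =
                (hal_stream_state_t *)buffer_set->stream->priv;
        if (state == NULL)
            return -EINVAL;                 /* stream was never configured */

        /* buffer_set is only valid during this call, so copy what is needed. */
        state->buffers =
                calloc(buffer_set->num_buffers, sizeof(buffer_handle_t *));
        if (state->buffers == NULL)
            return -ENOMEM;
        memcpy(state->buffers, buffer_set->buffers,
               buffer_set->num_buffers * sizeof(buffer_handle_t *));
        state->num_buffers = buffer_set->num_buffers;
        return 0;
    }
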
+    /**********************************************************************
+     * Request creation and submission
+     */
+
+    /**
+     * construct_default_request_settings:
+     *
+     * Create capture settings for standard camera use cases.
+     *
+     * The device must return a settings buffer that is configured to meet the
+     * requested use case, which must be one of the CAMERA3_TEMPLATE_*
+     * enums. All request control fields must be included.
+     *
+     * The HAL retains ownership of this structure, but the pointer to the
+     * structure must be valid until the device is closed. The framework and the
+     * HAL may not modify the buffer once it is returned by this call. The same
+     * buffer may be returned for subsequent calls for the same template, or for
+     * other templates.
+     *
+     * Return values:
+     *
+     *   Valid metadata: On successful creation of a default settings
+     *                   buffer.
+     *
+     *   NULL:           In case of a fatal error. After this is returned, only
+     *                   the close() method can be called successfully by the
+     *                   framework.
+     */
+    const camera_metadata_t* (*construct_default_request_settings)(
+            const struct camera3_device *,
+            int type);
+
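One common strategy, shown here as a hedged sketch: lazily build one settings buffer per template and return the cached pointer on later calls. struct hal_device and hal_build_template() are hypothetical; the actual buffer construction would use the camera_metadata helpers from system/media/camera and is not shown.

    /* Hypothetical HAL-private state reached through camera3_device.priv. */
    struct hal_device {
        const camera_metadata_t *templates[CAMERA3_TEMPLATE_COUNT];
    };

    /* Hypothetical builder that assembles a complete settings buffer. */
    extern const camera_metadata_t *hal_build_template(struct hal_device *hal,
                                                       int type);

    static const camera_metadata_t *hal_construct_default_request_settings(
            const struct camera3_device *dev, int type)
    {
        struct hal_device *hal = (struct hal_device *)dev->priv;

        if (type < 1 || type >= CAMERA3_TEMPLATE_COUNT)
            return NULL;    /* vendor templates not handled in this sketch */

        /* Build lazily and cache: the pointer must stay valid until close(),
         * and neither side may modify the buffer once it has been returned. */
        if (hal->templates[type] == NULL)
            hal->templates[type] = hal_build_template(hal, type);

        return hal->templates[type];
    }
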
+    /**
+     * process_capture_request:
+     *
+     * Send a new capture request to the HAL. The HAL should not return from
+     * this call until it is ready to accept the next request to process. Only
+     * one call to process_capture_request() will be made at a time by the
+     * framework, and the calls will all be from the same thread. The next call
+     * to process_capture_request() will be made as soon as a new request and
+     * its associated buffers are available. In a normal preview scenario, this
+     * means the function will be called again by the framework almost
+     * instantly.
+     *
+     * The actual request processing is asynchronous, with the results of
+     * capture being returned by the HAL through the process_capture_result()
+     * call. This call requires the result metadata to be available, but output
+     * buffers may simply provide sync fences to wait on. Multiple requests are
+     * expected to be in flight at once, to maintain full output frame rate.
+     *
+     * The framework retains ownership of the request structure. It is only
+     * guaranteed to be valid during this call. The HAL device must make copies
+     * of the information it needs to retain for the capture processing.
+     *
+     * The HAL must write the file descriptor for the input buffer's release
+     * sync fence into input_buffer->release_fence, if input_buffer is not
+     * NULL. If the HAL returns -1 for the input buffer release sync fence, the
+     * framework is free to immediately reuse the input buffer. Otherwise, the
+     * framework will wait on the sync fence before refilling and reusing the
+     * input buffer.
+     *
+     * Return values:
+     *
+     *  0:      On a successful start to processing the capture request
+     *
+     * -EINVAL: If the input is malformed (the settings are NULL when not
+     *          allowed, there are 0 output buffers, etc) and capture processing
+     *          cannot start. Failures during request processing should be
+     *          handled by calling camera3_callback_ops_t.notify().
+     *
+     * -ENODEV: If the camera device has encountered a serious error. After this
+     *          error is returned, only the close() method can be successfully
+     *          called by the framework.
+     *
+     */
+    int (*process_capture_request)(const struct camera3_device *,
+            camera3_capture_request_t *request);
+
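An outline of the request path that emphasizes the ownership rules above: copy what is needed out of the framework-owned request, set the input buffer's release fence before returning, and complete the work asynchronously. struct hal_capture and hal_queue_capture() are hypothetical:

    #include <errno.h>
    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical record for a capture that is still in flight. */
    struct hal_capture {
        uint32_t frame_number;
        uint32_t num_output_buffers;
        camera3_stream_buffer_t *output_buffers;   /* HAL-owned copy */
    };

    extern int hal_queue_capture(struct hal_capture *cap);   /* hypothetical */

    static int hal_process_capture_request(const struct camera3_device *dev,
                                           camera3_capture_request_t *request)
    {
        (void)dev;
        if (request == NULL || request->num_output_buffers == 0)
            return -EINVAL;

        struct hal_capture *cap = calloc(1, sizeof(*cap));
        if (cap == NULL)
            return -ENOMEM;

        /* The request is only valid during this call; copy the buffer
         * descriptors needed to complete the capture later. */
        cap->frame_number = request->frame_number;
        cap->num_output_buffers = request->num_output_buffers;
        cap->output_buffers = calloc(cap->num_output_buffers,
                                     sizeof(camera3_stream_buffer_t));
        if (cap->output_buffers == NULL) {
            free(cap);
            return -ENOMEM;
        }
        memcpy(cap->output_buffers, request->output_buffers,
               cap->num_output_buffers * sizeof(camera3_stream_buffer_t));

        /* For reprocessing, hand back a release fence before returning; this
         * sketch does not use sync fences, so -1 means "no waiting needed". */
        if (request->input_buffer != NULL)
            request->input_buffer->release_fence = -1;

        /* Results arrive later through process_capture_result(). */
        return hal_queue_capture(cap);
    }
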
+    /**********************************************************************
+     * Miscellaneous methods
+     */
+
+    /**
+     * get_metadata_vendor_tag_ops:
+     *
+     * Get methods to query for vendor extension metadata tag information. The
+     * HAL should fill in all the vendor tag operation methods, or leave ops
+     * unchanged if no vendor tags are defined.
+     *
+     * The definition of vendor_tag_query_ops_t can be found in
+     * system/media/camera/include/system/camera_metadata.h.
+     *
+     */
+    void (*get_metadata_vendor_tag_ops)(const struct camera3_device*,
+            vendor_tag_query_ops_t* ops);
+
+    /**
+     * dump:
+     *
+     * Print out debugging state for the camera device. This will be called by
+     * the framework when the camera service is asked for a debug dump, which
+     * happens when using the dumpsys tool, or when capturing a bugreport.
+     *
+     * The passed-in file descriptor can be used to write debugging text using
+     * dprintf() or write(). The text should be in ASCII encoding only.
+     */
+    void (*dump)(const struct camera3_device *, int fd);
+
+} camera3_device_ops_t;
+
+/**********************************************************************
+ *
+ * Camera device definition
+ *
+ */
+typedef struct camera3_device {
+    /**
+     * common.version must equal CAMERA_DEVICE_API_VERSION_3_0 to identify this
+     * device as implementing version 3.0 of the camera device HAL.
+     */
+    hw_device_t common;
+    camera3_device_ops_t *ops;
+    void *priv;
+} camera3_device_t;
+
+__END_DECLS
+
+#endif /* #ifdef ANDROID_INCLUDE_CAMERA3_H */
diff --git a/include/hardware/camera_common.h b/include/hardware/camera_common.h
index 5697bda..22b3f7a 100644
--- a/include/hardware/camera_common.h
+++ b/include/hardware/camera_common.h
@@ -59,6 +59,14 @@
  *   HAL interface. The device_version field of camera_info is always valid; the
  *   static_camera_characteristics field of camera_info is valid if the
  *   device_version field is 2.0 or higher.
+ *
+ *******************************************************************************
+ * Version: 2.1 [CAMERA_MODULE_API_VERSION_2_1]
+ *
+ *   This camera module version adds support for asynchronous callbacks to the
+ *   framework from the camera HAL module, which is used to notify the framework
+ *   about changes to the camera module state. Modules that provide a valid
+ *   set_callbacks() method must report at least this version number.
  */
 
 /**
@@ -71,8 +79,9 @@
  */
 #define CAMERA_MODULE_API_VERSION_1_0 HARDWARE_MODULE_API_VERSION(1, 0)
 #define CAMERA_MODULE_API_VERSION_2_0 HARDWARE_MODULE_API_VERSION(2, 0)
+#define CAMERA_MODULE_API_VERSION_2_1 HARDWARE_MODULE_API_VERSION(2, 1)
 
-#define CAMERA_MODULE_API_VERSION_CURRENT CAMERA_MODULE_API_VERSION_2_0
+#define CAMERA_MODULE_API_VERSION_CURRENT CAMERA_MODULE_API_VERSION_2_1
 
 /**
  * All device versions <= HARDWARE_DEVICE_API_VERSION(1, 0xFF) must be treated
@@ -80,8 +89,10 @@
  */
 #define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0)
 #define CAMERA_DEVICE_API_VERSION_2_0 HARDWARE_DEVICE_API_VERSION(2, 0)
+#define CAMERA_DEVICE_API_VERSION_2_1 HARDWARE_DEVICE_API_VERSION(2, 1)
+#define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0)
 
-// Device version 2.0 is experimental
+// Device version 2.x is outdated; device version 3.0 is experimental
 #define CAMERA_DEVICE_API_VERSION_CURRENT CAMERA_DEVICE_API_VERSION_1_0
 
 /**
@@ -89,7 +100,7 @@
  */
 typedef struct camera_metadata camera_metadata_t;
 
-struct camera_info {
+typedef struct camera_info {
     /**
      * The direction that the camera faces to. It should be CAMERA_FACING_BACK
      * or CAMERA_FACING_FRONT.
@@ -105,11 +116,11 @@
      * display in its natural orientation. It should be 0, 90, 180, or 270.
      *
      * For example, suppose a device has a naturally tall screen. The
-     * back-facing camera sensor is mounted in landscape. You are looking at
-     * the screen. If the top side of the camera sensor is aligned with the
-     * right edge of the screen in natural orientation, the value should be
-     * 90. If the top side of a front-facing camera sensor is aligned with the
-     * right of the screen, the value should be 270.
+     * back-facing camera sensor is mounted in landscape. You are looking at the
+     * screen. If the top side of the camera sensor is aligned with the right
+     * edge of the screen in natural orientation, the value should be 90. If the
+     * top side of a front-facing camera sensor is aligned with the right of the
+     * screen, the value should be 270.
      *
      * Version information:
      *   Valid in all camera_module versions
@@ -126,7 +137,7 @@
      *    Not valid. Can be assumed to be CAMERA_DEVICE_API_VERSION_1_0. Do
      *    not read this field.
      *
-     *  CAMERA_MODULE_API_VERSION_2_0:
+     *  CAMERA_MODULE_API_VERSION_2_0 or higher:
      *
      *    Always valid
      *
@@ -137,7 +148,8 @@
      * The camera's fixed characteristics, which include all camera metadata in
      * the android.*.info.* sections. This should be a sorted metadata buffer,
      * and may not be modified or freed by the caller. The pointer should remain
-     * valid for the lifetime of the camera module.
+     * valid for the lifetime of the camera module, and values in it may not
+     * change after it is returned by get_camera_info().
      *
      * Version information (based on camera_module_t.common.module_api_version):
      *
@@ -146,19 +158,119 @@
      *    Not valid. Extra characteristics are not available. Do not read this
      *    field.
      *
-     *  CAMERA_MODULE_API_VERSION_2_0:
+     *  CAMERA_MODULE_API_VERSION_2_0 or higher:
      *
      *    Valid if device_version >= CAMERA_DEVICE_API_VERSION_2_0. Do not read
      *    otherwise.
      *
      */
     const camera_metadata_t *static_camera_characteristics;
-};
+} camera_info_t;
+
+/**
+ * camera_device_status_t:
+ *
+ * The current status of the camera device, as provided by the HAL through the
+ * camera_module_callbacks.camera_device_status_change() call.
+ */
+typedef enum camera_device_status {
+    /**
+     * The camera device is not currently connected, and opening it will return
+     * failure. Calls to get_camera_info must still succeed, and provide the
+     * same information as it would if the camera were connected.
+     */
+    CAMERA_DEVICE_STATUS_NOT_PRESENT = 0,
+
+    /**
+     * The camera device is connected, and opening it will succeed. The
+     * information returned by get_camera_info cannot change due to this status
+     * change. By default, the framework will assume all devices are in this
+     * state.
+     */
+    CAMERA_DEVICE_STATUS_PRESENT = 1
+
+} camera_device_status_t;
+
+/**
+ * Callback functions for the camera HAL module to use to inform the framework
+ * of changes to the camera subsystem. These are called only by HAL modules
+ * implementing version CAMERA_MODULE_API_VERSION_2_1 or higher of the HAL
+ * module API interface.
+ */
+typedef struct camera_module_callbacks {
+
+    /**
+     * camera_device_status_change:
+     *
+     * Callback to the framework to indicate that the state of a specific camera
+     * device has changed. At module load time, the framework will assume all
+     * camera devices are in the CAMERA_DEVICE_STATUS_PRESENT state. The HAL
+     * must call this method to inform the framework of any initially
+     * NOT_PRESENT devices.
+     *
+     * camera_module_callbacks: The instance of camera_module_callbacks_t passed
+     *   to the module with set_callbacks.
+     *
+     * camera_id: The ID of the camera device that has a new status.
+     *
+     * new_status: The new status code, one of the camera_device_status_t enums,
+     *   or a platform-specific status.
+     *
+     */
+    void (*camera_device_status_change)(const struct camera_module_callbacks*,
+            int camera_id,
+            int new_status);
+
+} camera_module_callbacks_t;
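For illustration, a module with a removable camera might report its absence like this once set_callbacks() has run; everything other than the callback itself is hypothetical:

    /* Hypothetical: stored by the module's set_callbacks() implementation. */
    static const camera_module_callbacks_t *g_module_callbacks;

    static void hal_report_camera_removed(int camera_id)
    {
        /* The framework assumes PRESENT by default, so only departures from
         * that state need to be reported proactively. */
        if (g_module_callbacks != NULL)
            g_module_callbacks->camera_device_status_change(g_module_callbacks,
                    camera_id, CAMERA_DEVICE_STATUS_NOT_PRESENT);
    }
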
 
 typedef struct camera_module {
     hw_module_t common;
+
+    /**
+     * get_number_of_cameras:
+     *
+     * Returns the number of camera devices accessible through the camera
+     * module.  The camera devices are numbered 0 through N-1, where N is the
+     * value returned by this call. The name of the camera device for open() is
+     * simply the number converted to a string. That is, "0" for camera ID 0,
+     * "1" for camera ID 1.
+     *
+     * The value here must be static, and cannot change after the first call to
+     * this method.
+     */
     int (*get_number_of_cameras)(void);
+
+    /**
+     * get_camera_info:
+     *
+     * Return the static camera information for a given camera device. This
+     * information may not change for a camera device.
+     *
+     */
     int (*get_camera_info)(int camera_id, struct camera_info *info);
+
+    /**
+     * set_callbacks:
+     *
+     * Provide callback function pointers to the HAL module to inform the
+     * framework of asynchronous camera module events. The framework will call this
+     * function once after initial camera HAL module load, after the
+     * get_number_of_cameras() method is called for the first time, and before
+     * any other calls to the module.
+     *
+     * Version information (based on camera_module_t.common.module_api_version):
+     *
+     *  CAMERA_MODULE_API_VERSION_1_0, CAMERA_MODULE_API_VERSION_2_0:
+     *
+     *    Not provided by HAL module. Framework may not call this function.
+     *
+     *  CAMERA_MODULE_API_VERSION_2_1:
+     *
+     *    Valid to be called by the framework.
+     *
+     */
+    int (*set_callbacks)(const camera_module_callbacks_t *callbacks);
+
 } camera_module_t;
 
 __END_DECLS
diff --git a/include/hardware/gps.h b/include/hardware/gps.h
index 69bfd50..183647c 100644
--- a/include/hardware/gps.h
+++ b/include/hardware/gps.h
@@ -103,6 +103,8 @@
 #define GPS_CAPABILITY_SINGLE_SHOT      0x0000008
 /** GPS supports on demand time injection */
 #define GPS_CAPABILITY_ON_DEMAND_TIME   0x0000010
+/** GPS supports Geofencing  */
+#define GPS_CAPABILITY_GEOFENCING       0x0000020
 
 /** Flags used to specify which aiding data to delete
     when calling delete_aiding_data(). */
@@ -228,6 +230,12 @@
  */
 #define AGPS_RIL_INTERFACE      "agps_ril"
 
+/**
+ * Name for the GPS_Geofencing interface.
+ */
+#define GPS_GEOFENCING_INTERFACE   "gps_geofencing"
+
+
 /** Represents a location. */
 typedef struct {
     /** set to sizeof(GpsLocation) */
@@ -673,6 +681,220 @@
     void (*update_network_availability) (int avaiable, const char* apn);
 } AGpsRilInterface;
 
+/**
+ * GPS Geofence.
+ *      There are 3 states associated with a Geofence: Inside, Outside, Unknown.
+ * There are 3 transitions: ENTERED, EXITED, UNCERTAIN.
+ *
+ * An example state diagram with confidence level: 95% and Unknown time limit
+ * set as 30 secs is shown below. (confidence level and Unknown time limit are
+ * explained later)
+ *                         ____________________________
+ *                        |       Unknown (30 secs)   |
+ *                         """"""""""""""""""""""""""""
+ *                            ^ |                  |  ^
+ *                   UNCERTAIN| |ENTERED     EXITED|  |UNCERTAIN
+ *                            | v                  v  |
+ *                        ________    EXITED     _________
+ *                       | Inside | -----------> | Outside |
+ *                       |        | <----------- |         |
+ *                        """"""""    ENTERED    """""""""
+ *
+ * Inside state: We are 95% confident that the user is inside the geofence.
+ * Outside state: We are 95% confident that the user is outside the geofence
+ * Unknown state: Rest of the time.
+ *
+ * The Unknown state is better explained with an example:
+ *
+ *                            __________
+ *                           |         c|
+ *                           |  ___     |    _______
+ *                           |  |a|     |   |   b   |
+ *                           |  """     |    """""""
+ *                           |          |
+ *                            """"""""""
+ * In the diagram above, "a" and "b" are 2 geofences and "c" is the accuracy
+ * circle reported by the GPS subsystem. Now with regard to "b", the system is
+ * confident that the user is outside. But with regard to "a", it is not confident
+ * whether the user is inside or outside the geofence. If the accuracy remains the
+ * same for a sufficient period of time, the UNCERTAIN transition would be
+ * triggered with the state set to Unknown. If the accuracy improves later, an
+ * appropriate transition should be triggered.  This "sufficient period of time"
+ * is defined by the parameter in the add_geofence_area API.
+ *     In other words, Unknown state can be interpreted as a state in which the
+ * GPS subsystem isn't confident enough that the user is either inside or
+ * outside the Geofence. It moves to Unknown state only after the expiry of the
+ * timeout.
+ *
+ * The geofence callback needs to be triggered for the ENTERED and EXITED
+ * transitions, when the GPS system is confident that the user has entered
+ * (Inside state) or exited (Outside state) the Geofence. An implementation
+ * which uses a value of 95% as the confidence is recommended. The callback
+ * should be triggered only for the transitions requested by the
+ * add_geofence_area call.
+ *
+ * Even though the diagram and explanation talks about states and transitions,
+ * the callee is only interested in the transitions. The states are mentioned
+ * here for illustrative purposes.
+ *
+ * Startup Scenario: When the device boots up, if an application adds geofences,
+ * and then we get an accurate GPS location fix, the GPS subsystem needs to trigger
+ * the appropriate (ENTERED or EXITED) transition for every Geofence it knows about.
+ * By default, all the Geofences will be in the Unknown state.
+ *
+ * When the GPS system is unavailable, gps_geofence_status_callback should be
+ * called to inform the upper layers of the same. Similarly, when it becomes
+ * available the callback should be called. This is a global state while the
+ * UNKNOWN transition described above is per geofence.
+ *
+ * An important aspect to note is that users of this API (the framework) will use
+ * other subsystems like wifi, sensors, and cell to handle the Unknown case and
+ * hopefully provide a definitive state transition to the third party
+ * application. GPS Geofence will just be a signal indicating what the GPS
+ * subsystem knows about the Geofence.
+ *
+ */
+#define GPS_GEOFENCE_ENTERED     (1<<0L)
+#define GPS_GEOFENCE_EXITED      (1<<1L)
+#define GPS_GEOFENCE_UNCERTAIN   (1<<2L)
+
+#define GPS_GEOFENCE_UNAVAILABLE (1<<0L)
+#define GPS_GEOFENCE_AVAILABLE   (1<<1L)
+
+#define GPS_GEOFENCE_OPERATION_SUCCESS           0
+#define GPS_GEOFENCE_ERROR_TOO_MANY_GEOFENCES -100
+#define GPS_GEOFENCE_ERROR_ID_EXISTS          -101
+#define GPS_GEOFENCE_ERROR_ID_UNKNOWN         -102
+#define GPS_GEOFENCE_ERROR_INVALID_TRANSITION -103
+#define GPS_GEOFENCE_ERROR_GENERIC            -149
+
+/**
+ * The callback associated with the geofence.
+ * Parameters:
+ *      geofence_id - The id associated with the add_geofence_area.
+ *      location    - The current GPS location.
+ *      transition  - Can be one of GPS_GEOFENCE_ENTERED, GPS_GEOFENCE_EXITED,
+ *                    GPS_GEOFENCE_UNCERTAIN.
+ *      timestamp   - Timestamp when the transition was detected.
+ *
+ * The callback should only be called when the caller is interested in that
+ * particular transition. For instance, if the caller is interested only in
+ * ENTERED transition, then the callback should NOT be called with the EXITED
+ * transition.
+ *
+ * IMPORTANT: If a transition is triggered resulting in this callback, the GPS
+ * subsystem will wake up the application processor, if it is in suspend state.
+ */
+typedef void (*gps_geofence_transition_callback) (int32_t geofence_id,  GpsLocation* location,
+        int32_t transition, GpsUtcTime timestamp);
+
+/**
+ * The callback associated with the availability of the GPS system for geofencing
+ * monitoring. If the GPS system determines that it cannot monitor geofences
+ * because of lack of reliability or unavailability of the GPS signals, it will
+ * call this callback with the GPS_GEOFENCE_UNAVAILABLE parameter.
+ *
+ * Parameters:
+ *  status - GPS_GEOFENCE_UNAVAILABLE or GPS_GEOFENCE_AVAILABLE.
+ *  last_location - Last known location.
+ */
+typedef void (*gps_geofence_status_callback) (int32_t status, GpsLocation* last_location);
+
+typedef struct {
+    gps_geofence_transition_callback geofence_transition_callback;
+    gps_geofence_status_callback geofence_status_callback;
+    gps_create_thread create_thread_cb;
+} GpsGeofenceCallbacks;
+
+/** Extended interface for GPS_Geofencing support */
+typedef struct {
+   /** set to sizeof(GpsGeofencingInterface) */
+   size_t          size;
+
+   /**
+    * Opens the geofence interface and provides the callback routines
+    * to the implementation of this interface.
+    */
+   void  (*init)( GpsGeofenceCallbacks* callbacks );
+
+   /**
+    * Add a geofence area. This api currently supports circular geofences.
+    * Parameters:
+    *    geofence_id - The id for the geofence. If a geofence with this id
+    *       already exists, an error value (GPS_GEOFENCE_ERROR_ID_EXISTS)
+    *       should be returned.
+    *    latitude, longitude, radius_meters - The lat, long and radius
+    *       (in meters) for the geofence
+    *    last_transition - The current state of the geofence. For example, if
+    *       the system already knows that the user is inside the geofence,
+    *       this will be set to GPS_GEOFENCE_ENTERED. In most cases, it
+    *       will be GPS_GEOFENCE_UNCERTAIN.
+    *    monitor_transition - Which transitions to monitor. Bitwise OR of
+    *       GPS_GEOFENCE_ENTERED, GPS_GEOFENCE_EXITED and
+    *       GPS_GEOFENCE_UNCERTAIN.
+    *    notification_responsiveness_ms - Defines the best-effort description
+    *       of how soon the callback should be called when the transition
+    *       associated with the Geofence is triggered. For instance, if set
+    *       to 1000 milliseconds with GPS_GEOFENCE_ENTERED, the callback
+    *       should be called within 1000 milliseconds of entering the geofence.
+    *       This parameter is defined in milliseconds.
+    *       NOTE: This is not to be confused with the rate that the GPS is
+    *       polled at. It is acceptable to dynamically vary the rate of
+    *       sampling the GPS for power-saving reasons; thus the rate of
+    *       sampling may be faster or slower than this.
+    *    unknown_timer_ms - The time limit after which the UNCERTAIN transition
+    *       should be triggered. This parameter is defined in milliseconds.
+    *       See above for a detailed explanation.
+    *    Return value: GPS_GEOFENCE_OPERATION_SUCCESS on success,
+    *                  or any of the GPS_GEOFENCE_ERRORS on failure.
+    */
+   int (*add_geofence_area) (int32_t geofence_id, double latitude,
+                                double longitude, double radius_meters,
+                                int last_transition, int monitor_transitions,
+                                int notification_responsiveness_ms,
+                                int unknown_timer_ms);
+
+   /**
+    * Pause monitoring a particular geofence.
+    * Parameters:
+    *   geofence_id - The id for the geofence.
+    *
+    * Return value: GPS_GEOFENCE_OPERATION_SUCCESS on success,
+    *               GPS_GEOFENCE_ERROR_ID_UNKNOWN - for invalid id
+    *               GPS_GEOFENCE_ERROR_GENERIC for others.
+    */
+   int (*pause_geofence) (int32_t geofence_id);
+
+   /**
+    * Resume monitoring a particular geofence.
+    * Parameters:
+    *   geofence_id - The id for the geofence.
+    *   monitor_transitions - Which transitions to monitor. Bitwise OR of
+    *       GPS_GEOFENCE_ENTERED, GPS_GEOFENCE_EXITED and
+    *       GPS_GEOFENCE_UNCERTAIN.
+    *       This supersedes the value provided in the
+    *       add_geofence_area call.
+    *
+    * Return value: GPS_GEOFENCE_OPERATION_SUCCESS on success,
+    *               GPS_GEOFENCE_ERROR_ID_UNKNOWN - for invalid id
+    *               GPS_GEOFENCE_ERROR_INVALID_TRANSITION - when
+    *                   monitor_transitions is invalid
+    *               GPS_GEOFENCE_ERROR_GENERIC for others.
+    *
+    */
+   int (*resume_geofence) (int32_t geofence_id, int monitor_transitions);
+
+   /**
+    * Remove a geofence area. After the function returns, no notifications
+    * should be sent.
+    * Parameter:
+    *   geofence_id - The id for the geofence.
+    * Return value: GPS_GEOFENCE_OPERATION_SUCCESS on success,
+    *               GPS_GEOFENCE_ERROR_ID_UNKNOWN - for invalid id
+    *               GPS_GEOFENCE_ERROR_GENERIC for others.
+    */
+   int (*remove_geofence_area) (int32_t geofence_id);
+} GpsGeofencingInterface;
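A hedged usage sketch from the caller's side: register callbacks, then watch a 200 m circle around an arbitrary point. The coordinates and timing values are made up, the callback bodies are stubs, and obtaining the interface (normally via the GPS HAL's get_extension(GPS_GEOFENCING_INTERFACE)) is left out:

    #include <pthread.h>

    /* Hypothetical framework-side callback implementations. */
    static void on_geofence_transition(int32_t geofence_id, GpsLocation* location,
            int32_t transition, GpsUtcTime timestamp) { /* ... */ }
    static void on_geofence_status(int32_t status, GpsLocation* last_location) { /* ... */ }
    /* Matches the gps_create_thread typedef; provided elsewhere (hypothetical). */
    extern pthread_t framework_create_thread(const char* name,
            void (*start)(void*), void* arg);

    static GpsGeofenceCallbacks g_geofence_cbs = {
        .geofence_transition_callback = on_geofence_transition,
        .geofence_status_callback = on_geofence_status,
        .create_thread_cb = framework_create_thread,
    };

    static void watch_example_geofence(const GpsGeofencingInterface* gf)
    {
        gf->init(&g_geofence_cbs);
        /* 200 m circle, start UNCERTAIN, watch all transitions, expect
         * callbacks within ~5 s, fall back to UNCERTAIN after 30 s of
         * insufficient accuracy. */
        int err = gf->add_geofence_area(1 /* geofence_id */,
                37.422, -122.084, 200.0,
                GPS_GEOFENCE_UNCERTAIN,
                GPS_GEOFENCE_ENTERED | GPS_GEOFENCE_EXITED | GPS_GEOFENCE_UNCERTAIN,
                5000 /* notification_responsiveness_ms */,
                30000 /* unknown_timer_ms */);
        if (err != GPS_GEOFENCE_OPERATION_SUCCESS) {
            /* e.g. GPS_GEOFENCE_ERROR_TOO_MANY_GEOFENCES */
        }
    }
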
 __END_DECLS
 
 #endif /* ANDROID_INCLUDE_HARDWARE_GPS_H */
diff --git a/include/hardware/gralloc.h b/include/hardware/gralloc.h
index 7c819f0..a6d9c1f 100644
--- a/include/hardware/gralloc.h
+++ b/include/hardware/gralloc.h
@@ -57,9 +57,9 @@
     
     /* buffer is never written in software */
     GRALLOC_USAGE_SW_WRITE_NEVER        = 0x00000000,
-    /* buffer is never written in software */
+    /* buffer is rarely written in software */
     GRALLOC_USAGE_SW_WRITE_RARELY       = 0x00000020,
-    /* buffer is never written in software */
+    /* buffer is often written in software */
     GRALLOC_USAGE_SW_WRITE_OFTEN        = 0x00000030,
     /* mask for the software write values */
     GRALLOC_USAGE_SW_WRITE_MASK         = 0x000000F0,
diff --git a/include/hardware/hwcomposer.h b/include/hardware/hwcomposer.h
index 391fe64..3b29f1f 100644
--- a/include/hardware/hwcomposer.h
+++ b/include/hardware/hwcomposer.h
@@ -202,13 +202,53 @@
              * responsible for closing it when no longer needed.
              */
             int releaseFenceFd;
+
+            /*
+             * Availability: HWC_DEVICE_API_VERSION_1_2
+             *
+             * Alpha value applied to the whole layer. The effective
+             * value of each pixel is computed as:
+             *
+             *   if (blending == HWC_BLENDING_PREMULT)
+             *      pixel.rgb = pixel.rgb * planeAlpha / 255
+             *   pixel.a = pixel.a * planeAlpha / 255
+             *
+             * Then blending proceeds as usual according to the "blending"
+             * field above.
+             *
+             * NOTE: planeAlpha applies to YUV layers as well:
+             *
+             *   pixel.rgb = yuv_to_rgb(pixel.yuv)
+             *   if (blending == HWC_BLENDING_PREMULT)
+             *      pixel.rgb = pixel.rgb * planeAlpha / 255
+             *   pixel.a = planeAlpha
+             *
+             *
+             * IMPLEMENTATION NOTE:
+             *
+             * If the source image doesn't have an alpha channel, then
+             * the h/w can use the HWC_BLENDING_COVERAGE equations instead of
+             * HWC_BLENDING_PREMULT and simply set the alpha channel to
+             * planeAlpha.
+             *
+             * e.g.:
+             *
+             *   if (blending == HWC_BLENDING_PREMULT)
+             *      blending = HWC_BLENDING_COVERAGE;
+             *   pixel.a = planeAlpha;
+             *
+             */
+            uint8_t planeAlpha;
+
+            /* reserved for future use */
+            uint8_t _pad[3];
         };
     };
 
     /* Allow for expansion w/o breaking binary compatibility.
      * Pad layer to 96 bytes, assuming 32-bit pointers.
      */
-    int32_t reserved[24 - 18];
+    int32_t reserved[24 - 19];
 
 } hwc_layer_1_t;
 
diff --git a/include/hardware/sensors.h b/include/hardware/sensors.h
index af6bced..dc209f7 100644
--- a/include/hardware/sensors.h
+++ b/include/hardware/sensors.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008 The Android Open Source Project
+ * Copyright (C) 2012 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -26,6 +26,13 @@
 
 __BEGIN_DECLS
 
+/*****************************************************************************/
+
+#define SENSORS_HEADER_VERSION          1
+#define SENSORS_MODULE_API_VERSION_0_1  HARDWARE_MODULE_API_VERSION(0, 1)
+#define SENSORS_DEVICE_API_VERSION_0_1  HARDWARE_DEVICE_API_VERSION_2(0, 1, SENSORS_HEADER_VERSION)
+#define SENSORS_DEVICE_API_VERSION_1_0  HARDWARE_DEVICE_API_VERSION_2(1, 0, SENSORS_HEADER_VERSION)
+
 /**
  * The id of this module
  */
@@ -47,50 +54,35 @@
 #define SENSORS_HANDLE_COUNT            (1<<SENSORS_HANDLE_BITS)
 
 
-/**
- * Sensor types
+/* attributes queriable with query() */
+enum {
+    /*
+     * Availability: SENSORS_DEVICE_API_VERSION_1_0
+     * return the maximum number of events that can be returned
+     * in a single call to (*poll)(). This value is used by the
+     * framework to adequately dimension the buffer passed to
+     * (*poll)(). Note that (*poll)() still needs to pay attention to
+     * the count parameter passed to it; it cannot blindly expect that
+     * this value will be used for all calls to (*poll)().
+     *
+     * Generally this value should be set to match the sum of the internal
+     * FIFOs of all available sensors.
+     */
+    SENSORS_QUERY_MAX_EVENTS_BATCH_COUNT     = 0
+};
+
+/*
+ * flags for (*batch)()
+ * Availability: SENSORS_DEVICE_API_VERSION_1_0
+ * see (*batch)() documentation for details
  */
-#define SENSOR_TYPE_ACCELEROMETER       1
-#define SENSOR_TYPE_MAGNETIC_FIELD      2
-#define SENSOR_TYPE_ORIENTATION         3
-#define SENSOR_TYPE_GYROSCOPE           4
-#define SENSOR_TYPE_LIGHT               5
-#define SENSOR_TYPE_PRESSURE            6
-#define SENSOR_TYPE_TEMPERATURE         7   // deprecated
-#define SENSOR_TYPE_PROXIMITY           8
-#define SENSOR_TYPE_GRAVITY             9
-#define SENSOR_TYPE_LINEAR_ACCELERATION 10
-#define SENSOR_TYPE_ROTATION_VECTOR     11
-#define SENSOR_TYPE_RELATIVE_HUMIDITY   12
-#define SENSOR_TYPE_AMBIENT_TEMPERATURE 13
+enum {
+    SENSORS_BATCH_DRY_RUN               = 0x00000001,
+    SENSORS_BATCH_WAKE_UPON_FIFO_FULL   = 0x00000002
+};
 
 /**
- * Values returned by the accelerometer in various locations in the universe.
- * all values are in SI units (m/s^2)
- */
-
-#define GRAVITY_SUN             (275.0f)
-#define GRAVITY_EARTH           (9.80665f)
-
-/** Maximum magnetic field on Earth's surface */
-#define MAGNETIC_FIELD_EARTH_MAX    (60.0f)
-
-/** Minimum magnetic field on Earth's surface */
-#define MAGNETIC_FIELD_EARTH_MIN    (30.0f)
-
-
-/**
- * status of each sensor
- */
-
-#define SENSOR_STATUS_UNRELIABLE        0
-#define SENSOR_STATUS_ACCURACY_LOW      1
-#define SENSOR_STATUS_ACCURACY_MEDIUM   2
-#define SENSOR_STATUS_ACCURACY_HIGH     3
-
-/**
- * Definition of the axis
- * ----------------------
+ * Definition of the axis used by the sensor HAL API
  *
  * This API is relative to the screen of the device in its default orientation,
  * that is, if the device can be used in portrait or landscape, this API
@@ -117,9 +109,144 @@
  *
  *    O: Origin (x=0,y=0,z=0)
  *
+ */
+
+/*
+ * Interaction with suspend mode
  *
+ * Unless otherwise noted, an enabled sensor shall not prevent the
+ * SoC from going into suspend mode. It is the responsibility of applications
+ * to keep a partial wake-lock should they wish to receive sensor
+ * events while the screen is off. While in suspend mode, and unless
+ * otherwise noted, enabled sensors' events are lost.
+ *
+ * Note that conceptually, the sensor itself is not de-activated while in
+ * suspend mode -- it's just that the data it returns are lost. As soon as
+ * the SoC gets out of suspend mode, operations resume as usual. Of course,
+ * in practice sensors shall be disabled while in suspend mode to
+ * save power, unless batch mode is active, in which case they must
+ * continue fill their internal FIFO (see the documentation of batch() to
+ * learn how suspend interacts with batch mode).
+ *
+ * In batch mode and only when the flag SENSORS_BATCH_WAKE_UPON_FIFO_FULL is
+ * set and supported, the specified sensor must be able to wake up the SoC and
+ * be able to buffer at least 10 seconds worth of the requested sensor events.
+ *
+ * There are notable exceptions to this behavior, which are sensor-dependent
+ * (see the sensor type definitions below).
+ *
+ *
+ * The sensor type documentation below specifies the wake-up behavior of
+ * each sensor:
+ *   wake-up: yes     this sensor must wake up the SoC to deliver events
+ *   wake-up: no      this sensor shall not wake up the SoC; events are dropped
+ *
+ */
+
+/*
+ * Sensor type
+ *
+ * Each sensor has a type which defines what this sensor measures and how
+ * measurements are reported. All types are defined below.
+ */
+
+/*
+ * Sensor fusion and virtual sensors
+ *
+ * Many sensor types are or can be implemented as virtual sensors from
+ * physical sensors on the device. For instance: the rotation vector sensor,
+ * the orientation sensor, the step detector, the step counter, etc.
+ *
+ * From the point of view of this API these virtual sensors MUST appear as
+ * real, individual sensors. It is the responsibility of the driver and HAL
+ * to make sure this is the case.
+ *
+ * In particular, all sensors must be able to function concurrently.
+ * For example, if defining both an accelerometer and a step counter,
+ * then both must be able to work concurrently.
+ */
+
+/*
+ * Trigger modes
+ *
+ * Sensors can report events in different ways called trigger modes;
+ * each sensor type has one and only one trigger mode associated with it.
+ * Currently there are four trigger modes defined:
+ *
+ * continuous: events are reported at a constant rate defined by setDelay().
+ *             eg: accelerometers, gyroscopes.
+ * on-change:  events are reported only if the sensor's value has changed.
+ *             setDelay() is used to set a lower limit to the reporting
+ *             period (minimum time between two events).
+ *             The HAL must return an event immediately when an on-change
+ *             sensor is activated.
+ *             eg: proximity, light sensors
+ * one-shot:   upon detection of an event, the sensor deactivates itself and
+ *             then sends a single event. Order matters to avoid race
+ *             conditions. No other event is sent until the sensor gets
+ *             reactivated. setDelay() is ignored.
+ *             eg: significant motion sensor
+ * special:    see details in the sensor type specification below
+ *
+ */
+
+/*
+ * SENSOR_TYPE_ACCELEROMETER
+ * trigger-mode: continuous
+ * wake-up sensor: no
+ *
+ *  All values are in SI units (m/s^2) and measure the acceleration of the
+ *  device minus the force of gravity.
+ *
+ *  Acceleration sensors return sensor events for all 3 axes at a constant
+ *  rate defined by setDelay().
+ *
+ *  x: Acceleration on the x-axis
+ *  y: Acceleration on the y-axis
+ *  z: Acceleration on the z-axis
+ *
+ * Note that the readings from the accelerometer include the acceleration
+ * due to gravity (which is opposite to the direction of the gravity vector).
+ *
+ *  Examples:
+ *    The norm of <x, y, z>  should be close to 0 when in free fall.
+ *
+ *    When the device lies flat on a table and is pushed on its left side
+ *    toward the right, the x acceleration value is positive.
+ *
+ *    When the device lies flat on a table, the acceleration value is +9.81,
+ *    which corresponds to the acceleration of the device (0 m/s^2) minus the
+ *    force of gravity (-9.81 m/s^2).
+ *
+ *    When the device lies flat on a table and is pushed toward the sky, the
+ *    acceleration value is greater than +9.81, which corresponds to the
+ *    acceleration of the device (+A m/s^2) minus the force of
+ *    gravity (-9.81 m/s^2).
+ */
+#define SENSOR_TYPE_ACCELEROMETER                    (1)
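A minimal sketch of the convention above (illustration only; it assumes the acceleration payload is exposed through the sensors_vec_t 'acceleration' member of sensors_event_t, as in the unchanged part of this header, and the helper name is hypothetical):

    #include <math.h>

    /* Sketch: the norm of the reported vector is ~GRAVITY_EARTH at rest
     * and ~0 in free fall. */
    static int accel_is_roughly_at_rest(const sensors_event_t* e)
    {
        const float x = e->acceleration.x;
        const float y = e->acceleration.y;
        const float z = e->acceleration.z;
        const float norm = sqrtf(x * x + y * y + z * z);
        return fabsf(norm - GRAVITY_EARTH) < 0.5f; /* 0.5 m/s^2 tolerance, arbitrary */
    }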
+
+/*
+ * SENSOR_TYPE_GEOMAGNETIC_FIELD
+ * trigger-mode: continuous
+ * wake-up sensor: no
+ *
+ *  All values are in micro-Tesla (uT) and measure the geomagnetic
+ *  field along the X, Y and Z axes.
+ *
+ *  Returned values are calibrated such that the vector is aligned with
+ *  the magnetic declination and heading of the earth's geomagnetic
+ *  field.
+ *
+ *  Magnetic Field sensors return sensor events for all 3 axes at a constant
+ *  rate defined by setDelay().
+ */
+#define SENSOR_TYPE_GEOMAGNETIC_FIELD                (2)
+#define SENSOR_TYPE_MAGNETIC_FIELD  SENSOR_TYPE_GEOMAGNETIC_FIELD
+
+/*
  * SENSOR_TYPE_ORIENTATION
- * -----------------------
+ * trigger-mode: continuous
+ * wake-up sensor: no
  * 
  * All values are angles in degrees.
  * 
@@ -153,46 +280,13 @@
  *
  * Note: This definition is different from yaw, pitch and roll used in aviation
  *  where the X axis is along the long side of the plane (tail to nose).
- *  
- *  
- * SENSOR_TYPE_ACCELEROMETER
- * -------------------------
- *
- *  All values are in SI units (m/s^2) and measure the acceleration of the
- *  device minus the force of gravity.
- *  
- *  Acceleration sensors return sensor events for all 3 axes at a constant
- *  rate defined by setDelay().
- *
- *  x: Acceleration minus Gx on the x-axis 
- *  y: Acceleration minus Gy on the y-axis 
- *  z: Acceleration minus Gz on the z-axis
- *  
- *  Examples:
- *    When the device lies flat on a table and is pushed on its left side
- *    toward the right, the x acceleration value is positive.
- *    
- *    When the device lies flat on a table, the acceleration value is +9.81,
- *    which correspond to the acceleration of the device (0 m/s^2) minus the
- *    force of gravity (-9.81 m/s^2).
- *    
- *    When the device lies flat on a table and is pushed toward the sky, the
- *    acceleration value is greater than +9.81, which correspond to the
- *    acceleration of the device (+A m/s^2) minus the force of 
- *    gravity (-9.81 m/s^2).
- *    
- *    
- * SENSOR_TYPE_MAGNETIC_FIELD
- * --------------------------
- * 
- *  All values are in micro-Tesla (uT) and measure the ambient magnetic
- *  field in the X, Y and Z axis.
- *
- *  Magnetic Field sensors return sensor events for all 3 axes at a constant
- *  rate defined by setDelay().
- *
+ */
+#define SENSOR_TYPE_ORIENTATION                      (3)
+
+/*
  * SENSOR_TYPE_GYROSCOPE
- * ---------------------
+ * trigger-mode: continuous
+ * wake-up sensor: no
  *
  *  All values are in radians/second and measure the rate of rotation
  *  around the X, Y and Z axis.  The coordinate system is the same as is
@@ -205,52 +299,78 @@
  *  with the definition of roll given earlier.
  *  The range should at least be 17.45 rad/s (ie: ~1000 deg/s).
  *
+ *  Automatic gyro-drift compensation is allowed but not required.
+ */
+#define SENSOR_TYPE_GYROSCOPE                        (4)
+
+/*
+ * SENSOR_TYPE_LIGHT
+ * trigger-mode: on-change
+ * wake-up sensor: no
+ *
+ * The light sensor value is returned in SI lux units.
+ */
+#define SENSOR_TYPE_LIGHT                            (5)
+
+/*
+ * SENSOR_TYPE_PRESSURE
+ * trigger-mode: continuous
+ * wake-up sensor: no
+ *
+ * The pressure sensor returns the atmospheric pressure in hectopascals (hPa).
+ */
+#define SENSOR_TYPE_PRESSURE                         (6)
+
+/* SENSOR_TYPE_TEMPERATURE is deprecated in the HAL */
+#define SENSOR_TYPE_TEMPERATURE                      (7)
+
+/*
  * SENSOR_TYPE_PROXIMITY
- * ----------------------
+ * trigger-mode: on-change
+ * wake-up sensor: yes
  *
  * The distance value is measured in centimeters.  Note that some proximity
  * sensors only support a binary "close" or "far" measurement.  In this case,
  * the sensor should report its maxRange value in the "far" state and a value
  * less than maxRange in the "near" state.
- *
- * Proximity sensors report a value only when it changes and each time the
- * sensor is enabled.
- *
- * SENSOR_TYPE_LIGHT
- * -----------------
- *
- * The light sensor value is returned in SI lux units.
- *
- * Light sensors report a value only when it changes and each time the
- * sensor is enabled.
- *
- * SENSOR_TYPE_PRESSURE
- * --------------------
- *
- * The pressure sensor return the athmospheric pressure in hectopascal (hPa)
- *
- * Pressure sensors report events at a constant rate defined by setDelay().
- *
+ */
+#define SENSOR_TYPE_PROXIMITY                        (8)
+
+/*
  * SENSOR_TYPE_GRAVITY
- * -------------------
+ * trigger-mode: continuous
+ * wake-up sensor: no
  *
  * A gravity output indicates the direction of and magnitude of gravity in
  * the devices's coordinates.  On Earth, the magnitude is 9.8 m/s^2.
  * Units are m/s^2.  The coordinate system is the same as is used for the
  * acceleration sensor. When the device is at rest, the output of the
  * gravity sensor should be identical to that of the accelerometer.
- *
+ */
+#define SENSOR_TYPE_GRAVITY                          (9)
+
+/*
  * SENSOR_TYPE_LINEAR_ACCELERATION
- * --------------------------------
+ * trigger-mode: continuous
+ * wake-up sensor: no
  *
  * Indicates the linear acceleration of the device in device coordinates,
  * not including gravity.
- * This output is essentially Acceleration - Gravity.  Units are m/s^2.
+ *
+ * The output is conceptually:
+ *    output of SENSOR_TYPE_ACCELEROMETER - output of SENSOR_TYPE_GRAVITY
+ *
+ * Readings on all axes should be close to 0 when the device lies on a table.
+ * Units are m/s^2.
  * The coordinate system is the same as is used for the acceleration sensor.
- *
- *
+ */
+#define SENSOR_TYPE_LINEAR_ACCELERATION             (10)
+
+
+/*
  * SENSOR_TYPE_ROTATION_VECTOR
- * ---------------------------
+ * trigger-mode: continuous
+ * wake-up sensor: no
  *
  * A rotation vector represents the orientation of the device as a combination
  * of an angle and an axis, in which the device has rotated through an angle
@@ -259,9 +379,10 @@
  * of the rotation vector is equal to sin(theta/2), and the direction of the
  * rotation vector is equal to the direction of the axis of rotation. The three
  * elements of the rotation vector are equal to the last three components of a
- * unit quaternion <cos(theta/2), x*sin(theta/2), y*sin(theta/2), z*sin(theta/2)>.
- * Elements of the rotation vector are unitless.  The x, y, and z axis are defined
- * in the same was as for the acceleration sensor.
+ * unit quaternion
+ *   <cos(theta/2), x*sin(theta/2), y*sin(theta/2), z*sin(theta/2)>.
+ * Elements of the rotation vector are unitless.  The x, y, and z axis are
+ * defined in the same way as for the acceleration sensor.
  *
  * The reference coordinate system is defined as a direct orthonormal basis,
  * where:
@@ -280,29 +401,319 @@
  *   sensors_event_t.data[0] = x*sin(theta/2)
  *   sensors_event_t.data[1] = y*sin(theta/2)
  *   sensors_event_t.data[2] = z*sin(theta/2)
- *   sensors_event_t.data[3] =   cos(theta/2)
  *
+ * In addition, this sensor reports an estimated heading accuracy.
+ *   sensors_event_t.data[3] = estimated_accuracy (in radians)
+ * The heading error must be less than estimated_accuracy 95% of the time.
  *
+ * This sensor must use a gyroscope and an accelerometer as main orientation
+ * change input.
+ *
+ * This sensor can also include magnetometer input to make up for gyro drift,
+ * but it cannot be implemented using only a magnetometer.
+ */
+#define SENSOR_TYPE_ROTATION_VECTOR                 (11)
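A worked sketch of the encoding above: for a rotation of theta radians about the Z axis, the axis is <0, 0, 1>, so only data[2] carries a non-zero vector component (the helper and the accuracy input are hypothetical):

    #include <math.h>

    /* Sketch: fill the rotation-vector payload for a rotation of 'theta'
     * radians about the Z axis. */
    static void fill_rotation_about_z(sensors_event_t* ev,
                                      float theta, float accuracy_rad)
    {
        ev->data[0] = 0.0f;               /* x * sin(theta/2) */
        ev->data[1] = 0.0f;               /* y * sin(theta/2) */
        ev->data[2] = sinf(theta / 2.0f); /* z * sin(theta/2) */
        ev->data[3] = accuracy_rad;       /* estimated heading accuracy (radians) */
    }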
+
+/*
  * SENSOR_TYPE_RELATIVE_HUMIDITY
- * ------------------------------
+ * trigger-mode: on-change
+ * wake-up sensor: no
  *
  * A relative humidity sensor measures relative ambient air humidity and
  * returns a value in percent.
- *
- * Relative humidity sensors report a value only when it changes and each
- * time the sensor is enabled.
- *
- *
+ */
+#define SENSOR_TYPE_RELATIVE_HUMIDITY               (12)
+
+/*
  * SENSOR_TYPE_AMBIENT_TEMPERATURE
- * -------------------------------
+ * trigger-mode: on-change
+ * wake-up sensor: no
  *
  * The ambient (room) temperature in degree Celsius.
+ */
+#define SENSOR_TYPE_AMBIENT_TEMPERATURE             (13)
+
+/*
+ * SENSOR_TYPE_MAGNETIC_FIELD_UNCALIBRATED
+ * trigger-mode: continuous
+ * wake-up sensor: no
  *
- * Temperature sensors report a value only when it changes and each time the
- * sensor is enabled.
+ *  Similar to SENSOR_TYPE_MAGNETIC_FIELD, but the hard iron calibration is
+ *  reported separately instead of being included in the measurement.
+ *  Factory calibration and temperature compensation should still be applied to
+ *  the "uncalibrated" measurement.
+ *  Separating out the hard iron calibration estimation allows the system to
+ *  better recover from bad hard iron estimation.
  *
+ *  All values are in micro-Tesla (uT) and measure the ambient magnetic
+ *  field along the X, Y and Z axes. Assumptions that the magnetic field
+ *  is due to the Earth's poles should be avoided.
+ *
+ *  The uncalibrated_magnetic event contains
+ *  - 3 fields for uncalibrated measurement: x_uncalib, y_uncalib, z_uncalib.
+ *    Each is a component of the measured magnetic field, with soft iron
+ *    and temperature compensation applied, but not hard iron calibration.
+ *    These values should be continuous (no re-calibration should cause a jump).
+ *  - 3 fields for hard iron bias estimates: x_bias, y_bias, z_bias.
+ *    Each field is a component of the estimated hard iron calibration.
+ *    They represent the offsets to apply to the uncalibrated readings to obtain
+ *    calibrated readings (x_calibrated = x_uncalib + x_bias)
+ *    These values are expected to jump as soon as the estimate of the hard iron
+ *    changes.
+ *
+ *  If this sensor is present, then the corresponding
+ *  SENSOR_TYPE_MAGNETIC_FIELD must be present and both must return the
+ *  same sensor_t::name and sensor_t::vendor.
+ *
+ * See SENSOR_TYPE_MAGNETIC_FIELD for more information
+ */
+#define SENSOR_TYPE_MAGNETIC_FIELD_UNCALIBRATED     (14)
+
+/*
+ * SENSOR_TYPE_GAME_ROTATION_VECTOR
+ * trigger-mode: continuous
+ * wake-up sensor: no
+ *
+ *  Similar to SENSOR_TYPE_ROTATION_VECTOR, but not using the geomagnetic
+ *  field. Therefore the Y axis doesn't point north, but instead to some other
+ *  reference. That reference is allowed to drift by the same order of
+ *  magnitude as the gyroscope drift around the Z axis.
+ *
+ *  This sensor does not report an estimated heading accuracy:
+ *    sensors_event_t.data[3] is reserved and should be set to 0
+ *
+ *  In the ideal case, a phone rotated and returning to the same real-world
+ *  orientation should report the same game rotation vector
+ *  (without using the earth's geomagnetic field).
+ *
+ *  This sensor must be based on a gyroscope. It cannot be implemented using
+ *  a magnetometer.
+ *
+ * see SENSOR_TYPE_ROTATION_VECTOR for more details
+ */
+#define SENSOR_TYPE_GAME_ROTATION_VECTOR            (15)
+
+/*
+ * SENSOR_TYPE_GYROSCOPE_UNCALIBRATED
+ * trigger-mode: continuous
+ * wake-up sensor: no
+ *
+ *  All values are in radians/second and measure the rate of rotation
+ *  around the X, Y and Z axis. An estimation of the drift on each axis is
+ *  reported as well.
+ *
+ *  No gyro-drift compensation shall be performed.
+ *  Factory calibration and temperature compensation should still be applied
+ *  to the rate of rotation (angular speeds).
+ *
+ *  The coordinate system is the same as is
+ *  used for the acceleration sensor. Rotation is positive in the
+ *  counter-clockwise direction (right-hand rule). That is, an observer
+ *  looking from some positive location on the x, y or z axis at a device
+ *  positioned on the origin would report positive rotation if the device
+ *  appeared to be rotating counter clockwise. Note that this is the
+ *  standard mathematical definition of positive rotation and does not agree
+ *  with the definition of roll given earlier.
+ *  The range should at least be 17.45 rad/s (ie: ~1000 deg/s).
+ *
+ *  Content of an uncalibrated_gyro event: (units are rad/sec)
+ *   x_uncalib : angular speed (w/o drift compensation) around the X axis
+ *   y_uncalib : angular speed (w/o drift compensation) around the Y axis
+ *   z_uncalib : angular speed (w/o drift compensation) around the Z axis
+ *   x_bias : estimated drift around X axis in rad/s
+ *   y_bias : estimated drift around Y axis in rad/s
+ *   z_bias : estimated drift around Z axis in rad/s
+ *
+ *  IMPLEMENTATION NOTES:
+ *
+ *  If the implementation is not able to estimate the drift, then this
+ *  sensor MUST NOT be reported by this HAL. Instead, the regular
+ *  SENSOR_TYPE_GYROSCOPE is used without drift compensation.
+ *
+ *  If this sensor is present, then the corresponding
+ *  SENSOR_TYPE_GYROSCOPE must be present and both must return the
+ *  same sensor_t::name and sensor_t::vendor.
+ */
+#define SENSOR_TYPE_GYROSCOPE_UNCALIBRATED          (16)
+
+
+/*
+ * SENSOR_TYPE_SIGNIFICANT_MOTION
+ * trigger-mode: one-shot
+ * wake-up sensor: yes
+ *
+ * A sensor of this type triggers an event each time significant motion
+ * is detected and automatically disables itself.
+ * The only allowed value to return is 1.0.
+ *
+ * A significant motion is a motion that might lead to a change in the user
+ * location.
+ * Examples of such motions are:
+ *   walking, biking, sitting in a moving car, coach or train.
+ * Examples of situations that should not trigger significant motion:
+ * - phone in pocket and person is not moving
+ * - phone is on a table, even if the table shakes a bit due to nearby traffic
+ *   or washing machine
+ *
+ * A note on false positive / false negative / power consumption tradeoff
+ *  - The goal of this sensor is to save power.
+ *  - Triggering an event when the user is not moving (false positive) is costly
+ *    in terms of power, so it should be avoided.
+ *  - Not triggering an event when the user is moving (false negative) is
+ *    acceptable as long as it is not done repeatedly. If the user has been
+ *    walking for 10 seconds, not triggering an event within those 10 seconds
+ *    is not acceptable.
+ *
+ *  IMPORTANT NOTE: this sensor type is very different from other types
+ *  in that it must work when the screen is off without the need of
+ *  holding a partial wake-lock and MUST allow the SoC to go into suspend.
+ *  When significant motion is detected, the sensor must awaken the SoC and
+ *  the event must be reported.
+ *
+ *  If a particular hardware cannot support this mode of operation then this
+ *  sensor type MUST NOT be reported by the HAL. ie: it is not acceptable
+ *  to "emulate" this sensor in the HAL.
+ *
+ *  The whole point of this sensor type is to save power by keeping the
+ *  SoC in suspend mode when the device is at rest.
+ *
+ *  When the sensor is not activated, it must also be deactivated in the
+ *  hardware: it must not wake up the SoC anymore, even in case of
+ *  significant motion.
+ *
+ *  setDelay() has no effect and is ignored.
+ *  Once a "significant motion" event is returned, a sensor of this type
+ *  must disable itself automatically, as if activate(..., 0) had been called.
  */
 
+#define SENSOR_TYPE_SIGNIFICANT_MOTION              (17)
+
+
+/*
+ * SENSOR_TYPE_STEP_DETECTOR
+ * trigger-mode: special
+ * wake-up sensor: no
+ *
+ * A sensor of this type triggers an event each time a step is taken
+ * by the user. The only allowed value to return is 1.0 and an event is
+ * generated for each step. Like with any other event, the timestamp
+ * indicates when the event (here the step) occurred; this corresponds to when
+ * the foot hit the ground, generating a high variation in acceleration.
+ *
+ * While this sensor operates, it shall not disrupt any other sensors, in
+ * particular, but not limited to, the accelerometer, which might very well
+ * be in use as well.
+ *
+ * This sensor must be low power. That is, if the step detection cannot be
+ * done in hardware, this sensor should not be defined. Also, when the
+ * step detector is activated and the accelerometer is not, only steps should
+ * trigger interrupts (not accelerometer data).
+ *
+ * setDelay() has no impact on this sensor type
+ */
+
+#define SENSOR_TYPE_STEP_DETECTOR                   (18)
+
+
+/*
+ * SENSOR_TYPE_STEP_COUNTER
+ * trigger-mode: on-change
+ * wake-up sensor: no
+ *
+ * A sensor of this type returns the number of steps taken by the user since
+ * the last reboot while activated. The value is returned as a uint64_t and is
+ * reset to zero only on a system reboot.
+ *
+ * The timestamp of the event is set to the time when the first step
+ * for that event was taken.
+ * See SENSOR_TYPE_STEP_DETECTOR for the meaning of a step's timestamp.
+ *
+ *  The minimum size of the hardware's internal counter shall be 16 bits
+ *  (this restriction is here to avoid too frequent wake-ups when the
+ *  delay is very large).
+ *
+ *  IMPORTANT NOTE: this sensor type is different from other types
+ *  in that it must work when the screen is off without the need of
+ *  holding a partial wake-lock and MUST allow the SoC to go into suspend.
+ *  Unlike other sensors, while in suspend mode this sensor must stay active;
+ *  no events are reported during that time, but steps continue to be
+ *  accounted for, and an event will be reported as soon as the SoC resumes
+ *  if the timeout has expired.
+ *
+ *    In other words, when the screen is off and the device allowed to
+ *    go into suspend mode, we don't want to be woken up, regardless of the
+ *    setDelay() value, but the steps shall continue to be counted.
+ *
+ *    The driver must however ensure that the internal step count never
+ *    overflows. It is allowed in this situation to wake the SoC up so the
+ *    driver can do the counter maintenance.
+ *
+ *  While this sensor operates, it shall not disrupt any other sensors, in
+ *  particular, but not limited to, the accelerometer, which might very well
+ *  be in use as well.
+ *
+ *  If a particular hardware cannot support these modes of operation then this
+ *  sensor type MUST NOT be reported by the HAL. ie: it is not acceptable
+ *  to "emulate" this sensor in the HAL.
+ *
+ * This sensor must be low power. That is, if the step detection cannot be
+ * done in hardware, this sensor should not be defined. Also, when the
+ * step counter is activated and the accelerometer is not, only steps should
+ * trigger interrupts (not accelerometer data).
+ *
+ *  The whole point of this sensor type is to save power by keeping the
+ *  SoC in suspend mode when the device is at rest.
+ */
+
+#define SENSOR_TYPE_STEP_COUNTER                    (19)
+
+/*
+ * SENSOR_TYPE_GEOMAGNETIC_ROTATION_VECTOR
+ * trigger-mode: continuous
+ * wake-up sensor: no
+ *
+ *  Similar to SENSOR_TYPE_ROTATION_VECTOR, but using a magnetometer instead
+ *  of a gyroscope.
+ *
+ *  This sensor must be based on a magnetometer. It cannot be implemented using
+ *  a gyroscope, and gyroscope input cannot be used by this sensor.
+ *
+ *  Just like SENSOR_TYPE_ROTATION_VECTOR, this sensor reports an estimated
+ *  heading accuracy:
+ *    sensors_event_t.data[3] = estimated_accuracy (in radians)
+ *  The heading error must be less than estimated_accuracy 95% of the time.
+ *
+ * see SENSOR_TYPE_ROTATION_VECTOR for more details
+ */
+#define SENSOR_TYPE_GEOMAGNETIC_ROTATION_VECTOR            (20)
+
+/**
+ * Values returned by the accelerometer in various locations in the universe.
+ * All values are in SI units (m/s^2).
+ */
+#define GRAVITY_SUN             (275.0f)
+#define GRAVITY_EARTH           (9.80665f)
+
+/** Maximum magnetic field on Earth's surface */
+#define MAGNETIC_FIELD_EARTH_MAX    (60.0f)
+
+/** Minimum magnetic field on Earth's surface */
+#define MAGNETIC_FIELD_EARTH_MIN    (30.0f)
+
+
+/**
+ * status of orientation sensor
+ */
+
+#define SENSOR_STATUS_UNRELIABLE        0
+#define SENSOR_STATUS_ACCURACY_LOW      1
+#define SENSOR_STATUS_ACCURACY_MEDIUM   2
+#define SENSOR_STATUS_ACCURACY_HIGH     3
+
+
+/**
+ * sensor event data
+ */
 typedef struct {
     union {
         float v[3];
@@ -322,6 +733,18 @@
 } sensors_vec_t;
 
 /**
+ * uncalibrated gyroscope and magnetometer event data
+ */
+typedef struct {
+  float x_uncalib;
+  float y_uncalib;
+  float z_uncalib;
+  float x_bias;
+  float y_bias;
+  float z_bias;
+} uncalibrated_event_t;
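An illustrative sketch of the bias convention used by the uncalibrated gyroscope and magnetometer events (the helper name is hypothetical): calibrated readings are recovered by adding the estimated bias to the uncalibrated components.

    /* Sketch: x_calibrated = x_uncalib + x_bias, and likewise for y and z. */
    static void apply_bias(const uncalibrated_event_t* u,
                           float* x, float* y, float* z)
    {
        *x = u->x_uncalib + u->x_bias;
        *y = u->y_uncalib + u->y_bias;
        *z = u->z_uncalib + u->z_bias;
    }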
+
+/**
  * Union of the various types of sensor data
  * that can be returned.
  */
@@ -370,6 +793,15 @@
 
         /* relative humidity in percent */
         float           relative_humidity;
+
+        /* step-counter */
+        uint64_t        step_counter;
+
+        /* uncalibrated gyroscope values are in rad/s */
+        uncalibrated_event_t uncalibrated_gyro;
+
+        /* uncalibrated magnetometer values are in micro-Teslas */
+        uncalibrated_event_t uncalibrated_magnetic;
     };
     uint32_t        reserved1[4];
 } sensors_event_t;
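For illustration, a HAL implementation might fill the new step_counter member like this (a sketch; version, sensor, type and timestamp are existing sensors_event_t fields from the unchanged part of this header, and the handle and step values are hypothetical):

    #include <string.h>

    /* Sketch: build a step-counter event for the given sensor handle. */
    static void make_step_counter_event(sensors_event_t* ev, int handle,
                                        uint64_t steps, int64_t timestamp_ns)
    {
        memset(ev, 0, sizeof(*ev));
        ev->version      = sizeof(sensors_event_t);
        ev->sensor       = handle;
        ev->type         = SENSOR_TYPE_STEP_COUNTER;
        ev->timestamp    = timestamp_ns;
        ev->step_counter = steps;
    }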
@@ -395,79 +827,298 @@
 };
 
 struct sensor_t {
-    /* name of this sensors */
+
+    /* Name of this sensor.
+     * All sensors of the same "type" must have a different "name".
+     */
     const char*     name;
+
     /* vendor of the hardware part */
     const char*     vendor;
+
     /* version of the hardware part + driver. The value of this field
      * must increase when the driver is updated in a way that changes the
      * output of this sensor. This is important for fused sensors when the
      * fusion algorithm is updated.
      */    
     int             version;
-    /* handle that identifies this sensors. This handle is used to activate
-     * and deactivate this sensor. The value of the handle must be 8 bits
-     * in this version of the API. 
+
+    /* handle that identifies this sensor. This handle is used to reference
+     * this sensor throughout the HAL API.
      */
     int             handle;
+
     /* this sensor's type. */
     int             type;
-    /* maximaum range of this sensor's value in SI units */
+
+    /* maximum range of this sensor's value in SI units */
     float           maxRange;
+
     /* smallest difference between two values reported by this sensor */
     float           resolution;
+
     /* rough estimate of this sensor's power consumption in mA */
     float           power;
-    /* minimum delay allowed between events in microseconds. A value of zero
-     * means that this sensor doesn't report events at a constant rate, but
-     * rather only when a new data is available */
+
+    /* this value depends on the trigger mode:
+     *
+     *   continuous: minimum sample period allowed in microseconds
+     *   on-change : 0
+     *   one-shot  : -1
+     *   special   : 0, unless otherwise noted
+     */
     int32_t         minDelay;
+
     /* reserved fields, must be zero */
     void*           reserved[8];
 };
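For illustration only, a continuous sensor entry could look like the following (all values are hypothetical; minDelay expresses the minimum sample period in microseconds, as documented above):

    /* Sketch: a hypothetical sensor list entry for a 3-axis accelerometer. */
    static const struct sensor_t kExampleSensorList[] = {
        {
            .name       = "Example 3-axis Accelerometer",
            .vendor     = "Example Vendor",
            .version    = 1,
            .handle     = 1,
            .type       = SENSOR_TYPE_ACCELEROMETER,
            .maxRange   = 4.0f * GRAVITY_EARTH,    /* m/s^2 */
            .resolution = GRAVITY_EARTH / 1024.0f, /* m/s^2 per LSB */
            .power      = 0.25f,                   /* mA */
            .minDelay   = 10000,                   /* 10 ms, i.e. up to 100 Hz */
        },
    };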
 
 
-/**
- * Every device data structure must begin with hw_device_t
- * followed by module specific public methods and attributes.
+/*
+ * sensors_poll_device_t is used with SENSORS_DEVICE_API_VERSION_0_1
+ * and is present for backward binary and source compatibility.
+ * (see documentation of the hooks in struct sensors_poll_device_1 below)
  */
 struct sensors_poll_device_t {
     struct hw_device_t common;
-
-    /** Activate/deactivate one sensor.
-     *
-     * @param handle is the handle of the sensor to change.
-     * @param enabled set to 1 to enable, or 0 to disable the sensor.
-     *
-     * @return 0 on success, negative errno code otherwise
-     */
     int (*activate)(struct sensors_poll_device_t *dev,
             int handle, int enabled);
-
-    /**
-     * Set the delay between sensor events in nanoseconds for a given sensor.
-     *
-     * If the requested value is less than sensor_t::minDelay, then it's
-     * silently clamped to sensor_t::minDelay unless sensor_t::minDelay is
-     * 0, in which case it is clamped to >= 1ms.
-     *
-     * @return 0 if successful, < 0 on error
-     */
     int (*setDelay)(struct sensors_poll_device_t *dev,
             int handle, int64_t ns);
-
-    /**
-     * Returns an array of sensor data.
-     * This function must block until events are available.
-     *
-     * @return the number of events read on success, or -errno in case of an error.
-     * This function should never return 0 (no event).
-     *
-     */
     int (*poll)(struct sensors_poll_device_t *dev,
             sensors_event_t* data, int count);
 };
 
+/*
+ * struct sensors_poll_device_1 is used with SENSORS_DEVICE_API_VERSION_1_0
+ */
+typedef struct sensors_poll_device_1 {
+    union {
+        /* sensors_poll_device_1 is compatible with sensors_poll_device_t,
+         * and can be down-cast to it
+         */
+        struct sensors_poll_device_t v0;
+
+        struct {
+            struct hw_device_t common;
+
+            /* Activate/de-activate one sensor.
+             *
+             * handle is the handle of the sensor to change.
+             * enabled set to 1 to enable, or 0 to disable the sensor.
+             *
+             * unless otherwise noted in the sensor type definitions, an
+             * activated sensor never prevents the SoC from going into
+             * suspend mode; that is, the HAL shall not hold a partial
+             * wake-lock on behalf of applications.
+             *
+             * one-shot sensors de-activate themselves automatically upon
+             * receiving an event, but they must still accept being
+             * deactivated through a call to activate(..., ..., 0).
+             *
+             * if "enabled" is true and the sensor is already activated, this
+             * function is a no-op and succeeds.
+             *
+             * if "enabled" is false and the sensor is already de-activated,
+             * this function is a no-op and succeeds.
+             *
+             * return 0 on success, negative errno code otherwise
+             */
+            int (*activate)(struct sensors_poll_device_t *dev,
+                    int handle, int enabled);
+
+            /**
+             * Set the event period in nanoseconds for a given sensor.
+             *
+             * What the period_ns parameter means depends on the specified
+             * sensor's trigger mode:
+             *
+             * continuous: setDelay() sets the sampling rate.
+             * on-change: setDelay() limits the delivery rate of events
+             * one-shot: setDelay() is ignored. It has no effect.
+             * special: see specific sensor type definitions
+             *
+             * For continuous and on-change sensors, if the requested value is
+             * less than sensor_t::minDelay, then it's silently clamped to
+             * sensor_t::minDelay unless sensor_t::minDelay is 0, in which
+             * case it is clamped to >= 1ms.
+             *
+             * @return 0 if successful, < 0 on error
+             */
+            int (*setDelay)(struct sensors_poll_device_t *dev,
+                    int handle, int64_t period_ns);
+
+            /**
+             * Returns an array of sensor data.
+             * This function must block until events are available.
+             *
+             * return the number of events read on success, or -errno in case
+             * of an error.
+             *
+             * The number of events returned in data must be less than or
+             * equal to the value returned by query() for
+             * SENSORS_QUERY_MAX_EVENTS_BATCH_COUNT.
+             *
+             * This function shall never return 0 (no event).
+             */
+            int (*poll)(struct sensors_poll_device_t *dev,
+                    sensors_event_t* data, int count);
+        };
+    };
+
+    /*
+     * Used to retrieve information about the sensor HAL
+     *
+     * Returns 0 on success or -errno on error.
+     */
+    int (*query)(struct sensors_poll_device_1* dev, int what, int* value);
+
+
+    /*
+     * Enables batch mode for the given sensor and sets the delay between events
+     *
+     * A timeout value of zero disables batch mode for the given sensor.
+     *
+     * The period_ns parameter is equivalent to calling setDelay() -- this
+     * function both enables or disables the batch mode AND sets the event
+     * period in nanoseconds. See setDelay() above for a detailed explanation
+     * of the period_ns parameter.
+     *
+     * While in batch mode, sensor events are reported in batches at least
+     * every "timeout" nanoseconds; that is, all events since the previous
+     * batch are recorded and returned all at once. Batches can be interleaved
+     * and split, and as usual events of the same sensor type are time-ordered.
+     *
+     * setDelay() is not affected and it behaves as usual.
+     *
+     * Each event has a timestamp associated with it, the timestamp
+     * must be accurate and correspond to the time at which the event
+     * physically happened.
+     *
+     * If internal h/w FIFOs fill up before the timeout, then events are
+     * reported at that point. No event shall be dropped or lost.
+     *
+     *
+     * INTERACTION WITH SUSPEND MODE:
+     * ------------------------------
+     *
+     * By default, batch mode doesn't significantly change the interaction
+     * with suspend mode: sensors must continue to allow the SoC to go into
+     * suspend mode, and sensors must stay active to fill their internal
+     * FIFO. In this mode, when the FIFO fills up, it shall wrap around
+     * (basically behave like a circular buffer, overwriting events).
+     * As soon as the SoC comes out of suspend mode, a batch is produced with
+     * as much of the recent history as possible, and batch operation
+     * resumes as usual.
+     *
+     * The behavior described above allows applications to record the recent
+     * history of a set of sensors while keeping the SoC in suspend. It also
+     * allows the hardware to not have to rely on a wake-up interrupt line.
+     *
+     * There are cases however where an application cannot afford to lose
+     * any events, even when the device goes into suspend mode. The behavior
+     * specified above can be altered by setting the
+     * SENSORS_BATCH_WAKE_UPON_FIFO_FULL flag. If this flag is set, the SoC
+     * must be woken up from suspend and a batch must be returned before
+     * the FIFO fills up. Enough head room must be allocated in the FIFO to
+     * allow the device to entirely come out of suspend (which might take a
+     * while and is device dependent) such that no events are lost.
+     *
+     *   If the hardware cannot support this mode, or, if the physical
+     *   FIFO is so small that the device would never be allowed to go into
+     *   suspend for at least 10 seconds, then this function MUST fail when
+     *   the flag SENSORS_BATCH_WAKE_UPON_FIFO_FULL is set, regardless of
+     *   the value of the timeout parameter.
+     *
+     * DRY RUN:
+     * --------
+     *
+     * If the flag SENSORS_BATCH_DRY_RUN is set, this function returns
+     * without modifying the batch mode or the event period and has no side
+     * effects, but returns errors as usual (as it would if this flag was
+     * not set). This flag is used to check if batch mode is available for a
+     * given configuration -- in particular for a given sensor at a given rate.
+     *
+     *
+     * Return values:
+     * --------------
+     *
+     * Because sensors must be independent, the return value must not depend
+     * on the state of the system (whether another sensor is on or not),
+     * nor on whether the flag SENSORS_BATCH_DRY_RUN is set (in other words,
+     * if a batch call with SENSORS_BATCH_DRY_RUN is successful,
+     * the same call without SENSORS_BATCH_DRY_RUN must succeed as well).
+     *
+     * If successful, 0 is returned.
+     * If the specified sensor doesn't support batch mode, -EINVAL is returned.
+     * If the specified sensor's trigger-mode is one-shot, -EINVAL is returned.
+     * If any of the constraints above cannot be satisfied, -EINVAL is returned.
+     *
+     * Note: the timeout parameter, when > 0, has no impact on whether this
+     *       function succeeds or fails.
+     *
+     * If timeout is set to 0, this function must succeed.
+     *
+     *
+     * IMPLEMENTATION NOTES:
+     * ---------------------
+     *
+     * batch mode, if supported, should happen at the hardware level,
+     * typically using hardware FIFOs. In particular, it SHALL NOT be
+     * implemented in the HAL, as this would be counterproductive.
+     * The goal here is to save significant amounts of power.
+     *
+     * batch mode can be enabled or disabled at any time, in particular
+     * while the specified sensor is already enabled and this shall not
+     * result in the loss of events.
+     *
+     * COMPARATIVE IMPORTANCE OF BATCHING FOR DIFFERENT SENSORS:
+     * ---------------------------------------------------------
+     *
+     * On platforms on which hardware FIFO size is limited, the system
+     * designers might have to choose how much FIFO to reserve for each
+     * sensor. To help with this choice, here is a list of applications made
+     * possible when batching is implemented on the different sensors.
+     *
+     * High value: Low power pedestrian dead reckoning
+     *   Target batching time: 20 seconds to 1 minute
+     *   Sensors to batch:
+     *    - Step detector
+     *    - Rotation vector or game rotation vector at 5Hz
+     *   Gives step and heading data while letting the AP go into suspend.
+     *
+     * High value: Medium power activity/gesture recognition
+     *   Target batching time: 3 seconds
+     *   Sensors to batch: accelerometer between 20Hz and 50Hz
+     *   Allows recognizing arbitrary activities and gestures without having
+     *   to keep the AP fully awake while the data is collected.
+     *
+     * Medium-high value: Interrupt load reduction
+     *   Target batching time: < 1 second
+     *   Sensors to batch: any high frequency sensor.
+     *   If the gyroscope is set at 800Hz, even batching just 10 gyro events can
+     *   reduce the number of interrupts from 800/second to 80/second.
+     *
+     * Medium value: Continuous low frequency data collection
+     *   Target batching time: > 1 minute
+     *   Sensors to batch: barometer, humidity sensor, other low frequency
+     *     sensors.
+     *   Allows creating monitoring applications at low power.
+     *
+     * Medium value: Continuous full-sensors collection
+     *   Target batching time: > 1 minute
+     *   Sensors to batch: all, at high frequencies
+     *   Allows full collection of sensor data while leaving the AP in
+     *   suspend mode. Only worth considering if FIFO space is not an issue.
+     */
+    int (*batch)(struct sensors_poll_device_1* dev,
+            int handle, int flags, int64_t period_ns, int64_t timeout);
+
+    void (*reserved_procs[8])(void);
+
+} sensors_poll_device_1_t;
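A usage sketch for the new hooks (the handle, rate, timeout, fallback value and helper names are hypothetical; 'dev' is an opened sensors_poll_device_1_t): size the poll buffer from query() as described above, then use the dry-run pattern to probe batch support before enabling it.

    #include <hardware/sensors.h>
    #include <stdlib.h>

    /* Sketch: size the poll buffer from the HAL-reported maximum. */
    static sensors_event_t* alloc_poll_buffer(sensors_poll_device_1_t* dev,
                                              int* out_count)
    {
        int max_events = 0;
        if (dev->query(dev, SENSORS_QUERY_MAX_EVENTS_BATCH_COUNT, &max_events) != 0
                || max_events <= 0) {
            max_events = 64; /* arbitrary fallback */
        }
        *out_count = max_events;
        return malloc(max_events * sizeof(sensors_event_t));
    }

    /* Sketch: probe batch support at the requested rate (dry run), then
     * enable it. Per the contract above, a successful dry run guarantees
     * that the same call without the flag also succeeds. */
    static int enable_batching(sensors_poll_device_1_t* dev, int handle,
                               int64_t period_ns, int64_t timeout_ns)
    {
        int err = dev->batch(dev, handle, SENSORS_BATCH_DRY_RUN,
                             period_ns, timeout_ns);
        if (err != 0)
            return err; /* batching unsupported for this configuration */
        return dev->batch(dev, handle, 0, period_ns, timeout_ns);
    }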
+
+
+
 /** convenience API for opening and closing a device */
 
 static inline int sensors_open(const struct hw_module_t* module,
@@ -480,6 +1131,16 @@
     return device->common.close(&device->common);
 }
 
+static inline int sensors_open_1(const struct hw_module_t* module,
+        sensors_poll_device_1_t** device) {
+    return module->methods->open(module,
+            SENSORS_HARDWARE_POLL, (struct hw_device_t**)device);
+}
+
+static inline int sensors_close_1(sensors_poll_device_1_t* device) {
+    return device->common.close(&device->common);
+}
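Usage sketch for the new convenience wrappers (hw_get_module() and SENSORS_HARDWARE_MODULE_ID come from <hardware/hardware.h> and the unchanged part of this header; the helper name is hypothetical):

    #include <hardware/hardware.h>
    #include <hardware/sensors.h>
    #include <stddef.h>

    /* Sketch: open the versioned sensors device through the HAL module. */
    static sensors_poll_device_1_t* open_sensors_device(void)
    {
        const struct hw_module_t* module = NULL;
        sensors_poll_device_1_t* device = NULL;

        if (hw_get_module(SENSORS_HARDWARE_MODULE_ID, &module) != 0)
            return NULL;
        if (sensors_open_1(module, &device) != 0)
            return NULL;
        return device; /* release later with sensors_close_1(device) */
    }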
+
 __END_DECLS
 
 #endif  // ANDROID_SENSORS_INTERFACE_H
diff --git a/modules/Android.mk b/modules/Android.mk
index faa8bb3..486b42d 100644
--- a/modules/Android.mk
+++ b/modules/Android.mk
@@ -1,2 +1,2 @@
-hardware_modules := gralloc hwcomposer audio nfc nfc-nci local_time power usbaudio audio_remote_submix
+hardware_modules := gralloc hwcomposer audio nfc nfc-nci local_time power usbaudio audio_remote_submix camera
 include $(call all-named-subdir-makefiles,$(hardware_modules))
diff --git a/modules/audio_remote_submix/audio_hw.cpp b/modules/audio_remote_submix/audio_hw.cpp
index 3756274..5e88ef7 100755
--- a/modules/audio_remote_submix/audio_hw.cpp
+++ b/modules/audio_remote_submix/audio_hw.cpp
@@ -186,18 +186,16 @@
 
         pthread_mutex_lock(&out->dev->lock);
 
-        MonoPipe* sink = out->dev->rsxSink.get();
-        if (sink != NULL) {
-            sink->incStrong(out);
-        } else {
-            pthread_mutex_unlock(&out->dev->lock);
-            return 0;
-        }
+        { // using the sink
+            sp<MonoPipe> sink = out->dev->rsxSink.get();
+            if (sink == 0) {
+                pthread_mutex_unlock(&out->dev->lock);
+                return 0;
+            }
 
-        ALOGI("shutdown");
-        sink->shutdown(true);
-
-        sink->decStrong(out);
+            ALOGI("shutdown");
+            sink->shutdown(true);
+        } // done using the sink
 
         pthread_mutex_unlock(&out->dev->lock);
     }
@@ -240,16 +238,16 @@
 
     out->dev->output_standby = false;
 
-    MonoPipe* sink = out->dev->rsxSink.get();
-    if (sink != NULL) {
+    sp<MonoPipe> sink = out->dev->rsxSink.get();
+    if (sink != 0) {
         if (sink->isShutdown()) {
+            sink.clear();
             pthread_mutex_unlock(&out->dev->lock);
             // the pipe has already been shutdown, this buffer will be lost but we must
             //   simulate timing so we don't drain the output faster than realtime
             usleep(frames * 1000000 / out_get_sample_rate(&stream->common));
             return bytes;
         }
-        sink->incStrong(buffer);
     } else {
         pthread_mutex_unlock(&out->dev->lock);
         ALOGE("out_write without a pipe!");
@@ -260,12 +258,13 @@
     pthread_mutex_unlock(&out->dev->lock);
 
     written_frames = sink->write(buffer, frames);
+
     if (written_frames < 0) {
         if (written_frames == (ssize_t)NEGOTIATE) {
             ALOGE("out_write() write to pipe returned NEGOTIATE");
 
             pthread_mutex_lock(&out->dev->lock);
-            sink->decStrong(buffer);
+            sink.clear();
             pthread_mutex_unlock(&out->dev->lock);
 
             written_frames = 0;
@@ -278,9 +277,7 @@
     }
 
     pthread_mutex_lock(&out->dev->lock);
-
-    sink->decStrong(buffer);
-
+    sink.clear();
     pthread_mutex_unlock(&out->dev->lock);
 
     if (written_frames < 0) {
@@ -414,45 +411,43 @@
     }
 
     in->read_counter_frames += frames_to_read;
-
-    MonoPipeReader* source = in->dev->rsxSource.get();
-    if (source != NULL) {
-        source->incStrong(buffer);
-    } else {
-        ALOGE("no audio pipe yet we're trying to read!");
-        pthread_mutex_unlock(&in->dev->lock);
-        usleep((bytes / frame_size) * 1000000 / in_get_sample_rate(&stream->common));
-        memset(buffer, 0, bytes);
-        return bytes;
-    }
-
-    pthread_mutex_unlock(&in->dev->lock);
-
-    // read the data from the pipe (it's non blocking)
     size_t remaining_frames = frames_to_read;
-    int attempts = 0;
-    char* buff = (char*)buffer;
-    while ((remaining_frames > 0) && (attempts < MAX_READ_ATTEMPTS)) {
-        attempts++;
-        frames_read = source->read(buff, remaining_frames, AudioBufferProvider::kInvalidPTS);
-        if (frames_read > 0) {
-            remaining_frames -= frames_read;
-            buff += frames_read * frame_size;
-            //ALOGV("  in_read (att=%d) got %ld frames, remaining=%u",
-            //      attempts, frames_read, remaining_frames);
-        } else {
-            //ALOGE("  in_read read returned %ld", frames_read);
-            usleep(READ_ATTEMPT_SLEEP_MS * 1000);
+
+    {
+        // about to read from audio source
+        sp<MonoPipeReader> source = in->dev->rsxSource.get();
+        if (source == 0) {
+            ALOGE("no audio pipe yet we're trying to read!");
+            pthread_mutex_unlock(&in->dev->lock);
+            usleep((bytes / frame_size) * 1000000 / in_get_sample_rate(&stream->common));
+            memset(buffer, 0, bytes);
+            return bytes;
         }
+
+        pthread_mutex_unlock(&in->dev->lock);
+
+        // read the data from the pipe (it's non blocking)
+        int attempts = 0;
+        char* buff = (char*)buffer;
+        while ((remaining_frames > 0) && (attempts < MAX_READ_ATTEMPTS)) {
+            attempts++;
+            frames_read = source->read(buff, remaining_frames, AudioBufferProvider::kInvalidPTS);
+            if (frames_read > 0) {
+                remaining_frames -= frames_read;
+                buff += frames_read * frame_size;
+                //ALOGV("  in_read (att=%d) got %ld frames, remaining=%u",
+                //      attempts, frames_read, remaining_frames);
+            } else {
+                //ALOGE("  in_read read returned %ld", frames_read);
+                usleep(READ_ATTEMPT_SLEEP_MS * 1000);
+            }
+        }
+        // done using the source
+        pthread_mutex_lock(&in->dev->lock);
+        source.clear();
+        pthread_mutex_unlock(&in->dev->lock);
     }
 
-    // done using the source
-    pthread_mutex_lock(&in->dev->lock);
-
-    source->decStrong(buffer);
-
-    pthread_mutex_unlock(&in->dev->lock);
-
     if (remaining_frames > 0) {
         ALOGV("  remaining_frames = %d", remaining_frames);
         memset(((char*)buffer)+ bytes - (remaining_frames * frame_size), 0,
@@ -571,8 +566,7 @@
     // initialize pipe
     {
         ALOGV("  initializing pipe");
-        const NBAIO_Format format =
-                config->sample_rate == 48000 ? Format_SR48_C2_I16 : Format_SR44_1_C2_I16;
+        const NBAIO_Format format = Format_from_SR_C(config->sample_rate, 2);
         const NBAIO_Format offers[1] = {format};
         size_t numCounterOffers = 0;
         // creating a MonoPipe with optional blocking set to true.
diff --git a/modules/camera/Android.mk b/modules/camera/Android.mk
new file mode 100644
index 0000000..eebffc1
--- /dev/null
+++ b/modules/camera/Android.mk
@@ -0,0 +1,39 @@
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := camera.default
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+
+LOCAL_C_INCLUDES += \
+	system/core/include \
+	system/media/camera/include \
+
+LOCAL_SRC_FILES := \
+	CameraHAL.cpp \
+	Camera.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+	libcamera_metadata \
+	libcutils \
+	liblog \
+
+LOCAL_CFLAGS += -Wall -Wextra -fvisibility=hidden
+
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/modules/camera/Camera.cpp b/modules/camera/Camera.cpp
new file mode 100644
index 0000000..203b772
--- /dev/null
+++ b/modules/camera/Camera.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdlib>
+#include <pthread.h>
+#include <hardware/camera3.h>
+#include "CameraHAL.h"
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Camera"
+#include <cutils/log.h>
+
+#define ATRACE_TAG (ATRACE_TAG_CAMERA | ATRACE_TAG_HAL)
+#include <cutils/trace.h>
+
+#include "Camera.h"
+
+namespace default_camera_hal {
+
+extern "C" {
+// Shim passed to the framework to close an opened device.
+static int close_device(hw_device_t* dev)
+{
+    camera3_device_t* cam_dev = reinterpret_cast<camera3_device_t*>(dev);
+    Camera* cam = static_cast<Camera*>(cam_dev->priv);
+    return cam->close();
+}
+} // extern "C"
+
+Camera::Camera(int id)
+  : mId(id),
+    mBusy(false),
+    mCallbackOps(NULL)
+{
+    pthread_mutex_init(&mMutex,
+                       NULL); // No pthread mutex attributes.
+
+    memset(&mDevice, 0, sizeof(mDevice));
+    mDevice.common.tag    = HARDWARE_DEVICE_TAG;
+    mDevice.common.close  = close_device;
+    mDevice.ops           = const_cast<camera3_device_ops_t*>(&sOps);
+    mDevice.priv          = this;
+}
+
+Camera::~Camera()
+{
+}
+
+int Camera::open(const hw_module_t *module, hw_device_t **device)
+{
+    ALOGI("%s:%d: Opening camera device", __func__, mId);
+    ATRACE_BEGIN(__func__);
+    pthread_mutex_lock(&mMutex);
+    if (mBusy) {
+        pthread_mutex_unlock(&mMutex);
+        ATRACE_END();
+        ALOGE("%s:%d: Error! Camera device already opened", __func__, mId);
+        return -EBUSY;
+    }
+
+    // TODO: open camera dev nodes, etc
+    mBusy = true;
+    mDevice.common.module = const_cast<hw_module_t*>(module);
+    *device = &mDevice.common;
+
+    pthread_mutex_unlock(&mMutex);
+    ATRACE_END();
+    return 0;
+}
+
+int Camera::close()
+{
+    ALOGI("%s:%d: Closing camera device", __func__, mId);
+    ATRACE_BEGIN(__func__);
+    pthread_mutex_lock(&mMutex);
+    if (!mBusy) {
+        pthread_mutex_unlock(&mMutex);
+        ATRACE_END();
+        ALOGE("%s:%d: Error! Camera device not open", __func__, mId);
+        return -EINVAL;
+    }
+
+    // TODO: close camera dev nodes, etc
+    mBusy = false;
+
+    pthread_mutex_unlock(&mMutex);
+    ATRACE_END();
+    return 0;
+}
+
+int Camera::initialize(const camera3_callback_ops_t *callback_ops)
+{
+    ALOGV("%s:%d: callback_ops=%p", __func__, mId, callback_ops);
+    mCallbackOps = callback_ops;
+    return 0;
+}
+
+int Camera::configureStreams(camera3_stream_configuration_t *stream_list)
+{
+    ALOGV("%s:%d: stream_list=%p", __func__, mId, stream_list);
+    // TODO: validate input, create internal stream representations
+    return 0;
+}
+
+int Camera::registerStreamBuffers(const camera3_stream_buffer_set_t *buf_set)
+{
+    ALOGV("%s:%d: buffer_set=%p", __func__, mId, buf_set);
+    // TODO: register buffers with hardware
+    return 0;
+}
+
+const camera_metadata_t* Camera::constructDefaultRequestSettings(int type)
+{
+    ALOGV("%s:%d: type=%d", __func__, mId, type);
+    // TODO: return statically built default request
+    return NULL;
+}
+
+int Camera::processCaptureRequest(camera3_capture_request_t *request)
+{
+    ALOGV("%s:%d: request=%p", __func__, mId, request);
+    ATRACE_BEGIN(__func__);
+
+    if (request == NULL) {
+        ALOGE("%s:%d: NULL request recieved", __func__, mId);
+        ATRACE_END();
+        return -EINVAL;
+    }
+
+    // TODO: verify request; submit request to hardware
+    ATRACE_END();
+    return 0;
+}
+
+void Camera::getMetadataVendorTagOps(vendor_tag_query_ops_t *ops)
+{
+    ALOGV("%s:%d: ops=%p", __func__, mId, ops);
+    // TODO: return vendor tag ops
+}
+
+void Camera::dump(int fd)
+{
+    ALOGV("%s:%d: Dumping to fd %d", fd);
+    // TODO: dprintf all relevant state to fd
+}
+
+extern "C" {
+// Get handle to camera from device priv data
+static Camera *camdev_to_camera(const camera3_device_t *dev)
+{
+    return reinterpret_cast<Camera*>(dev->priv);
+}
+
+static int initialize(const camera3_device_t *dev,
+        const camera3_callback_ops_t *callback_ops)
+{
+    return camdev_to_camera(dev)->initialize(callback_ops);
+}
+
+static int configure_streams(const camera3_device_t *dev,
+        camera3_stream_configuration_t *stream_list)
+{
+    return camdev_to_camera(dev)->configureStreams(stream_list);
+}
+
+static int register_stream_buffers(const camera3_device_t *dev,
+        const camera3_stream_buffer_set_t *buffer_set)
+{
+    return camdev_to_camera(dev)->registerStreamBuffers(buffer_set);
+}
+
+static const camera_metadata_t *construct_default_request_settings(
+        const camera3_device_t *dev, int type)
+{
+    return camdev_to_camera(dev)->constructDefaultRequestSettings(type);
+}
+
+static int process_capture_request(const camera3_device_t *dev,
+        camera3_capture_request_t *request)
+{
+    return camdev_to_camera(dev)->processCaptureRequest(request);
+}
+
+static void get_metadata_vendor_tag_ops(const camera3_device_t *dev,
+        vendor_tag_query_ops_t *ops)
+{
+    camdev_to_camera(dev)->getMetadataVendorTagOps(ops);
+}
+
+static void dump(const camera3_device_t *dev, int fd)
+{
+    camdev_to_camera(dev)->dump(fd);
+}
+} // extern "C"
+
+const camera3_device_ops_t Camera::sOps = {
+    .initialize              = default_camera_hal::initialize,
+    .configure_streams       = default_camera_hal::configure_streams,
+    .register_stream_buffers = default_camera_hal::register_stream_buffers,
+    .construct_default_request_settings =
+            default_camera_hal::construct_default_request_settings,
+    .process_capture_request = default_camera_hal::process_capture_request,
+    .get_metadata_vendor_tag_ops =
+            default_camera_hal::get_metadata_vendor_tag_ops,
+    .dump                    = default_camera_hal::dump
+};
+
+} // namespace default_camera_hal
diff --git a/modules/camera/Camera.h b/modules/camera/Camera.h
new file mode 100644
index 0000000..f2ad093
--- /dev/null
+++ b/modules/camera/Camera.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERA_H_
+#define CAMERA_H_
+
+#include <pthread.h>
+#include <hardware/hardware.h>
+#include <hardware/camera3.h>
+
+namespace default_camera_hal {
+// Camera represents a physical camera on a device.
+// This is constructed when the HAL module is loaded, one per physical camera.
+// It is opened by the framework, and must be closed before it can be opened
+// again.
+class Camera {
+    public:
+        // id is used to distinguish cameras. 0 <= id < NUM_CAMERAS.
+        // module is a handle to the HAL module, used when the device is opened.
+        Camera(int id);
+        ~Camera();
+
+        // Common Camera Device Operations (see <hardware/camera_common.h>)
+        int open(const hw_module_t *module, hw_device_t **device);
+        int close();
+
+        // Camera v3 Device Operations (see <hardware/camera3.h>)
+        int initialize(const camera3_callback_ops_t *callback_ops);
+        int configureStreams(camera3_stream_configuration_t *stream_list);
+        int registerStreamBuffers(const camera3_stream_buffer_set_t *buf_set);
+        const camera_metadata_t *constructDefaultRequestSettings(int type);
+        int processCaptureRequest(camera3_capture_request_t *request);
+        void getMetadataVendorTagOps(vendor_tag_query_ops_t *ops);
+        void dump(int fd);
+
+        // Camera device handle returned to framework for use
+        camera3_device_t mDevice;
+
+    private:
+        // Identifier used by framework to distinguish cameras
+        const int mId;
+        // Busy flag indicates camera is in use
+        bool mBusy;
+        // Camera device operations handle shared by all devices
+        const static camera3_device_ops_t sOps;
+        // Methods used to call back into the framework
+        const camera3_callback_ops_t *mCallbackOps;
+        // Lock protecting the Camera object for modifications
+        pthread_mutex_t mMutex;
+};
+} // namespace default_camera_hal
+
+#endif // CAMERA_H_
diff --git a/modules/camera/CameraHAL.cpp b/modules/camera/CameraHAL.cpp
new file mode 100644
index 0000000..6cae7d2
--- /dev/null
+++ b/modules/camera/CameraHAL.cpp
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdlib>
+#include <hardware/camera_common.h>
+#include <hardware/hardware.h>
+#include "Camera.h"
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DefaultCameraHAL"
+#include <cutils/log.h>
+
+#define ATRACE_TAG (ATRACE_TAG_CAMERA | ATRACE_TAG_HAL)
+#include <cutils/trace.h>
+
+#include "CameraHAL.h"
+
+/*
+ * This file serves as the entry point to the HAL. It contains the module
+ * structure and the functions the framework uses to load and interface with
+ * this HAL, as well as the handles to the individual camera devices.
+ */
+
+namespace default_camera_hal {
+
+// Default Camera HAL has 2 cameras, front and rear.
+static CameraHAL gCameraHAL(2);
+
+CameraHAL::CameraHAL(int num_cameras)
+  : mNumberOfCameras(num_cameras),
+    mCallbacks(NULL)
+{
+    int i;
+
+    // Allocate camera array and instantiate camera devices
+    mCameras = new Camera*[mNumberOfCameras];
+    for (i = 0; i < mNumberOfCameras; i++) {
+        mCameras[i] = new Camera(i);
+    }
+}
+
+CameraHAL::~CameraHAL()
+{
+    int i;
+
+    for (i = 0; i < mNumberOfCameras; i++) {
+        delete mCameras[i];
+    }
+    delete [] mCameras;
+}
+
+int CameraHAL::getNumberOfCameras()
+{
+    ALOGV("%s: %d", __func__, mNumberOfCameras);
+    return mNumberOfCameras;
+}
+
+int CameraHAL::getCameraInfo(int id, struct camera_info* info)
+{
+    ALOGV("%s: camera id %d: info=%p", __func__, id, info);
+    if (id < 0 || id >= mNumberOfCameras) {
+        ALOGE("%s: Invalid camera id %d", __func__, id);
+        return -ENODEV;
+    }
+    // TODO: return device-specific static metadata
+    return 0;
+}
+
+int CameraHAL::setCallbacks(const camera_module_callbacks_t *callbacks)
+{
+    ALOGV("%s : callbacks=%p", __func__, callbacks);
+    mCallbacks = callbacks;
+    return 0;
+}
+
+int CameraHAL::open(const hw_module_t* mod, const char* name, hw_device_t** dev)
+{
+    int id;
+    char *nameEnd;
+    Camera *cam;
+
+    ALOGV("%s: module=%p, name=%s, device=%p", __func__, mod, name, dev);
+    id = strtol(name, &nameEnd, 10);
+    if (*nameEnd != '\0' || nameEnd == name) {
+        ALOGE("%s: Invalid camera id name %s", __func__, name);
+        return -EINVAL;
+    } else if (id < 0 || id >= mNumberOfCameras) {
+        ALOGE("%s: Invalid camera id %d", __func__, id);
+        return -ENODEV;
+    }
+    return mCameras[id]->open(mod, dev);
+}
+
+extern "C" {
+
+static int get_number_of_cameras()
+{
+    return gCameraHAL.getNumberOfCameras();
+}
+
+static int get_camera_info(int id, struct camera_info* info)
+{
+    return gCameraHAL.getCameraInfo(id, info);
+}
+
+static int set_callbacks(const camera_module_callbacks_t *callbacks)
+{
+    return gCameraHAL.setCallbacks(callbacks);
+}
+
+static int open_dev(const hw_module_t* mod, const char* name, hw_device_t** dev)
+{
+    return gCameraHAL.open(mod, name, dev);
+}
+
+static hw_module_methods_t gCameraModuleMethods = {
+    open : open_dev
+};
+
+camera_module_t HAL_MODULE_INFO_SYM __attribute__ ((visibility("default"))) = {
+    common : {
+        tag                : HARDWARE_MODULE_TAG,
+        module_api_version : CAMERA_MODULE_API_VERSION_2_0,
+        hal_api_version    : HARDWARE_HAL_API_VERSION,
+        id                 : CAMERA_HARDWARE_MODULE_ID,
+        name               : "Default Camera HAL",
+        author             : "The Android Open Source Project",
+        methods            : &gCameraModuleMethods,
+        dso                : NULL,
+        reserved           : {0},
+    },
+    get_number_of_cameras : get_number_of_cameras,
+    get_camera_info       : get_camera_info,
+    set_callbacks         : set_callbacks
+};
+} // extern "C"
+
+} // namespace default_camera_hal
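
getCameraInfo() above leaves the camera_info structure untouched (see the TODO). As a point of reference, here is a hedged sketch of the fields a finished implementation would typically populate; the facing, orientation, and version values are placeholders and are not part of this patch.

// Sketch only -- placeholder values, not the patch's implementation.
#include <errno.h>
#include <hardware/camera_common.h>

int CameraHAL::getCameraInfo(int id, struct camera_info *info)
{
    if (id < 0 || id >= mNumberOfCameras)
        return -ENODEV;
    info->facing = (id == 0) ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    info->orientation = 0;                       // mounting angle, degrees
    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
    info->static_camera_characteristics = NULL;  // per-camera metadata TBD
    return 0;
}
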
diff --git a/modules/camera/CameraHAL.h b/modules/camera/CameraHAL.h
new file mode 100644
index 0000000..ba0db4e
--- /dev/null
+++ b/modules/camera/CameraHAL.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERA_HAL_H_
+#define CAMERA_HAL_H_
+
+#include <cutils/bitops.h>
+#include <hardware/hardware.h>
+#include <hardware/camera_common.h>
+#include "Camera.h"
+
+namespace default_camera_hal {
+// CameraHAL contains all module state that isn't specific to an individual
+// camera device.
+class CameraHAL {
+    public:
+        CameraHAL(int num_cameras);
+        ~CameraHAL();
+
+        // Camera Module Interface (see <hardware/camera_common.h>)
+        int getNumberOfCameras();
+        int getCameraInfo(int camera_id, struct camera_info *info);
+        int setCallbacks(const camera_module_callbacks_t *callbacks);
+
+        // Hardware Module Interface (see <hardware/hardware.h>)
+        int open(const hw_module_t* mod, const char* name, hw_device_t** dev);
+
+    private:
+        // Number of cameras
+        const int mNumberOfCameras;
+        // Callback handle
+        const camera_module_callbacks_t *mCallbacks;
+        // Array of camera devices, contains mNumberOfCameras device pointers
+        Camera **mCameras;
+};
+} // namespace default_camera_hal
+
+#endif // CAMERA_HAL_H_
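
For context on how this module interface is reached: the framework loads the HAL through libhardware's hw_get_module() and opens a device by passing the camera id as the name string, which is why CameraHAL::open() parses it with strtol(). The caller-side sketch below assumes the stock loader and is not part of this patch.

// Caller-side sketch only; not part of this patch.
#include <errno.h>
#include <hardware/camera_common.h>
#include <hardware/hardware.h>

int open_default_camera(hw_device_t **device)
{
    const hw_module_t *mod = NULL;
    if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, &mod) != 0)
        return -ENODEV;

    // The "name" argument is the camera id; CameraHAL::open() turns it
    // back into an array index.
    return mod->methods->open(mod, "0", device);
}
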
diff --git a/tests/camera2/Android.mk b/tests/camera2/Android.mk
index b312328..3daf9b8 100644
--- a/tests/camera2/Android.mk
+++ b/tests/camera2/Android.mk
@@ -21,6 +21,7 @@
 	libhardware \
 	libcamera_metadata \
 	libcameraservice \
+	libcamera_client \
 	libgui \
 	libsync \
 	libui \
@@ -35,6 +36,7 @@
 	external/gtest/include \
 	external/stlport/stlport \
 	system/media/camera/include \
+	frameworks/av/include/ \
 	frameworks/av/services/camera/libcameraservice \
 	frameworks/native/include \
 
diff --git a/tests/camera2/CameraFrameTests.cpp b/tests/camera2/CameraFrameTests.cpp
index 13d1b17..2b5b546 100644
--- a/tests/camera2/CameraFrameTests.cpp
+++ b/tests/camera2/CameraFrameTests.cpp
@@ -27,7 +27,7 @@
 #include "utils/StrongPointer.h"
 
 #include <gui/CpuConsumer.h>
-#include <gui/SurfaceTextureClient.h>
+#include <gui/Surface.h>
 
 #include <unistd.h>
 
diff --git a/tests/camera2/CameraMetadataTests.cpp b/tests/camera2/CameraMetadataTests.cpp
index d02d104..722e71e 100644
--- a/tests/camera2/CameraMetadataTests.cpp
+++ b/tests/camera2/CameraMetadataTests.cpp
@@ -29,7 +29,7 @@
 #include "utils/StrongPointer.h"
 
 #include <gui/CpuConsumer.h>
-#include <gui/SurfaceTextureClient.h>
+#include <gui/Surface.h>
 
 #include <string>
 
diff --git a/tests/camera2/CameraStreamFixture.h b/tests/camera2/CameraStreamFixture.h
index 569b9d1..c5db7ef 100644
--- a/tests/camera2/CameraStreamFixture.h
+++ b/tests/camera2/CameraStreamFixture.h
@@ -21,7 +21,7 @@
 #include <iostream>
 
 #include <gui/CpuConsumer.h>
-#include <gui/SurfaceTextureClient.h>
+#include <gui/Surface.h>
 #include <utils/Condition.h>
 #include <utils/Mutex.h>
 
@@ -39,8 +39,8 @@
 
 inline void PrintTo(const CameraStreamParams& p, ::std::ostream* os) {
     *os <<  "{ ";
-    *os <<  "Format: "    << p.mFormat    << ", ";
-    *os <<  "HeapCount: " << p.mHeapCount;
+    *os <<  "Format: 0x"  << std::hex << p.mFormat    << ", ";
+    *os <<  "HeapCount: " <<             p.mHeapCount;
     *os << " }";
 }
 
@@ -77,7 +77,7 @@
         {
             const int tag = ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES;
 
-            const android::camera2::CameraMetadata& staticInfo = device->info();
+            const CameraMetadata& staticInfo = device->info();
             camera_metadata_ro_entry entry = staticInfo.find(tag);
             ASSERT_NE(0u, entry.count)
                 << "Missing tag android.scaler.availableProcessedSizes";
@@ -142,7 +142,7 @@
         mCpuConsumer = new CpuConsumer(p.mHeapCount);
         mCpuConsumer->setName(String8("CameraStreamTest::mCpuConsumer"));
 
-        mNativeWindow = new SurfaceTextureClient(
+        mNativeWindow = new Surface(
             mCpuConsumer->getProducerInterface());
 
         ASSERT_EQ(OK,
diff --git a/tests/camera2/CameraStreamTests.cpp b/tests/camera2/CameraStreamTests.cpp
index b076296..5423a30 100644
--- a/tests/camera2/CameraStreamTests.cpp
+++ b/tests/camera2/CameraStreamTests.cpp
@@ -29,7 +29,7 @@
 #include "utils/StrongPointer.h"
 
 #include <gui/CpuConsumer.h>
-#include <gui/SurfaceTextureClient.h>
+#include <gui/Surface.h>
 
 #include "CameraStreamFixture.h"
 #include "TestExtensions.h"
@@ -142,6 +142,30 @@
         /*mHeapCount*/ 3
     },
     {
+        /*mFormat*/    HAL_PIXEL_FORMAT_Y8,
+        /*mHeapCount*/ 1
+    },
+    {
+        /*mFormat*/    HAL_PIXEL_FORMAT_Y8,
+        /*mHeapCount*/ 2
+    },
+    {
+        /*mFormat*/    HAL_PIXEL_FORMAT_Y8,
+        /*mHeapCount*/ 3
+    },
+    {
+        /*mFormat*/    HAL_PIXEL_FORMAT_Y16,
+        /*mHeapCount*/ 1
+    },
+    {
+        /*mFormat*/    HAL_PIXEL_FORMAT_Y16,
+        /*mHeapCount*/ 2
+    },
+    {
+        /*mFormat*/    HAL_PIXEL_FORMAT_Y16,
+        /*mHeapCount*/ 3
+    },
+    {
         /*mFormat*/    HAL_PIXEL_FORMAT_RAW_SENSOR,
         /*mHeapCount*/ 1
     },
diff --git a/tests/camera2/camera2.cpp b/tests/camera2/camera2.cpp
index 78e82af..de5b8c6 100644
--- a/tests/camera2/camera2.cpp
+++ b/tests/camera2/camera2.cpp
@@ -214,7 +214,7 @@
 
     }
 
-    void setUpStream(sp<ISurfaceTexture> consumer,
+    void setUpStream(sp<IGraphicBufferProducer> consumer,
             int width, int height, int format, int *id) {
         status_t res;
 
diff --git a/tests/camera2/camera2_utils.cpp b/tests/camera2/camera2_utils.cpp
index 166ac52..76a7233 100644
--- a/tests/camera2/camera2_utils.cpp
+++ b/tests/camera2/camera2_utils.cpp
@@ -317,12 +317,12 @@
     (type *)((char*)(ptr) - offsetof(type, member))
 #endif
 
-StreamAdapter::StreamAdapter(sp<ISurfaceTexture> consumer):
+StreamAdapter::StreamAdapter(sp<IGraphicBufferProducer> consumer):
         mState(UNINITIALIZED), mDevice(NULL),
         mId(-1),
         mWidth(0), mHeight(0), mFormat(0)
 {
-    mConsumerInterface = new SurfaceTextureClient(consumer);
+    mConsumerInterface = new Surface(consumer);
     camera2_stream_ops::dequeue_buffer = dequeue_buffer;
     camera2_stream_ops::enqueue_buffer = enqueue_buffer;
     camera2_stream_ops::cancel_buffer = cancel_buffer;
diff --git a/tests/camera2/camera2_utils.h b/tests/camera2/camera2_utils.h
index 757044b..0cdf4a3 100644
--- a/tests/camera2/camera2_utils.h
+++ b/tests/camera2/camera2_utils.h
@@ -22,7 +22,7 @@
 #include <system/camera_metadata.h>
 #include <hardware/camera2.h>
 
-#include <gui/SurfaceTextureClient.h>
+#include <gui/Surface.h>
 #include <gui/CpuConsumer.h>
 
 #include <utils/List.h>
@@ -161,12 +161,12 @@
 };
 
 /**
- * Adapter from an ISurfaceTexture interface to camera2 device stream ops.
+ * Adapter from an IGraphicBufferProducer interface to camera2 device stream ops.
  * Also takes care of allocating/deallocating stream in device interface
  */
 class StreamAdapter: public camera2_stream_ops {
   public:
-    StreamAdapter(sp<ISurfaceTexture> consumer);
+    StreamAdapter(sp<IGraphicBufferProducer> consumer);
 
     ~StreamAdapter();