Merge "Test HAL for HIDL safe_union construct"
diff --git a/current.txt b/current.txt
index 018ef20..86f3b67 100644
--- a/current.txt
+++ b/current.txt
@@ -249,7 +249,8 @@
 # Future changes to HALs
 5804ca86611d72e5481f022b3a0c1b334217f2e4988dad25730c42af2d1f4d1c android.hardware.neuralnetworks@1.0::IDevice
 12e8dca4ab7d8aadd0ef8f1b438021938e2396139e85db2ed65783b08800aa52 android.hardware.neuralnetworks@1.0::IExecutionCallback
-86b77e06da756a76aa3685be88765852dd982a86d8c90b8b4fc1130ed4184c8f android.hardware.neuralnetworks@1.0::types
+574e8f1499436fb4075894dcae0b36682427956ecb114f17f1fe22d116a83c6b android.hardware.neuralnetworks@1.0::IPreparedModel
+1a5ae9793223658174258b523763c557abad6fb917df0b8e3cc097fc89035811 android.hardware.neuralnetworks@1.0::types
 
 # Documentation fixups for b/78135149
 9e7a0b650d0e461ece2cfec0e1072abf8676f592b41a7fb48f01e88fc3c8f780 android.hardware.broadcastradio@1.0::types
diff --git a/neuralnetworks/1.0/IPreparedModel.hal b/neuralnetworks/1.0/IPreparedModel.hal
index ee406fb..ecaa7f8 100644
--- a/neuralnetworks/1.0/IPreparedModel.hal
+++ b/neuralnetworks/1.0/IPreparedModel.hal
@@ -42,6 +42,12 @@
      * execute function. This callback must be provided with the ErrorStatus of
      * the execution.
      *
+     * If the prepared model was prepared from a model wherein all
+     * tensor operands have fully specified dimensions, and the inputs
+     * to the function are valid, then the execution should launch
+     * and complete successfully (ErrorStatus::NONE). There must be
+     * no failure unless the device itself is in a bad state.
+     *
      * Multiple threads can call the execute function on the same IPreparedModel
      * object concurrently with different requests.
      *
diff --git a/neuralnetworks/1.0/types.hal b/neuralnetworks/1.0/types.hal
index 1ae1726..887fdf1 100644
--- a/neuralnetworks/1.0/types.hal
+++ b/neuralnetworks/1.0/types.hal
@@ -596,8 +596,8 @@
      * * 0: A 4-D tensor, of shape [batches, height, width, depth].
      *
      * Outputs:
-     * * 0: The output 4-D tensor, of shape
-     *      [batches, out_height, out_width, depth].
+     * * 0: The output 4-D tensor, of the same shape as input
+     *      [batches, height, width, depth].
      */
     L2_NORMALIZATION = 11,
 
@@ -1282,8 +1282,8 @@
      *      input height and width.
      *
      * Outputs:
-     * * 0: The output 4-D tensor, of shape [batch, height/block_size,
-     *      width/block_size, depth*block_size*block_size].
+     * * 0: The output 4-D tensor, of shape [batches, height/block_size,
+     *      width/block_size, depth_in*block_size*block_size].
      */
     SPACE_TO_DEPTH = 26,
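A worked instance of the corrected SPACE_TO_DEPTH output shape, written as a small stand-alone helper (illustrative only, not part of this change): an input of shape [batches=1, height=4, width=4, depth_in=3] with block_size = 2 yields [1, 4/2, 4/2, 3*2*2] = [1, 2, 2, 12].

    #include <array>
    #include <cstdint>

    // Output shape for SPACE_TO_DEPTH given an NHWC input shape and a block size.
    std::array<uint32_t, 4> spaceToDepthShape(const std::array<uint32_t, 4>& in,
                                              uint32_t blockSize) {
        return {in[0], in[1] / blockSize, in[2] / blockSize,
                in[3] * blockSize * blockSize};
    }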
 
@@ -1410,28 +1410,36 @@
 enum OperandLifeTime : int32_t {
     /**
      * The operand is internal to the model. It's created by an operation and
-     * consumed by other operations.
+     * consumed by other operations. It must be an output operand of
+     * exactly one operation.
      */
     TEMPORARY_VARIABLE,
 
     /**
-     * The operand is an input of the model. An operand can't be both
-     * input and output of a model.
+     * The operand is an input of the model. It must not be an output
+     * operand of any operation.
+     *
+     * An operand can't be both input and output of a model.
      */
     MODEL_INPUT,
 
     /**
-     * The operand is an output of the model.
+     * The operand is an output of the model. It must be an output
+     * operand of exactly one operation.
+     *
+     * An operand can't be both input and output of a model.
      */
     MODEL_OUTPUT,
 
     /**
-     * The operand is a constant found in Model.operandValues.
+     * The operand is a constant found in Model.operandValues. It must
+     * not be an output operand of any operation.
      */
     CONSTANT_COPY,
 
     /**
-     * The operand is a constant that was specified via a Memory object.
+     * The operand is a constant that was specified via a Memory
+     * object. It must not be an output operand of any operation.
      */
     CONSTANT_REFERENCE,
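The writer-count constraints added above can be summarized in a small check. This is a sketch only: the Operand and Operation structs below are simplified stand-ins for the generated HIDL types, not the actual validation code.

    #include <cstdint>
    #include <vector>

    enum class OperandLifeTime { TEMPORARY_VARIABLE, MODEL_INPUT, MODEL_OUTPUT,
                                 CONSTANT_COPY, CONSTANT_REFERENCE };
    struct Operation { std::vector<uint32_t> inputs, outputs; };
    struct Operand   { OperandLifeTime lifetime; };

    bool lifetimesConsistent(const std::vector<Operand>& operands,
                             const std::vector<Operation>& operations) {
        std::vector<uint32_t> writers(operands.size(), 0);
        for (const Operation& op : operations)
            for (uint32_t out : op.outputs) ++writers[out];
        for (size_t i = 0; i < operands.size(); ++i) {
            switch (operands[i].lifetime) {
                case OperandLifeTime::TEMPORARY_VARIABLE:
                case OperandLifeTime::MODEL_OUTPUT:
                    if (writers[i] != 1) return false;  // exactly one writing operation
                    break;
                default:
                    if (writers[i] != 0) return false;  // never written by an operation
                    break;
            }
        }
        return true;
    }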
 
@@ -1641,19 +1649,21 @@
     /**
      * All operations included in the model.
      *
-     * The operations are sorted into execution order.
+     * The operations are sorted into execution order. Every operand
+     * with lifetime MODEL_OUTPUT or TEMPORARY_VARIABLE must be
+     * written before it is read.
      */
     vec<Operation> operations;
 
     /**
-     * Input indexes of the model.
+     * Input indexes of the model. There must be at least one.
      *
      * Each value corresponds to the index of the operand in "operands".
      */
     vec<uint32_t> inputIndexes;
 
     /**
-     * Output indexes of the model.
+     * Output indexes of the model. There must be at least one.
      *
      * Each value corresponds to the index of the operand in "operands".
      */
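The "written before it is read" requirement amounts to the following check on the operation order; a sketch under the same simplified stand-in types as above, not the actual validator.

    #include <cstdint>
    #include <vector>

    struct Operation { std::vector<uint32_t> inputs, outputs; };

    // preWritten holds the operand indexes that are readable up front
    // (model inputs and constants); every other read must follow a write.
    bool inExecutionOrder(size_t operandCount,
                          const std::vector<uint32_t>& preWritten,
                          const std::vector<Operation>& operations) {
        std::vector<bool> written(operandCount, false);
        for (uint32_t idx : preWritten) written[idx] = true;
        for (const Operation& op : operations) {
            for (uint32_t in : op.inputs)
                if (!written[in]) return false;   // read before any write
            for (uint32_t out : op.outputs) written[out] = true;
        }
        return true;
    }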
@@ -1668,8 +1678,7 @@
     vec<uint8_t> operandValues;
 
     /**
-     * A collection of shared memory pools containing operand data that were
-     * registered by the model.
+     * A collection of shared memory pools containing operand values.
      *
      * An operand's value must be located here if and only if Operand::lifetime
      * equals OperandLifeTime::CONSTANT_REFERENCE.
@@ -1721,6 +1730,9 @@
  * 1) Provides the input and output data to be used when executing the model.
  * 2) Specifies any updates to the input operand metadata that were left
  *    unspecified at model preparation time.
+ *
+ * An output must not overlap with any other output, with an input, or
+ * with an operand of lifetime CONSTANT_REFERENCE.
  */
 struct Request {
     /**
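The new non-overlap rule for Request arguments reduces to a range check per memory pool. A sketch, assuming a region is fully described by (poolIndex, offset, length) as in the 1.0 DataLocation struct; dimensions and hasNoValue handling are ignored here.

    #include <cstdint>

    struct DataLocation { uint32_t poolIndex, offset, length; };

    // Two regions overlap only if they share a pool and their byte ranges intersect.
    bool regionsOverlap(const DataLocation& a, const DataLocation& b) {
        if (a.poolIndex != b.poolIndex) return false;
        return a.offset < b.offset + b.length && b.offset < a.offset + a.length;
    }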
diff --git a/neuralnetworks/1.1/Android.bp b/neuralnetworks/1.1/Android.bp
index 9365d4e..24d8396 100644
--- a/neuralnetworks/1.1/Android.bp
+++ b/neuralnetworks/1.1/Android.bp
@@ -15,6 +15,8 @@
         "android.hidl.base@1.0",
     ],
     types: [
+        "Capabilities",
+        "ExecutionPreference",
         "Model",
         "Operation",
         "OperationType",
diff --git a/neuralnetworks/1.1/types.hal b/neuralnetworks/1.1/types.hal
index e4c656d..7b2a21a 100644
--- a/neuralnetworks/1.1/types.hal
+++ b/neuralnetworks/1.1/types.hal
@@ -137,13 +137,19 @@
      * * 1: A 2-D Tensor of {@link OperandType::TENSOR_INT32}, the paddings
      *      for each spatial dimension of the input tensor. The shape of the
      *      tensor must be {rank(input0), 2}.
-     *      padding[i, 0] specifies the number of element to be padded in the
+     *      padding[i, 0] specifies the number of elements to be padded in the
      *      front of dimension i.
-     *      padding[i, 1] specifies the number of element to be padded after the
+     *      padding[i, 1] specifies the number of elements to be padded after the
      *      end of dimension i.
      *
      * Outputs:
-     * * 0: A tensor of the same {@link OperandType} as input0.
+     * * 0: A tensor of the same {@link OperandType} as input0. The
+     *      output tensor has the same rank as input0, and each
+     *      dimension of the output tensor has the same size as the
+     *      corresponding dimension of the input tensor plus the size
+     *      of the padding:
+     *          output0.dimension[i] =
+     *              padding[i, 0] + input0.dimension[i] + padding[i, 1]
      */
     PAD = 32,
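A worked instance of the PAD output-shape formula above, as a stand-alone helper (illustrative only): for input0 of shape [2, 3] and padding = [[1, 1], [0, 2]], the output shape is [1 + 2 + 1, 0 + 3 + 2] = [4, 5].

    #include <cstdint>
    #include <utility>
    #include <vector>

    // output0.dimension[i] = padding[i].first + input0.dimension[i] + padding[i].second
    std::vector<uint32_t> padOutputShape(
            const std::vector<uint32_t>& inputDims,
            const std::vector<std::pair<uint32_t, uint32_t>>& padding) {
        std::vector<uint32_t> out(inputDims.size());
        for (size_t i = 0; i < inputDims.size(); ++i)
            out[i] = padding[i].first + inputDims[i] + padding[i].second;
        return out;
    }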
 
@@ -377,19 +383,21 @@
     /**
      * All operations included in the model.
      *
-     * The operations are sorted into execution order.
+     * The operations are sorted into execution order. Every operand
+     * with lifetime MODEL_OUTPUT or TEMPORARY_VARIABLE must be
+     * written before it is read.
      */
     vec<Operation> operations;
 
     /**
-     * Input indexes of the model.
+     * Input indexes of the model. There must be at least one.
      *
      * Each value corresponds to the index of the operand in "operands".
      */
     vec<uint32_t> inputIndexes;
 
     /**
-     * Output indexes of the model.
+     * Output indexes of the model. There must be at least one.
      *
      * Each value corresponds to the index of the operand in "operands".
      */
@@ -404,8 +412,7 @@
     vec<uint8_t> operandValues;
 
     /**
-     * A collection of shared memory pools containing operand data that were
-     * registered by the model.
+     * A collection of shared memory pools containing operand values.
      *
      * An operand's value must be located here if and only if Operand::lifetime
      * equals OperandLifeTime::CONSTANT_REFERENCE.
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_icc.cpp b/radio/1.0/vts/functional/radio_hidl_hal_icc.cpp
index 47498b0..d38bc23 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_icc.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_icc.cpp
@@ -136,7 +136,9 @@
             EXPECT_EQ(std::cv_status::no_timeout, wait());
             EXPECT_EQ(serial, radioRsp->rspInfo.serial);
             EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp->rspInfo.type);
-            EXPECT_EQ(RadioError::PASSWORD_INCORRECT, radioRsp->rspInfo.error);
+            ASSERT_TRUE(CheckAnyOfErrors(
+                radioRsp->rspInfo.error,
+                {RadioError::PASSWORD_INCORRECT, RadioError::REQUEST_NOT_SUPPORTED}));
         }
     }
 }
@@ -159,7 +161,9 @@
             EXPECT_EQ(std::cv_status::no_timeout, wait());
             EXPECT_EQ(serial, radioRsp->rspInfo.serial);
             EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp->rspInfo.type);
-            EXPECT_EQ(RadioError::PASSWORD_INCORRECT, radioRsp->rspInfo.error);
+            ASSERT_TRUE(CheckAnyOfErrors(
+                radioRsp->rspInfo.error,
+                {RadioError::PASSWORD_INCORRECT, RadioError::REQUEST_NOT_SUPPORTED}));
         }
     }
 }
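For readers unfamiliar with the VTS helper: judging only from how it is called in these tests, CheckAnyOfErrors passes when the reported error is any member of the allowed set. The sketch below approximates that behavior; the real helper in the radio VTS utilities may take additional options.

    #include <vector>

    template <typename Error>
    bool CheckAnyOfErrors(Error err, const std::vector<Error>& allowed) {
        for (Error e : allowed)
            if (err == e) return true;
        return false;
    }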
diff --git a/radio/1.2/vts/functional/radio_hidl_hal_api.cpp b/radio/1.2/vts/functional/radio_hidl_hal_api.cpp
index 9284fd8..03911da 100644
--- a/radio/1.2/vts/functional/radio_hidl_hal_api.cpp
+++ b/radio/1.2/vts/functional/radio_hidl_hal_api.cpp
@@ -43,7 +43,13 @@
     if (cardStatus.base.cardState == CardState::ABSENT) {
         ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::SIM_ABSENT}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
-        ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::NONE}));
+        // REQUEST_NOT_SUPPORTED should not be allowed here, since this is not an optional API.
+        // However, the comments in the HAL were not updated to say so, so it is accepted as a
+        // valid error for now. This should be fixed, possibly in a future version of the HAL
+        // (b/110421924). It is being allowed because some vendors do not support
+        // this request on dual-SIM devices.
+        ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                                     {RadioError::NONE, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
@@ -69,7 +75,8 @@
                                      {RadioError::SIM_ABSENT, RadioError::INVALID_ARGUMENTS}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
         ASSERT_TRUE(
-            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::INVALID_ARGUMENTS}));
+            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                             {RadioError::INVALID_ARGUMENTS, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
@@ -105,7 +112,8 @@
                                      {RadioError::SIM_ABSENT, RadioError::INVALID_ARGUMENTS}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
         ASSERT_TRUE(
-            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::INVALID_ARGUMENTS}));
+            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                             {RadioError::INVALID_ARGUMENTS, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
@@ -141,7 +149,8 @@
                                      {RadioError::SIM_ABSENT, RadioError::INVALID_ARGUMENTS}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
         ASSERT_TRUE(
-            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::INVALID_ARGUMENTS}));
+            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                             {RadioError::INVALID_ARGUMENTS, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
@@ -177,7 +186,8 @@
                                      {RadioError::SIM_ABSENT, RadioError::INVALID_ARGUMENTS}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
         ASSERT_TRUE(
-            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::INVALID_ARGUMENTS}));
+            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                             {RadioError::INVALID_ARGUMENTS, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
@@ -213,7 +223,8 @@
                                      {RadioError::SIM_ABSENT, RadioError::INVALID_ARGUMENTS}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
         ASSERT_TRUE(
-            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::INVALID_ARGUMENTS}));
+            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                             {RadioError::INVALID_ARGUMENTS, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
@@ -249,7 +260,8 @@
                                      {RadioError::SIM_ABSENT, RadioError::INVALID_ARGUMENTS}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
         ASSERT_TRUE(
-            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::INVALID_ARGUMENTS}));
+            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                             {RadioError::INVALID_ARGUMENTS, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
@@ -285,7 +297,8 @@
                                      {RadioError::SIM_ABSENT, RadioError::INVALID_ARGUMENTS}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
         ASSERT_TRUE(
-            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::INVALID_ARGUMENTS}));
+            CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                             {RadioError::INVALID_ARGUMENTS, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
@@ -320,7 +333,8 @@
         ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
                                      {RadioError::NONE, RadioError::SIM_ABSENT}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
-        ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::NONE}));
+        ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                                     {RadioError::NONE, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
@@ -356,7 +370,8 @@
         ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
                                      {RadioError::NONE, RadioError::SIM_ABSENT}));
     } else if (cardStatus.base.cardState == CardState::PRESENT) {
-        ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error, {RadioError::NONE}));
+        ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_2->rspInfo.error,
+                                     {RadioError::NONE, RadioError::REQUEST_NOT_SUPPORTED}));
     }
 }
 
diff --git a/radio/1.2/vts/functional/radio_response.cpp b/radio/1.2/vts/functional/radio_response.cpp
index f6bead2..c5c7b14 100644
--- a/radio/1.2/vts/functional/radio_response.cpp
+++ b/radio/1.2/vts/functional/radio_response.cpp
@@ -653,7 +653,9 @@
     return Void();
 }
 
-Return<void> RadioResponse_v1_2::setIndicationFilterResponse(const RadioResponseInfo& /*info*/) {
+Return<void> RadioResponse_v1_2::setIndicationFilterResponse(const RadioResponseInfo& info) {
+    rspInfo = info;
+    parent_v1_2.notify(info.serial);
     return Void();
 }
 
@@ -745,13 +747,17 @@
 }
 
 Return<void> RadioResponse_v1_2::getVoiceRegistrationStateResponse_1_2(
-    const RadioResponseInfo& /*info*/,
+    const RadioResponseInfo& info,
     const ::android::hardware::radio::V1_2::VoiceRegStateResult& /*voiceRegResponse*/) {
+    rspInfo = info;
+    parent_v1_2.notify(info.serial);
     return Void();
 }
 
 Return<void> RadioResponse_v1_2::getDataRegistrationStateResponse_1_2(
-    const RadioResponseInfo& /*info*/,
+    const RadioResponseInfo& info,
     const ::android::hardware::radio::V1_2::DataRegStateResult& /*dataRegResponse*/) {
+    rspInfo = info;
+    parent_v1_2.notify(info.serial);
     return Void();
-}
\ No newline at end of file
+}
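With these responses now recording rspInfo and calling parent_v1_2.notify(), 1.2 tests can use the same wait-and-check pattern seen earlier in this change. A condensed sketch, assuming the usual radio VTS fixture members (serial, radio_v1_2, radioRsp_v1_2, wait(), GetRandomSerialNumber()); the test name is illustrative.

    TEST_F(RadioHidlTest_v1_2, getVoiceRegistrationState_responseSketch) {
        serial = GetRandomSerialNumber();
        radio_v1_2->getVoiceRegistrationState(serial);

        // The response handler above stores rspInfo and signals the fixture.
        EXPECT_EQ(std::cv_status::no_timeout, wait());
        EXPECT_EQ(serial, radioRsp_v1_2->rspInfo.serial);
        EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp_v1_2->rspInfo.type);
    }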