Merge "Clean up dumpstate HAL 1.1 VTS test."
diff --git a/confirmationui/1.0/vts/functional/VtsHalConfirmationUIV1_0TargetTest.cpp b/confirmationui/1.0/vts/functional/VtsHalConfirmationUIV1_0TargetTest.cpp
index 278d1f4..fb01ad0 100644
--- a/confirmationui/1.0/vts/functional/VtsHalConfirmationUIV1_0TargetTest.cpp
+++ b/confirmationui/1.0/vts/functional/VtsHalConfirmationUIV1_0TargetTest.cpp
@@ -336,7 +336,7 @@
     ASSERT_EQ(0U, result.args->formattedMessage_.size());
 }
 
-// Simulates the framework candelling an ongoing prompt
+// Simulates the framework cancelling an ongoing prompt
 TEST_F(ConfirmationUIHidlTest, AbortTest) {
     static constexpr char test_prompt[] = "Me first, gimme gimme!";
     static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3};
@@ -354,6 +354,92 @@
     ASSERT_EQ(0U, result.args->formattedMessage_.size());
 }
 
+// Tests if the confirmation dialog can successfully render 100 'W' characters as required by
+// the design guidelines.
+TEST_F(ConfirmationUIHidlTest, PortableMessageTest1) {
+    static constexpr char test_prompt[] =
+            "WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW"
+            "WWWWWWWWWWWWWW";
+    static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3};
+    sp<ConfirmationTestCallback> conf_cb = new ConfirmationTestCallback;
+    hidl_string prompt_text(test_prompt);
+    hidl_vec<uint8_t> extra(test_extra, test_extra + 3);
+    ASSERT_HAL_CALL(ResponseCode::OK,
+                    confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {}));
+
+    confirmator().abort();
+
+    auto result = conf_cb->WaitForCallback();
+    ASSERT_EQ(ResponseCode::Aborted, result.args->error_);
+    ASSERT_EQ(0U, result.args->confirmationToken_.size());
+    ASSERT_EQ(0U, result.args->formattedMessage_.size());
+}
+
+// Tests if the confirmation dialog can successfully render 100 'W' characters as required by
+// the design guidelines in magnified mode.
+TEST_F(ConfirmationUIHidlTest, PortableMessageTest1Magnified) {
+    static constexpr char test_prompt[] =
+            "WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW"
+            "WWWWWWWWWWWWWW";
+    static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3};
+    sp<ConfirmationTestCallback> conf_cb = new ConfirmationTestCallback;
+    hidl_string prompt_text(test_prompt);
+    hidl_vec<uint8_t> extra(test_extra, test_extra + 3);
+    ASSERT_HAL_CALL(ResponseCode::OK,
+                    confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en",
+                                                         {UIOption::AccessibilityMagnified}));
+
+    confirmator().abort();
+
+    auto result = conf_cb->WaitForCallback();
+    ASSERT_EQ(ResponseCode::Aborted, result.args->error_);
+    ASSERT_EQ(0U, result.args->confirmationToken_.size());
+    ASSERT_EQ(0U, result.args->formattedMessage_.size());
+}
+
+// Tests if the confirmation dialog can successfully render 8 groups of 12 'W' characters as
+// required by the design guidelines.
+TEST_F(ConfirmationUIHidlTest, PortableMessageTest2) {
+    static constexpr char test_prompt[] =
+            "WWWWWWWWWWWW WWWWWWWWWWWW WWWWWWWWWWWW WWWWWWWWWWWW WWWWWWWWWWWW WWWWWWWWWWWW "
+            "WWWWWWWWWWWW WWWWWWWWWWWW";
+    static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3};
+    sp<ConfirmationTestCallback> conf_cb = new ConfirmationTestCallback;
+    hidl_string prompt_text(test_prompt);
+    hidl_vec<uint8_t> extra(test_extra, test_extra + 3);
+    ASSERT_HAL_CALL(ResponseCode::OK,
+                    confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {}));
+
+    confirmator().abort();
+
+    auto result = conf_cb->WaitForCallback();
+    ASSERT_EQ(ResponseCode::Aborted, result.args->error_);
+    ASSERT_EQ(0U, result.args->confirmationToken_.size());
+    ASSERT_EQ(0U, result.args->formattedMessage_.size());
+}
+
+// Tests if the confirmation dialog can successfully render 8 groups of 12 'W' characters as
+// required by the design guidelines in magnified mode.
+TEST_F(ConfirmationUIHidlTest, PortableMessageTest2Magnified) {
+    static constexpr char test_prompt[] =
+            "WWWWWWWWWWWW WWWWWWWWWWWW WWWWWWWWWWWW WWWWWWWWWWWW WWWWWWWWWWWW WWWWWWWWWWWW "
+            "WWWWWWWWWWWW WWWWWWWWWWWW";
+    static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3};
+    sp<ConfirmationTestCallback> conf_cb = new ConfirmationTestCallback;
+    hidl_string prompt_text(test_prompt);
+    hidl_vec<uint8_t> extra(test_extra, test_extra + 3);
+    ASSERT_HAL_CALL(ResponseCode::OK,
+                    confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en",
+                                                         {UIOption::AccessibilityMagnified}));
+
+    confirmator().abort();
+
+    auto result = conf_cb->WaitForCallback();
+    ASSERT_EQ(ResponseCode::Aborted, result.args->error_);
+    ASSERT_EQ(0U, result.args->confirmationToken_.size());
+    ASSERT_EQ(0U, result.args->formattedMessage_.size());
+}
+
 // Passing malformed UTF-8 to the confirmation UI
 // This test passes a string that ends in the middle of a multibyte character
 TEST_F(ConfirmationUIHidlTest, MalformedUTF8Test1) {
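
The four PortableMessage tests added above exercise the two prompt shapes the ConfirmationUI design guidelines require an implementation to render: a single run of 100 'W' characters and eight space-separated groups of twelve 'W's, each with and without UIOption::AccessibilityMagnified. As a minimal illustration (plain standard C++, not part of the change), the two prompts could be generated rather than hard-coded:

#include <string>

// Illustrative only; builds the two guideline prompts that the PortableMessage
// tests above embed as string literals.
std::string MakeHundredWPrompt() {
    return std::string(100, 'W');  // a single run of 100 'W' characters
}

std::string MakeGroupedWPrompt() {
    std::string prompt;
    for (int group = 0; group < 8; ++group) {
        if (group != 0) prompt += ' ';
        prompt += std::string(12, 'W');  // 8 space-separated groups of 12 'W's
    }
    return prompt;
}
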
diff --git a/current.txt b/current.txt
index 5eb7a6f..d0666a8 100644
--- a/current.txt
+++ b/current.txt
@@ -630,7 +630,7 @@
 2fa3679ad7c94b5e88724adcd560c561041068a4ca565c63830e68101988746a android.hardware.neuralnetworks@1.3::IFencedExecutionCallback
 237b23b126a66f3432658020fed78cdd06ba6297459436fe6bae0ba753370833 android.hardware.neuralnetworks@1.3::IPreparedModel
 0439a1fbbec7f16e5e4c653d85ac685d51bfafbae15b8f8cca530acdd7d6a8ce android.hardware.neuralnetworks@1.3::IPreparedModelCallback
-abbc4e1a969881c9f8ab587add5b5e75b08df834c9c969c013ae38cb4bb16f6a android.hardware.neuralnetworks@1.3::types
+2fabd246f985d94a0172dacefb0d6cf19e2aeb2d5f17752653988ef39570a52d android.hardware.neuralnetworks@1.3::types
 3e01d4446cd69fd1c48f8572efd97487bc179564b32bd795800b97bbe10be37b android.hardware.wifi@1.4::IWifi
 a64467bae843569f0d465c5be7f0c7a5b987985b55a3ef4794dd5afc68538650 android.hardware.wifi.supplicant@1.3::ISupplicant
 44445b8a03d7b9e68b2fbd954672c18a8fce9e32851b0692f4f4ab3407f86ecb android.hardware.wifi.supplicant@1.3::ISupplicantStaIface
diff --git a/neuralnetworks/1.3/types.hal b/neuralnetworks/1.3/types.hal
index c5dc08c..530f984 100644
--- a/neuralnetworks/1.3/types.hal
+++ b/neuralnetworks/1.3/types.hal
@@ -2556,6 +2556,30 @@
      *      A 3-D tensor of shape:
      *        If time-major: [max_time, batch_size, bw_output_size]
      *        If batch-major: [batch_size, max_time, bw_output_size]
+     * * 2: The forward activation state output.
+     *      A 2-D tensor of shape [batch_size, fw_output_size] containing an
+     *      activation state from the last time step in the sequence. This
+     *      output is optional and can be omitted. If this output is present
+     *      then outputs 3-5 must be present as well.
+     *      Available since HAL version 1.3.
+     * * 3: The forward cell state output.
+     *      A tensor of shape [batch_size, fw_cell_size] containing a cell state
+     *      from the last time step in the sequence. This output is optional
+     *      and can be omitted. If this output is present
+     *      then outputs 2, 4, 5 must be present as well.
+     *      Available since HAL version 1.3.
+     * * 4: The backward activation state output.
+     *      A 2-D tensor of shape [batch_size, bw_output_size] containing an
+     *      activation state from the last time step in the sequence. This
+     *      output is optional and can be omitted. If this output is present
+     *      then outputs 2, 3, 5 must be present as well.
+     *      Available since HAL version 1.3.
+     * * 5: The backward cell state output.
+     *      A tensor of shape [batch_size, bw_cell_size] containing a cell state
+     *      from the last time step in the sequence. This output is optional
+     *      and can be omitted. If this output is present
+     *      then outputs 2-4 must be present as well.
+     *      Available since HAL version 1.3.
      */
     BIDIRECTIONAL_SEQUENCE_LSTM = @1.2::OperationType:BIDIRECTIONAL_SEQUENCE_LSTM,
 
@@ -2673,6 +2697,18 @@
      *      (timeMajor). If it is set to true, then the shape is set to
      *      [maxTime, batchSize, bwNumUnits], otherwise the shape is set to
      *      [batchSize, maxTime, bwNumUnits].
+     * * 2: The forward hidden state output.
+     *      A 2-D tensor of shape [batchSize, fwNumUnits] containing a hidden
+     *      state from the last time step in the sequence. This output is
+     *      optional and can be omitted. If this output is present then output
+     *      3 must be present as well.
+     *      Available since HAL version 1.3.
+     * * 3: The backward hidden state output.
+     *      A 2-D tensor of shape [batchSize, bwNumUnits] containing a hidden
+     *      state from the last time step in the sequence. This output is
+     *      optional and can be omitted. If this output is present then output
+     *      2 must be present as well.
+     *      Available since HAL version 1.3.
      */
     BIDIRECTIONAL_SEQUENCE_RNN = @1.2::OperationType:BIDIRECTIONAL_SEQUENCE_RNN,
 
@@ -4656,6 +4692,15 @@
      *      A 3-D tensor of shape:
      *        If time-major: [max_time, batch_size, output_size]
      *        If batch-major: [batch_size, max_time, output_size]
+     * * 1: A tensor of shape [batch_size, output_size] containing a hidden
+     *      state from the last time step in the sequence. This output is
+     *      optional and can be omitted. If this output is present then
+     *      output #2 must be present as well.
+     *      Available since HAL version 1.3.
+     * * 2: A tensor of shape [batch_size, cell_size] containing a cell state
+     *      from the last time step in the sequence. This output is optional
+     *      and can be omitted.
+     *      Available since HAL version 1.3.
      */
     UNIDIRECTIONAL_SEQUENCE_LSTM = @1.2::OperationType:UNIDIRECTIONAL_SEQUENCE_LSTM,
 
@@ -4711,6 +4756,10 @@
      *      it is set to 1, then the output has a shape [maxTime, batchSize,
      *      numUnits], otherwise the output has a shape [batchSize, maxTime,
      *      numUnits].
+     * * 1: A tensor of shape [batchSize, numUnits] containing a hidden
+     *      from the last time step in the sequence. This output is optional
+     *      and can be omitted.
+     *      Available since HAL version 1.3.
      */
     UNIDIRECTIONAL_SEQUENCE_RNN = @1.2::OperationType:UNIDIRECTIONAL_SEQUENCE_RNN,
 
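
Because the state outputs documented above are optional trailing outputs, a consumer can tell whether a model requests them purely from an operation's output count. A minimal sketch of that rule for the unidirectional operations, using an assumed enum and helper rather than the real HAL types:

#include <cstddef>

// Illustrative only; the enum and helper are assumptions for this sketch, not HAL API.
enum class SeqOp { UnidirectionalLstm, UnidirectionalRnn };

// Returns true when an operation's output count implies the optional
// last-time-step state outputs documented above are present.
bool HasStateOutputs(SeqOp op, std::size_t numOutputs) {
    switch (op) {
        case SeqOp::UnidirectionalLstm:
            // Output 0, plus optional outputs 1 (hidden state) and 2 (cell state).
            return numOutputs == 3;
        case SeqOp::UnidirectionalRnn:
            // Output 0, plus optional output 1 (hidden state).
            return numOutputs == 2;
    }
    return false;
}
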
diff --git a/neuralnetworks/1.3/vts/functional/ValidateModel.cpp b/neuralnetworks/1.3/vts/functional/ValidateModel.cpp
index 0a35e2d..b9ea430 100644
--- a/neuralnetworks/1.3/vts/functional/ValidateModel.cpp
+++ b/neuralnetworks/1.3/vts/functional/ValidateModel.cpp
@@ -527,9 +527,15 @@
                 }
             }
         }
-        // BIDIRECTIONAL_SEQUENCE_LSTM and BIDIRECTIONAL_SEQUENCE_RNN can have either one or two
-        // outputs depending on their mergeOutputs parameter.
-        if (operation.type == OperationType::BIDIRECTIONAL_SEQUENCE_LSTM ||
+        // BIDIRECTIONAL_SEQUENCE_LSTM and BIDIRECTIONAL_SEQUENCE_RNN can have
+        // either one, two, three or four outputs depending on their
+        // mergeOutputs parameter and on whether state outputs are provided.
+        // UNIDIRECTIONAL_SEQUENCE_LSTM can have either one or three outputs and
+        // UNIDIRECTIONAL_SEQUENCE_RNN can have either one or two outputs
+        // depending on whether state outputs are provided.
+        if (operation.type == OperationType::UNIDIRECTIONAL_SEQUENCE_LSTM ||
+            operation.type == OperationType::UNIDIRECTIONAL_SEQUENCE_RNN ||
+            operation.type == OperationType::BIDIRECTIONAL_SEQUENCE_LSTM ||
             operation.type == OperationType::BIDIRECTIONAL_SEQUENCE_RNN) {
             for (const size_t outOprand : operation.outputs) {
                 if (operand == outOprand) {
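
For context, the condition above extends the skip rule for the remove-operand mutation: removing one of these now-optional sequence outputs can still leave a valid model, so the test must not insist on a validation failure in that case. A condensed sketch of the idea, with hypothetical helper names, reusing the Operation and OperationType identifiers that appear in the hunk above:

// Sketch only: hypothetical helpers mirroring the check in the hunk above.
static bool isSequenceOpWithOptionalOutputs(OperationType type) {
    return type == OperationType::UNIDIRECTIONAL_SEQUENCE_LSTM ||
           type == OperationType::UNIDIRECTIONAL_SEQUENCE_RNN ||
           type == OperationType::BIDIRECTIONAL_SEQUENCE_LSTM ||
           type == OperationType::BIDIRECTIONAL_SEQUENCE_RNN;
}

// True if the operand being removed is an output of such an operation, in which
// case the "removal must invalidate the model" expectation is skipped.
static bool removedOperandIsOptionalSequenceOutput(size_t operand, const Operation& operation) {
    if (!isSequenceOpWithOptionalOutputs(operation.type)) return false;
    for (const size_t outOperand : operation.outputs) {
        if (operand == outOperand) return true;
    }
    return false;
}
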