/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//! Container for messages that are sent via binder.
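//!
//! A minimal round-trip sketch (illustrative only; the value is hypothetical
//! and error handling is kept simple):
//!
//! ```no_run
//! # use binder::Parcel;
//! let mut parcel = Parcel::new();
//! parcel.write(&42i32).expect("write failed");
//! // Rewind before reading the value back.
//! // Safety: position 0 is within the parcel data we just wrote.
//! unsafe { parcel.set_data_position(0).expect("failed to rewind") };
//! assert_eq!(parcel.read::<i32>().expect("read failed"), 42);
//! ```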

use crate::binder::AsNative;
use crate::error::{status_result, Result, StatusCode};
use crate::proxy::SpIBinder;
use crate::sys;

use std::convert::TryInto;
use std::fmt;
use std::marker::PhantomData;
use std::mem::ManuallyDrop;
use std::ptr::{self, NonNull};

mod file_descriptor;
mod parcelable;
mod parcelable_holder;

pub use self::file_descriptor::ParcelFileDescriptor;
pub use self::parcelable::{
    Deserialize, DeserializeArray, DeserializeOption, Parcelable, Serialize, SerializeArray,
    SerializeOption, UnstructuredParcelable, NON_NULL_PARCELABLE_FLAG, NULL_PARCELABLE_FLAG,
};
pub use self::parcelable_holder::{ParcelableHolder, ParcelableMetadata};

/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// A Parcel can contain both serialized data that will be deserialized on the
/// other side of the IPC, and references to live Binder objects that will
/// result in the other side receiving a proxy Binder connected with the
/// original Binder in the Parcel.
///
/// This type represents a parcel that is owned by Rust code.
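///
/// A minimal illustrative sketch of creating and filling an owned parcel; the
/// payload is hypothetical:
///
/// ```no_run
/// # use binder::Parcel;
/// let mut parcel = Parcel::new();
/// parcel.write("hello").expect("write failed");
/// assert!(parcel.get_data_size() > 0);
/// ```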
#[repr(transparent)]
pub struct Parcel {
    ptr: NonNull<sys::AParcel>,
}

/// Safety: This type guarantees that it owns the AParcel and that all access to
/// the AParcel happens through the Parcel, so it is ok to send across threads.
///
/// It would not be okay to implement Sync, because that would allow you to call
/// the reading methods from several threads in parallel, which would be a data
/// race on the cursor position inside the AParcel.
unsafe impl Send for Parcel {}

/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// This object is a borrowed variant of [`Parcel`]. It is a separate type from
/// `&mut Parcel` because it is not valid to `mem::swap` two parcels.
#[repr(transparent)]
pub struct BorrowedParcel<'a> {
    ptr: NonNull<sys::AParcel>,
    _lifetime: PhantomData<&'a mut Parcel>,
}

impl Parcel {
    /// Create a new empty `Parcel`.
    pub fn new() -> Parcel {
        // Safety: If `AParcel_create` succeeds, it always returns
        // a valid pointer. If it fails, the process will crash.
        let ptr = unsafe { sys::AParcel_create() };
        Self { ptr: NonNull::new(ptr).expect("AParcel_create returned null pointer") }
    }

    /// Create an owned reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object. The
    /// parcel object must be owned by the caller prior to this call, as this
    /// constructor takes ownership of the parcel and will destroy it on drop.
    ///
    /// Additionally, the caller must guarantee that it is valid to take
    /// ownership of the AParcel object. All future access to the AParcel
    /// must happen through this `Parcel`.
    ///
    /// Because `Parcel` implements `Send`, the pointer must never point to any
    /// thread-local data, e.g., a variable on the stack, either directly or
    /// indirectly.
    pub unsafe fn from_raw(ptr: *mut sys::AParcel) -> Option<Parcel> {
        NonNull::new(ptr).map(|ptr| Self { ptr })
    }

    /// Consume the parcel, transferring ownership to the caller.
    pub(crate) fn into_raw(self) -> *mut sys::AParcel {
        let ptr = self.ptr.as_ptr();
        let _ = ManuallyDrop::new(self);
        ptr
    }

    /// Get a borrowed view into the contents of this `Parcel`.
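    ///
    /// # Examples
    ///
    /// A short illustrative sketch; the written value is hypothetical:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// let mut borrowed = parcel.borrowed();
    /// borrowed.write(&1u32).expect("write failed");
    /// ```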
    pub fn borrowed(&mut self) -> BorrowedParcel<'_> {
        // Safety: The raw pointer is a valid pointer to an AParcel, and the
        // lifetime of the returned `BorrowedParcel` is tied to `self`, so the
        // borrow checker will ensure that the `AParcel` can only be accessed
        // via the `BorrowedParcel` until it goes out of scope.
        BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData }
    }

    /// Get an immutable borrowed view into the contents of this `Parcel`.
    pub fn borrowed_ref(&self) -> &BorrowedParcel<'_> {
        // Safety: Parcel and BorrowedParcel are both represented in the same
        // way as a NonNull<sys::AParcel> due to their use of repr(transparent),
        // so casting references as done here is valid.
        unsafe { &*(self as *const Parcel as *const BorrowedParcel<'_>) }
    }
}

impl Default for Parcel {
    fn default() -> Self {
        Self::new()
    }
}

impl Clone for Parcel {
    fn clone(&self) -> Self {
        let mut new_parcel = Self::new();
        new_parcel
            .borrowed()
            .append_all_from(self.borrowed_ref())
            .expect("Failed to append from Parcel");
        new_parcel
    }
}

impl<'a> BorrowedParcel<'a> {
    /// Create a borrowed reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object.
    ///
    /// Since the raw pointer is not restricted by any lifetime, the lifetime on
    /// the returned `BorrowedParcel` object can be chosen arbitrarily by the
    /// caller. The caller must ensure it is valid to mutably borrow the AParcel
    /// for the duration of the lifetime that the caller chooses. Note that
    /// since this is a mutable borrow, it must have exclusive access to the
    /// AParcel for the duration of the borrow.
    pub unsafe fn from_raw(ptr: *mut sys::AParcel) -> Option<BorrowedParcel<'a>> {
        Some(Self { ptr: NonNull::new(ptr)?, _lifetime: PhantomData })
    }

    /// Get a sub-reference to this reference to the parcel.
    pub fn reborrow(&mut self) -> BorrowedParcel<'_> {
        // Safety: The raw pointer is a valid pointer to an AParcel, and the
        // lifetime of the returned `BorrowedParcel` is tied to `self`, so the
        // borrow checker will ensure that the `AParcel` can only be accessed
        // via the `BorrowedParcel` until it goes out of scope.
        BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData }
    }
}

/// Safety: The `Parcel` constructors guarantee that a `Parcel` object will
/// always contain a valid pointer to an `AParcel`.
unsafe impl AsNative<sys::AParcel> for Parcel {
    fn as_native(&self) -> *const sys::AParcel {
        self.ptr.as_ptr()
    }

    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        self.ptr.as_ptr()
    }
}

/// Safety: The `BorrowedParcel` constructors guarantee that a `BorrowedParcel`
/// object will always contain a valid pointer to an `AParcel`.
unsafe impl<'a> AsNative<sys::AParcel> for BorrowedParcel<'a> {
    fn as_native(&self) -> *const sys::AParcel {
        self.ptr.as_ptr()
    }

    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        self.ptr.as_ptr()
    }
}

// Data serialization methods
impl<'a> BorrowedParcel<'a> {
    /// Data written to the parcel is zeroed before being deleted or reallocated.
    #[cfg(not(android_ndk))]
    pub fn mark_sensitive(&mut self) {
        // Safety: guaranteed to have a parcel object, and this method never fails
        unsafe { sys::AParcel_markSensitive(self.as_native()) }
    }

    /// Write a type that implements [`Serialize`] to the parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(self)
    }

    /// Writes the length of a slice to the parcel.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
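    ///
    /// # Examples
    ///
    /// A brief illustrative sketch; the slice contents are hypothetical:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// let mut borrowed = parcel.borrowed();
    /// // Writes the length `3i32` for a three-element slice...
    /// borrowed.write_slice_size(Some(&[1u8, 2, 3][..])).expect("write failed");
    /// // ...and `-1i32` for a null (`None`) slice.
    /// borrowed.write_slice_size::<u8>(None).expect("write failed");
    /// ```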
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        if let Some(slice) = slice {
            let len: i32 = slice.len().try_into().or(Err(StatusCode::BAD_VALUE))?;
            self.write(&len)
        } else {
            self.write(&-1i32)
        }
    }

    /// Perform a series of writes to the parcel, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::{Binder, Interface, Parcel};
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(&'b mut WritableSubParcel<'b>) -> Result<()>,
    {
        let start = self.get_data_position();
        self.write(&0i32)?;
        {
            let mut subparcel = WritableSubParcel(self.reborrow());
            f(&mut subparcel)?;
        }
        let end = self.get_data_position();
        // Safety: start is less than the current size of the parcel data
        // buffer, because we just got it with `get_data_position`.
        unsafe {
            self.set_data_position(start)?;
        }
        assert!(end >= start);
        self.write(&(end - start))?;
        // Safety: end is less than the current size of the parcel data
        // buffer, because we just got it with `get_data_position`.
        unsafe {
            self.set_data_position(end)?;
        }
        Ok(())
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        unsafe { sys::AParcel_getDataPosition(self.as_native()) }
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        unsafe { sys::AParcel_getDataSize(self.as_native()) }
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
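    ///
    /// # Examples
    ///
    /// An illustrative sketch of saving and restoring a position; the written
    /// value is hypothetical:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// let start = parcel.get_data_position();
    /// parcel.write(&1i32).expect("write failed");
    /// // Safety: `start` came from this parcel, which has only grown since,
    /// // so it is still within the data buffer.
    /// unsafe { parcel.borrowed_ref().set_data_position(start).expect("rewind failed") };
    /// assert_eq!(parcel.read::<i32>().expect("read failed"), 1);
    /// ```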
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and the caller guarantees that `pos` is within bounds.
        status_result(unsafe { sys::AParcel_setDataPosition(self.as_native(), pos) })
    }

    /// Append a subset of another parcel.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current parcel, or returns an error if not possible.
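    ///
    /// # Examples
    ///
    /// An illustrative sketch of copying the first four bytes (one `i32`) out
    /// of another parcel; the offset, size, and value are hypothetical:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// # let mut other = Parcel::new();
    /// # other.write(&42i32).expect("write failed");
    /// let mut parcel = Parcel::new();
    /// parcel.borrowed().append_from(&other, 0, 4).expect("append failed");
    /// ```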
    pub fn append_from(
        &mut self,
        other: &impl AsNative<sys::AParcel>,
        start: i32,
        size: i32,
    ) -> Result<()> {
        // Safety: `Parcel::appendFrom` from C++ checks that `start`
        // and `size` are in bounds, and returns an error otherwise.
        // Both `self` and `other` always contain valid pointers.
        let status = unsafe {
            sys::AParcel_appendFrom(other.as_native(), self.as_native_mut(), start, size)
        };
        status_result(status)
    }

    /// Append the contents of another parcel.
    pub fn append_all_from(&mut self, other: &impl AsNative<sys::AParcel>) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        let size = unsafe { sys::AParcel_getDataSize(other.as_native()) };
        self.append_from(other, 0, size)
    }
}

/// A segment of a writable parcel, used for [`BorrowedParcel::sized_write`].
pub struct WritableSubParcel<'a>(BorrowedParcel<'a>);

impl<'a> WritableSubParcel<'a> {
    /// Write a type that implements [`Serialize`] to the sub-parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(&mut self.0)
    }
}

impl Parcel {
    /// Data written to the parcel is zeroed before being deleted or reallocated.
    #[cfg(not(android_ndk))]
    pub fn mark_sensitive(&mut self) {
        self.borrowed().mark_sensitive()
    }

    /// Write a type that implements [`Serialize`] to the parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        self.borrowed().write(parcelable)
    }

    /// Writes the length of a slice to the parcel.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        self.borrowed().write_slice_size(slice)
    }

    /// Perform a series of writes to the parcel, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::{Binder, Interface, Parcel};
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(&'b mut WritableSubParcel<'b>) -> Result<()>,
    {
        self.borrowed().sized_write(f)
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        self.borrowed_ref().get_data_position()
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        self.borrowed_ref().get_data_size()
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        // Safety: We have the same safety requirements as
        // `BorrowedParcel::set_data_position`.
        unsafe { self.borrowed_ref().set_data_position(pos) }
    }

    /// Append a subset of another parcel.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current parcel, or returns an error if not possible.
    pub fn append_from(
        &mut self,
        other: &impl AsNative<sys::AParcel>,
        start: i32,
        size: i32,
    ) -> Result<()> {
        self.borrowed().append_from(other, start, size)
    }

    /// Append the contents of another parcel.
    pub fn append_all_from(&mut self, other: &impl AsNative<sys::AParcel>) -> Result<()> {
        self.borrowed().append_all_from(other)
    }
}

// Data deserialization methods
impl<'a> BorrowedParcel<'a> {
    /// Attempt to read a type that implements [`Deserialize`] from this parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(self)
    }

    /// Attempt to read a type that implements [`Deserialize`] from this parcel
    /// onto an existing value. This operation will overwrite the old value
    /// partially or completely, depending on how much data is available.
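    ///
    /// # Examples
    ///
    /// A minimal illustrative sketch; the value written beforehand is
    /// hypothetical:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// # let mut parcel = Parcel::new();
    /// # parcel.write(&7i32).expect("write failed");
    /// # // Safety: position 0 is within the parcel data we just wrote.
    /// # unsafe { parcel.set_data_position(0).expect("failed to rewind") };
    /// let mut value = 0i32;
    /// parcel.borrowed_ref().read_onto(&mut value).expect("read failed");
    /// assert_eq!(value, 7);
    /// ```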
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        x.deserialize_from(self)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling `ReadableSubParcel::has_more_data` to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// let mut parcelable = Default::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(ReadableSubParcel<'b>) -> Result<()>,
    {
        let start = self.get_data_position();
        let parcelable_size: i32 = self.read()?;
        if parcelable_size < 4 {
            return Err(StatusCode::BAD_VALUE);
        }

        let end = start.checked_add(parcelable_size).ok_or(StatusCode::BAD_VALUE)?;
        if end > self.get_data_size() {
            return Err(StatusCode::NOT_ENOUGH_DATA);
        }

        let subparcel = ReadableSubParcel {
            parcel: BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData },
            end_position: end,
        };
        f(subparcel)?;

        // Advance the data position to the actual end,
        // in case the closure read less data than was available.
        //
        // Safety: end must be less than the current size of the parcel, because
        // we checked above against `get_data_size`.
        unsafe {
            self.set_data_position(end)?;
        }

        Ok(())
    }

    /// Read a vector size from the parcel and resize the given output vector to
    /// be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
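    ///
    /// # Examples
    ///
    /// An illustrative sketch of the intended round trip; the element type and
    /// length are hypothetical:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// let data = [7u32, 8, 9];
    /// // Only the length (3) is written here, not the elements themselves.
    /// parcel.write_slice_size(Some(&data[..])).expect("write failed");
    /// // Safety: position 0 is within the parcel data we just wrote.
    /// unsafe { parcel.set_data_position(0).expect("failed to rewind") };
    /// let mut out: Vec<u32> = Vec::new();
    /// parcel.borrowed_ref().resize_out_vec(&mut out).expect("resize failed");
    /// assert_eq!(out.len(), 3);
    /// ```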
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            return Err(StatusCode::UNEXPECTED_NULL);
        }

        // usize in Rust may be 16-bit, so i32 may not fit
        let len = len.try_into().unwrap();
        out_vec.resize_with(len, Default::default);

        Ok(())
    }

    /// Read a vector size from the parcel and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            *out_vec = None;
        } else {
            // usize in Rust may be 16-bit, so i32 may not fit
            let len = len.try_into().unwrap();
            let mut vec = Vec::with_capacity(len);
            vec.resize_with(len, Default::default);
            *out_vec = Some(vec);
        }

        Ok(())
    }
}

/// A segment of a readable parcel, used for [`Parcel::sized_read`].
pub struct ReadableSubParcel<'a> {
    parcel: BorrowedParcel<'a>,
    end_position: i32,
}

impl<'a> ReadableSubParcel<'a> {
    /// Read a type that implements [`Deserialize`] from the sub-parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(&self.parcel)
    }

    /// Check if the sub-parcel has more data to read
    pub fn has_more_data(&self) -> bool {
        self.parcel.get_data_position() < self.end_position
    }
}

impl Parcel {
    /// Attempt to read a type that implements [`Deserialize`] from this parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        self.borrowed_ref().read()
    }

    /// Attempt to read a type that implements [`Deserialize`] from this parcel
    /// onto an existing value. This operation will overwrite the old value
    /// partially or completely, depending on how much data is available.
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        self.borrowed_ref().read_onto(x)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling `ReadableSubParcel::has_more_data` to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// let mut parcelable = Default::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(ReadableSubParcel<'b>) -> Result<()>,
    {
        self.borrowed_ref().sized_read(f)
    }

    /// Read a vector size from the parcel and resize the given output vector to
    /// be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        self.borrowed_ref().resize_out_vec(out_vec)
    }

    /// Read a vector size from the parcel and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        self.borrowed_ref().resize_nullable_out_vec(out_vec)
    }
}

// Internal APIs
impl<'a> BorrowedParcel<'a> {
    pub(crate) fn write_binder(&mut self, binder: Option<&SpIBinder>) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`. `AsNative` for `Option<SpIBinder>` will either return
        // null or a valid pointer to an `AIBinder`, both of which are
        // valid, safe inputs to `AParcel_writeStrongBinder`.
        //
        // This call does not take ownership of the binder. However, it does
        // require a mutable pointer, which we cannot extract from an
        // immutable reference, so we clone the binder, incrementing the
        // refcount before the call. The refcount will be immediately
        // decremented when this temporary is dropped.
        unsafe {
            status_result(sys::AParcel_writeStrongBinder(
                self.as_native_mut(),
                binder.cloned().as_native_mut(),
            ))
        }
    }

    pub(crate) fn read_binder(&self) -> Result<Option<SpIBinder>> {
        let mut binder = ptr::null_mut();
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`. We pass a valid, mutable out pointer to the `binder`
        // parameter. After this call, `binder` will be either null or a
        // valid pointer to an `AIBinder` owned by the caller.
        let status = unsafe { sys::AParcel_readStrongBinder(self.as_native(), &mut binder) };

        status_result(status)?;

        // Safety: `binder` is either null or a valid, owned pointer at this
        // point, so can be safely passed to `SpIBinder::from_raw`.
        Ok(unsafe { SpIBinder::from_raw(binder) })
    }
}

impl Drop for Parcel {
    fn drop(&mut self) {
        // Run the C++ Parcel complete object destructor
        //
        // Safety: `Parcel` always contains a valid pointer to an
        // `AParcel`. Since we own the parcel, we can safely delete it
        // here.
        unsafe { sys::AParcel_delete(self.ptr.as_ptr()) }
    }
}

impl fmt::Debug for Parcel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Parcel").finish()
    }
}

impl<'a> fmt::Debug for BorrowedParcel<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("BorrowedParcel").finish()
    }
}

#[test]
fn test_read_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert_eq!(parcel.read::<bool>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i8>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u16>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<Option<String>>(), Ok(None));
    assert_eq!(parcel.read::<String>(), Err(StatusCode::UNEXPECTED_NULL));

    assert_eq!(parcel.borrowed_ref().read_binder().err(), Some(StatusCode::BAD_TYPE));

    parcel.write(&1i32).unwrap();

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    let i: i32 = parcel.read().unwrap();
    assert_eq!(i, 1i32);
}

#[test]
#[allow(clippy::float_cmp)]
fn test_read_data() {
    let mut parcel = Parcel::new();
    let str_start = parcel.get_data_position();

    parcel.write(&b"Hello, Binder!\0"[..]).unwrap();
    // Skip over string length
    // SAFETY: str_start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }
    assert_eq!(parcel.read::<i32>().unwrap(), 15);
    let start = parcel.get_data_position();

    assert!(parcel.read::<bool>().unwrap());

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i8>().unwrap(), 72i8);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u16>().unwrap(), 25928);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i32>().unwrap(), 1819043144);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u32>().unwrap(), 1819043144);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i64>().unwrap(), 4764857262830019912);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u64>().unwrap(), 4764857262830019912);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<f32>().unwrap(), 1143139100000000000000000000.0);
    assert_eq!(parcel.read::<f32>().unwrap(), 40.043392);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<f64>().unwrap(), 34732488246.197815);

    // Skip back to before the string length
    // SAFETY: str_start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }

    assert_eq!(parcel.read::<Vec<u8>>().unwrap(), b"Hello, Binder!\0");
}

#[test]
fn test_utf8_utf16_conversions() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert!(parcel.write("Hello, Binder!").is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(parcel.read::<Option<String>>().unwrap().unwrap(), "Hello, Binder!",);
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write("Embedded null \0 inside a string").is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Embedded null \0 inside a string",
    );
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write(&["str1", "str2", "str3"][..]).is_ok());
    assert!(parcel
        .write(&[String::from("str4"), String::from("str5"), String::from("str6"),][..])
        .is_ok());

    let s1 = "Hello, Binder!";
    let s2 = "This is a utf8 string.";
    let s3 = "Some more text here.";

    assert!(parcel.write(&[s1, s2, s3][..]).is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<Vec<String>>().unwrap(), ["str1", "str2", "str3"]);
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), ["str4", "str5", "str6"]);
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), [s1, s2, s3]);
}

#[test]
fn test_sized_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    let arr = [1i32, 2i32, 3i32];

    parcel
        .sized_write(|subparcel| subparcel.write(&arr[..]))
        .expect("Could not perform sized write");

    // i32 sub-parcel length + i32 array length + 3 i32 elements
    let expected_len = 20i32;

    assert_eq!(parcel.get_data_position(), start + expected_len);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    assert_eq!(expected_len, parcel.read().unwrap(),);

    assert_eq!(parcel.read::<Vec<i32>>().unwrap(), &arr,);
}

#[test]
fn test_append_from() {
    let mut parcel1 = Parcel::new();
    parcel1.write(&42i32).expect("Could not perform write");

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(4, parcel2.get_data_size());
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(8, parcel2.get_data_size());
    // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not
    // empty.
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 2, 2));
    assert_eq!(4, parcel2.get_data_size());
    // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not
    // empty.
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(2, parcel2.get_data_size());
    // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not
    // empty.
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Err(StatusCode::NOT_ENOUGH_DATA), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 4, 2));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, -1, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, -1));