/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//! Container for messages that are sent via binder.

use crate::binder::AsNative;
use crate::error::{status_result, Result, StatusCode};
use crate::proxy::SpIBinder;
use crate::sys;

use std::convert::TryInto;
use std::fmt;
use std::marker::PhantomData;
use std::mem::ManuallyDrop;
use std::ptr::{self, NonNull};

mod file_descriptor;
mod parcelable;
mod parcelable_holder;

pub use self::file_descriptor::ParcelFileDescriptor;
pub use self::parcelable::{
    Deserialize, DeserializeArray, DeserializeOption, Parcelable, Serialize, SerializeArray,
    SerializeOption, NON_NULL_PARCELABLE_FLAG, NULL_PARCELABLE_FLAG,
};
pub use self::parcelable_holder::{ParcelableHolder, ParcelableMetadata};

/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// A Parcel can contain both serialized data that will be deserialized on the
/// other side of the IPC, and references to live Binder objects that will
/// result in the other side receiving a proxy Binder connected with the
/// original Binder in the Parcel.
///
/// This type represents a parcel that is owned by Rust code.
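///
/// # Examples
///
/// A minimal round trip, writing a value and then reading it back
/// (illustrative sketch; error handling and imports elided):
///
/// ```ignore
/// let mut parcel = Parcel::new();
/// let start = parcel.get_data_position();
/// parcel.write(&42i32)?;
/// // Safety: `start` is within the parcel data buffer.
/// unsafe { parcel.set_data_position(start)? };
/// assert_eq!(parcel.read::<i32>()?, 42);
/// ```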
#[repr(transparent)]
pub struct Parcel {
    ptr: NonNull<sys::AParcel>,
}

/// Safety: This type guarantees that it owns the AParcel and that all access to
/// the AParcel happens through the Parcel, so it is ok to send across threads.
///
/// It would not be okay to implement Sync, because that would allow you to call
/// the reading methods from several threads in parallel, which would be a data
/// race on the cursor position inside the AParcel.
unsafe impl Send for Parcel {}

/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// This object is a borrowed variant of [`Parcel`]. It is a separate type from
/// `&mut Parcel` because it is not valid to `mem::swap` two parcels.
#[repr(transparent)]
pub struct BorrowedParcel<'a> {
    ptr: NonNull<sys::AParcel>,
    _lifetime: PhantomData<&'a mut Parcel>,
}

impl Parcel {
    /// Create a new empty `Parcel`.
    pub fn new() -> Parcel {
        // Safety: If `AParcel_create` succeeds, it always returns
        // a valid pointer. If it fails, the process will crash.
        let ptr = unsafe { sys::AParcel_create() };
        Self { ptr: NonNull::new(ptr).expect("AParcel_create returned null pointer") }
    }

    /// Create an owned reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object. The
    /// parcel object must be owned by the caller prior to this call, as this
    /// constructor takes ownership of the parcel and will destroy it on drop.
    ///
    /// Additionally, the caller must guarantee that it is valid to take
    /// ownership of the AParcel object. All future access to the AParcel
    /// must happen through this `Parcel`.
    ///
    /// Because `Parcel` implements `Send`, the pointer must never point to any
    /// thread-local data, e.g., a variable on the stack, either directly or
    /// indirectly.
    pub unsafe fn from_raw(ptr: *mut sys::AParcel) -> Option<Parcel> {
        NonNull::new(ptr).map(|ptr| Self { ptr })
    }

    /// Consume the parcel, transferring ownership to the caller.
    pub(crate) fn into_raw(self) -> *mut sys::AParcel {
        let ptr = self.ptr.as_ptr();
        let _ = ManuallyDrop::new(self);
        ptr
    }

    /// Get a borrowed view into the contents of this `Parcel`.
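    ///
    /// # Examples
    ///
    /// A minimal sketch of writing through the borrowed view (illustrative;
    /// error handling elided):
    ///
    /// ```ignore
    /// let mut parcel = Parcel::new();
    /// let mut borrowed = parcel.borrowed();
    /// borrowed.write(&7i32)?;
    /// ```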
    pub fn borrowed(&mut self) -> BorrowedParcel<'_> {
        // Safety: The raw pointer is a valid pointer to an AParcel, and the
        // lifetime of the returned `BorrowedParcel` is tied to `self`, so the
        // borrow checker will ensure that the `AParcel` can only be accessed
        // via the `BorrowedParcel` until it goes out of scope.
        BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData }
    }

    /// Get an immutable borrowed view into the contents of this `Parcel`.
    pub fn borrowed_ref(&self) -> &BorrowedParcel<'_> {
        // Safety: Parcel and BorrowedParcel are both represented in the same
        // way as a NonNull<sys::AParcel> due to their use of repr(transparent),
        // so casting references as done here is valid.
        unsafe { &*(self as *const Parcel as *const BorrowedParcel<'_>) }
    }
}

impl Default for Parcel {
    fn default() -> Self {
        Self::new()
    }
}

impl Clone for Parcel {
    fn clone(&self) -> Self {
        let mut new_parcel = Self::new();
        new_parcel
            .borrowed()
            .append_all_from(self.borrowed_ref())
            .expect("Failed to append from Parcel");
        new_parcel
    }
}

impl<'a> BorrowedParcel<'a> {
    /// Create a borrowed reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object.
    ///
    /// Since the raw pointer is not restricted by any lifetime, the lifetime on
    /// the returned `BorrowedParcel` object can be chosen arbitrarily by the
    /// caller. The caller must ensure it is valid to mutably borrow the AParcel
    /// for the duration of the lifetime that the caller chooses. Note that
    /// since this is a mutable borrow, it must have exclusive access to the
    /// AParcel for the duration of the borrow.
    pub unsafe fn from_raw(ptr: *mut sys::AParcel) -> Option<BorrowedParcel<'a>> {
        Some(Self { ptr: NonNull::new(ptr)?, _lifetime: PhantomData })
    }

    /// Get a sub-reference to this reference to the parcel.
    pub fn reborrow(&mut self) -> BorrowedParcel<'_> {
        // Safety: The raw pointer is a valid pointer to an AParcel, and the
        // lifetime of the returned `BorrowedParcel` is tied to `self`, so the
        // borrow checker will ensure that the `AParcel` can only be accessed
        // via the `BorrowedParcel` until it goes out of scope.
        BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData }
    }
}

/// Safety: The `Parcel` constructors guarantee that a `Parcel` object will
/// always contain a valid pointer to an `AParcel`.
unsafe impl AsNative<sys::AParcel> for Parcel {
    fn as_native(&self) -> *const sys::AParcel {
        self.ptr.as_ptr()
    }

    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        self.ptr.as_ptr()
    }
}

/// Safety: The `BorrowedParcel` constructors guarantee that a `BorrowedParcel`
/// object will always contain a valid pointer to an `AParcel`.
unsafe impl<'a> AsNative<sys::AParcel> for BorrowedParcel<'a> {
    fn as_native(&self) -> *const sys::AParcel {
        self.ptr.as_ptr()
    }

    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        self.ptr.as_ptr()
    }
}

// Data serialization methods
impl<'a> BorrowedParcel<'a> {
    /// Data written to the parcel is zeroed before being deleted or reallocated.
    pub fn mark_sensitive(&mut self) {
        // Safety: guaranteed to have a parcel object, and this method never fails
        unsafe { sys::AParcel_markSensitive(self.as_native()) }
    }

    /// Write a type that implements [`Serialize`] to the parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(self)
    }

    /// Writes the length of a slice to the parcel.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
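    ///
    /// # Examples
    ///
    /// A minimal sketch (illustrative; error handling elided):
    ///
    /// ```ignore
    /// let mut parcel = Parcel::new();
    /// let buf = [0u8; 16];
    /// // Writes the slice length (16) as an i32.
    /// parcel.borrowed().write_slice_size(Some(&buf[..]))?;
    /// // A missing slice is encoded as -1.
    /// parcel.borrowed().write_slice_size::<u8>(None)?;
    /// ```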
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        if let Some(slice) = slice {
            let len: i32 = slice.len().try_into().or(Err(StatusCode::BAD_VALUE))?;
            self.write(&len)
        } else {
            self.write(&-1i32)
        }
    }

    /// Perform a series of writes to the parcel, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::{Binder, Interface, Parcel};
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(&'b mut WritableSubParcel<'b>) -> Result<()>,
    {
        let start = self.get_data_position();
        self.write(&0i32)?;
        {
            let mut subparcel = WritableSubParcel(self.reborrow());
            f(&mut subparcel)?;
        }
        let end = self.get_data_position();
        // Safety: start is less than the current size of the parcel data
        // buffer, because we just got it with `get_data_position`.
        unsafe {
            self.set_data_position(start)?;
        }
        assert!(end >= start);
        self.write(&(end - start))?;
        // Safety: end is less than the current size of the parcel data
        // buffer, because we just got it with `get_data_position`.
        unsafe {
            self.set_data_position(end)?;
        }
        Ok(())
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        unsafe { sys::AParcel_getDataPosition(self.as_native()) }
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        unsafe { sys::AParcel_getDataSize(self.as_native()) }
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and the caller guarantees that `pos` is within bounds.
        status_result(unsafe { sys::AParcel_setDataPosition(self.as_native(), pos) })
    }

    /// Append a subset of another parcel.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current parcel, or returns an error if not possible.
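    ///
    /// # Examples
    ///
    /// A minimal sketch that copies the first four bytes of one parcel into
    /// another (illustrative; error handling elided):
    ///
    /// ```ignore
    /// let mut source = Parcel::new();
    /// source.write(&42i32)?;
    /// let mut destination = Parcel::new();
    /// destination.borrowed().append_from(&source, 0, 4)?;
    /// ```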
    pub fn append_from(
        &mut self,
        other: &impl AsNative<sys::AParcel>,
        start: i32,
        size: i32,
    ) -> Result<()> {
        // Safety: `Parcel::appendFrom` from C++ checks that `start`
        // and `size` are in bounds, and returns an error otherwise.
        // Both `self` and `other` always contain valid pointers.
        let status = unsafe {
            sys::AParcel_appendFrom(other.as_native(), self.as_native_mut(), start, size)
        };
        status_result(status)
    }

    /// Append the contents of another parcel.
    pub fn append_all_from(&mut self, other: &impl AsNative<sys::AParcel>) -> Result<()> {
        // Safety: the `AsNative` implementation guarantees that `other` always
        // contains a valid pointer to an `AParcel`, and this call is otherwise safe.
        let size = unsafe { sys::AParcel_getDataSize(other.as_native()) };
        self.append_from(other, 0, size)
    }
}

/// A segment of a writable parcel, used for [`BorrowedParcel::sized_write`].
pub struct WritableSubParcel<'a>(BorrowedParcel<'a>);

impl<'a> WritableSubParcel<'a> {
    /// Write a type that implements [`Serialize`] to the sub-parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(&mut self.0)
    }
}

impl Parcel {
    /// Data written to the parcel is zeroed before being deleted or reallocated.
    pub fn mark_sensitive(&mut self) {
        self.borrowed().mark_sensitive()
    }

    /// Write a type that implements [`Serialize`] to the parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        self.borrowed().write(parcelable)
    }

    /// Writes the length of a slice to the parcel.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        self.borrowed().write_slice_size(slice)
    }

    /// Perform a series of writes to the parcel, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::{Binder, Interface, Parcel};
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(&'b mut WritableSubParcel<'b>) -> Result<()>,
    {
        self.borrowed().sized_write(f)
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        self.borrowed_ref().get_data_position()
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        self.borrowed_ref().get_data_size()
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        // Safety: We have the same safety requirements as
        // `BorrowedParcel::set_data_position`.
        unsafe { self.borrowed_ref().set_data_position(pos) }
    }

    /// Append a subset of another parcel.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current parcel, or returns an error if not possible.
    pub fn append_from(
        &mut self,
        other: &impl AsNative<sys::AParcel>,
        start: i32,
        size: i32,
    ) -> Result<()> {
        self.borrowed().append_from(other, start, size)
    }

    /// Append the contents of another parcel.
    pub fn append_all_from(&mut self, other: &impl AsNative<sys::AParcel>) -> Result<()> {
        self.borrowed().append_all_from(other)
    }
}

// Data deserialization methods
impl<'a> BorrowedParcel<'a> {
    /// Attempt to read a type that implements [`Deserialize`] from this parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(self)
    }

    /// Attempt to read a type that implements [`Deserialize`] from this parcel
    /// onto an existing value. This operation will overwrite the old value
    /// partially or completely, depending on how much data is available.
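    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming the parcel already holds a serialized `i32`
    /// (illustrative; error handling elided):
    ///
    /// ```ignore
    /// let mut value = 0i32;
    /// parcel.read_onto(&mut value)?;
    /// ```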
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        x.deserialize_from(self)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling `ReadableSubParcel::has_more_data` to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// let mut parcelable = Default::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(ReadableSubParcel<'b>) -> Result<()>,
    {
        let start = self.get_data_position();
        let parcelable_size: i32 = self.read()?;
        if parcelable_size < 4 {
            return Err(StatusCode::BAD_VALUE);
        }

        let end = start.checked_add(parcelable_size).ok_or(StatusCode::BAD_VALUE)?;
        if end > self.get_data_size() {
            return Err(StatusCode::NOT_ENOUGH_DATA);
        }

        let subparcel = ReadableSubParcel {
            parcel: BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData },
            end_position: end,
        };
        f(subparcel)?;

        // Advance the data position to the actual end,
        // in case the closure read less data than was available.
        //
        // Safety: end must be less than the current size of the parcel, because
        // we checked above against `get_data_size`.
        unsafe {
            self.set_data_position(end)?;
        }

        Ok(())
    }

    /// Read a vector size from the parcel and resize the given output vector to
    /// be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
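    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming the parcel holds a length prefix written by
    /// the client (illustrative; error handling elided):
    ///
    /// ```ignore
    /// let mut out: Vec<i32> = Vec::new();
    /// parcel.resize_out_vec(&mut out)?;
    /// // `out` now has length-many default-initialized elements to fill in.
    /// ```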
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            return Err(StatusCode::UNEXPECTED_NULL);
        }

        // usize in Rust may be 16-bit, so i32 may not fit
        let len = len.try_into().unwrap();
        out_vec.resize_with(len, Default::default);

        Ok(())
    }

    /// Read a vector size from the parcel and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            *out_vec = None;
        } else {
            // usize in Rust may be 16-bit, so i32 may not fit
            let len = len.try_into().unwrap();
            let mut vec = Vec::with_capacity(len);
            vec.resize_with(len, Default::default);
            *out_vec = Some(vec);
        }

        Ok(())
    }
}

/// A segment of a readable parcel, used for [`Parcel::sized_read`].
pub struct ReadableSubParcel<'a> {
    parcel: BorrowedParcel<'a>,
    end_position: i32,
}

impl<'a> ReadableSubParcel<'a> {
    /// Read a type that implements [`Deserialize`] from the sub-parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(&self.parcel)
    }

    /// Check if the sub-parcel has more data to read
    pub fn has_more_data(&self) -> bool {
        self.parcel.get_data_position() < self.end_position
    }
}

impl Parcel {
    /// Attempt to read a type that implements [`Deserialize`] from this parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        self.borrowed_ref().read()
    }

    /// Attempt to read a type that implements [`Deserialize`] from this parcel
    /// onto an existing value. This operation will overwrite the old value
    /// partially or completely, depending on how much data is available.
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        self.borrowed_ref().read_onto(x)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling `ReadableSubParcel::has_more_data` to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// let mut parcelable = Default::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(ReadableSubParcel<'b>) -> Result<()>,
    {
        self.borrowed_ref().sized_read(f)
    }

    /// Read a vector size from the parcel and resize the given output vector to
    /// be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        self.borrowed_ref().resize_out_vec(out_vec)
    }

    /// Read a vector size from the parcel and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        self.borrowed_ref().resize_nullable_out_vec(out_vec)
    }
}

// Internal APIs
impl<'a> BorrowedParcel<'a> {
    pub(crate) fn write_binder(&mut self, binder: Option<&SpIBinder>) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`. `AsNative` for `Option<SpIBinder>` will either return
        // null or a valid pointer to an `AIBinder`, both of which are
        // valid, safe inputs to `AParcel_writeStrongBinder`.
        //
        // This call does not take ownership of the binder. However, it does
        // require a mutable pointer, which we cannot extract from an
        // immutable reference, so we clone the binder, incrementing the
        // refcount before the call. The refcount will be immediately
        // decremented when this temporary is dropped.
        unsafe {
            status_result(sys::AParcel_writeStrongBinder(
                self.as_native_mut(),
                binder.cloned().as_native_mut(),
            ))
        }
    }

    pub(crate) fn read_binder(&self) -> Result<Option<SpIBinder>> {
        let mut binder = ptr::null_mut();
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`. We pass a valid, mutable out pointer to the `binder`
        // parameter. After this call, `binder` will be either null or a
        // valid pointer to an `AIBinder` owned by the caller.
        let status = unsafe { sys::AParcel_readStrongBinder(self.as_native(), &mut binder) };

        status_result(status)?;

        // Safety: `binder` is either null or a valid, owned pointer at this
        // point, so can be safely passed to `SpIBinder::from_raw`.
        Ok(unsafe { SpIBinder::from_raw(binder) })
    }
}

impl Drop for Parcel {
    fn drop(&mut self) {
        // Run the C++ Parcel complete object destructor
        //
        // Safety: `Parcel` always contains a valid pointer to an
        // `AParcel`. Since we own the parcel, we can safely delete it
        // here.
        unsafe { sys::AParcel_delete(self.ptr.as_ptr()) }
    }
}

impl fmt::Debug for Parcel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Parcel").finish()
    }
}

impl<'a> fmt::Debug for BorrowedParcel<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("BorrowedParcel").finish()
    }
}

#[test]
fn test_read_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert_eq!(parcel.read::<bool>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i8>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u16>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<Option<String>>(), Ok(None));
    assert_eq!(parcel.read::<String>(), Err(StatusCode::UNEXPECTED_NULL));

    assert_eq!(parcel.borrowed_ref().read_binder().err(), Some(StatusCode::BAD_TYPE));

    parcel.write(&1i32).unwrap();

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    let i: i32 = parcel.read().unwrap();
    assert_eq!(i, 1i32);
}

#[test]
#[allow(clippy::float_cmp)]
fn test_read_data() {
    let mut parcel = Parcel::new();
    let str_start = parcel.get_data_position();

    parcel.write(&b"Hello, Binder!\0"[..]).unwrap();
    // Skip over string length
    // SAFETY: str_start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }
    assert_eq!(parcel.read::<i32>().unwrap(), 15);
    let start = parcel.get_data_position();

    assert!(parcel.read::<bool>().unwrap());

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i8>().unwrap(), 72i8);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u16>().unwrap(), 25928);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i32>().unwrap(), 1819043144);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u32>().unwrap(), 1819043144);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i64>().unwrap(), 4764857262830019912);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u64>().unwrap(), 4764857262830019912);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<f32>().unwrap(), 1143139100000000000000000000.0);
    assert_eq!(parcel.read::<f32>().unwrap(), 40.043392);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<f64>().unwrap(), 34732488246.197815);

    // Skip back to before the string length
    // SAFETY: str_start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }

    assert_eq!(parcel.read::<Vec<u8>>().unwrap(), b"Hello, Binder!\0");
}

#[test]
fn test_utf8_utf16_conversions() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert!(parcel.write("Hello, Binder!").is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(parcel.read::<Option<String>>().unwrap().unwrap(), "Hello, Binder!",);
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write("Embedded null \0 inside a string").is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Embedded null \0 inside a string",
    );
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write(&["str1", "str2", "str3"][..]).is_ok());
    assert!(parcel
        .write(&[String::from("str4"), String::from("str5"), String::from("str6"),][..])
        .is_ok());

    let s1 = "Hello, Binder!";
    let s2 = "This is a utf8 string.";
    let s3 = "Some more text here.";

    assert!(parcel.write(&[s1, s2, s3][..]).is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<Vec<String>>().unwrap(), ["str1", "str2", "str3"]);
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), ["str4", "str5", "str6"]);
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), [s1, s2, s3]);
}

#[test]
fn test_sized_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    let arr = [1i32, 2i32, 3i32];

    parcel
        .sized_write(|subparcel| subparcel.write(&arr[..]))
        .expect("Could not perform sized write");

    // i32 sub-parcel length + i32 array length + 3 i32 elements
    let expected_len = 20i32;

    assert_eq!(parcel.get_data_position(), start + expected_len);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    assert_eq!(expected_len, parcel.read().unwrap(),);

    assert_eq!(parcel.read::<Vec<i32>>().unwrap(), &arr,);
}

#[test]
fn test_append_from() {
    let mut parcel1 = Parcel::new();
    parcel1.write(&42i32).expect("Could not perform write");

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(4, parcel2.get_data_size());
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(8, parcel2.get_data_size());
    // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not
    // empty.
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 2, 2));
    assert_eq!(4, parcel2.get_data_size());
    // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not
    // empty.
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(2, parcel2.get_data_size());
    // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not
    // empty.
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Err(StatusCode::NOT_ENOUGH_DATA), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 4, 2));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, -1, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, -1));
}