Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2020 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | //! Container for messages that are sent via binder. |
| 18 | |
| 19 | use crate::binder::AsNative; |
| 20 | use crate::error::{status_result, Result, StatusCode}; |
| 21 | use crate::proxy::SpIBinder; |
| 22 | use crate::sys; |
| 23 | |
| 24 | use std::convert::TryInto; |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 25 | use std::fmt; |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 26 | use std::marker::PhantomData; |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 27 | use std::mem::ManuallyDrop; |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 28 | use std::ptr::{self, NonNull}; |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 29 | |
| 30 | mod file_descriptor; |
| 31 | mod parcelable; |
Andrei Homescu | ea40621 | 2021-09-03 02:55:00 +0000 | [diff] [blame] | 32 | mod parcelable_holder; |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 33 | |
| 34 | pub use self::file_descriptor::ParcelFileDescriptor; |
| 35 | pub use self::parcelable::{ |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 36 | Deserialize, DeserializeArray, DeserializeOption, Parcelable, Serialize, SerializeArray, |
| 37 | SerializeOption, NON_NULL_PARCELABLE_FLAG, NULL_PARCELABLE_FLAG, |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 38 | }; |
Andrei Homescu | ea40621 | 2021-09-03 02:55:00 +0000 | [diff] [blame] | 39 | pub use self::parcelable_holder::{ParcelableHolder, ParcelableMetadata}; |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 40 | |
/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// A Parcel can contain both serialized data that will be deserialized on the
/// other side of the IPC, and references to live Binder objects that will
/// result in the other side receiving a proxy Binder connected with the
/// original Binder in the Parcel.
///
/// This type represents a parcel that is owned by Rust code; ownership of the
/// underlying `AParcel` is taken on construction (see [`Parcel::from_raw`]).
#[repr(transparent)]
pub struct Parcel {
    // Non-null pointer to the underlying NDK parcel object.
    // `repr(transparent)` guarantees this struct has exactly the layout of
    // the pointer, which `Parcel::borrowed_ref` relies on to reinterpret
    // `&Parcel` as `&BorrowedParcel`.
    ptr: NonNull<sys::AParcel>,
}
| 54 | |
/// Safety: This type guarantees that it owns the AParcel and that all access to
/// the AParcel happens through the Parcel, so it is ok to send across threads.
/// (Note this only asserts `Send`, not `Sync`: shared references still funnel
/// through `&Parcel` methods.)
unsafe impl Send for Parcel {}
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 58 | |
/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// This object is a borrowed variant of [`Parcel`]. It is a separate type from
/// `&mut Parcel` because it is not valid to `mem::swap` two parcels.
#[repr(transparent)]
pub struct BorrowedParcel<'a> {
    // Non-null pointer to the underlying NDK parcel object; not owned by
    // this struct (no destruction on drop).
    ptr: NonNull<sys::AParcel>,
    // Ties this borrow to a mutable borrow of the owning `Parcel`, so the
    // borrow checker rules out concurrent access through the owner for the
    // lifetime 'a. Zero-sized, so `repr(transparent)` still applies.
    _lifetime: PhantomData<&'a mut Parcel>,
}
| 69 | |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 70 | impl Parcel { |
| 71 | /// Create a new empty `Parcel`. |
| 72 | pub fn new() -> Parcel { |
Andrew Walbran | 2f3ff9f | 2023-07-07 16:58:13 +0100 | [diff] [blame] | 73 | // Safety: If `AParcel_create` succeeds, it always returns |
| 74 | // a valid pointer. If it fails, the process will crash. |
| 75 | let ptr = unsafe { sys::AParcel_create() }; |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 76 | Self { ptr: NonNull::new(ptr).expect("AParcel_create returned null pointer") } |
Alice Ryhl | 05f5a2c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 77 | } |
| 78 | |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 79 | /// Create an owned reference to a parcel object from a raw pointer. |
| 80 | /// |
| 81 | /// # Safety |
| 82 | /// |
| 83 | /// This constructor is safe if the raw pointer parameter is either null |
| 84 | /// (resulting in `None`), or a valid pointer to an `AParcel` object. The |
| 85 | /// parcel object must be owned by the caller prior to this call, as this |
| 86 | /// constructor takes ownership of the parcel and will destroy it on drop. |
| 87 | /// |
| 88 | /// Additionally, the caller must guarantee that it is valid to take |
| 89 | /// ownership of the AParcel object. All future access to the AParcel |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 90 | /// must happen through this `Parcel`. |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 91 | /// |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 92 | /// Because `Parcel` implements `Send`, the pointer must never point to any |
| 93 | /// thread-local data, e.g., a variable on the stack, either directly or |
| 94 | /// indirectly. |
| 95 | pub unsafe fn from_raw(ptr: *mut sys::AParcel) -> Option<Parcel> { |
| 96 | NonNull::new(ptr).map(|ptr| Self { ptr }) |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 97 | } |
| 98 | |
| 99 | /// Consume the parcel, transferring ownership to the caller. |
| 100 | pub(crate) fn into_raw(self) -> *mut sys::AParcel { |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 101 | let ptr = self.ptr.as_ptr(); |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 102 | let _ = ManuallyDrop::new(self); |
| 103 | ptr |
| 104 | } |
| 105 | |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 106 | /// Get a borrowed view into the contents of this `Parcel`. |
| 107 | pub fn borrowed(&mut self) -> BorrowedParcel<'_> { |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 108 | // Safety: The raw pointer is a valid pointer to an AParcel, and the |
| 109 | // lifetime of the returned `BorrowedParcel` is tied to `self`, so the |
| 110 | // borrow checker will ensure that the `AParcel` can only be accessed |
| 111 | // via the `BorrowParcel` until it goes out of scope. |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 112 | BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData } |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 113 | } |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 114 | |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 115 | /// Get an immutable borrowed view into the contents of this `Parcel`. |
| 116 | pub fn borrowed_ref(&self) -> &BorrowedParcel<'_> { |
| 117 | // Safety: Parcel and BorrowedParcel are both represented in the same |
| 118 | // way as a NonNull<sys::AParcel> due to their use of repr(transparent), |
| 119 | // so casting references as done here is valid. |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 120 | unsafe { &*(self as *const Parcel as *const BorrowedParcel<'_>) } |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 121 | } |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 122 | } |
| 123 | |
Andrei Homescu | 72b799d | 2021-09-04 01:39:23 +0000 | [diff] [blame] | 124 | impl Default for Parcel { |
| 125 | fn default() -> Self { |
| 126 | Self::new() |
| 127 | } |
| 128 | } |
| 129 | |
| 130 | impl Clone for Parcel { |
| 131 | fn clone(&self) -> Self { |
| 132 | let mut new_parcel = Self::new(); |
| 133 | new_parcel |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 134 | .borrowed() |
| 135 | .append_all_from(self.borrowed_ref()) |
Andrei Homescu | 72b799d | 2021-09-04 01:39:23 +0000 | [diff] [blame] | 136 | .expect("Failed to append from Parcel"); |
| 137 | new_parcel |
| 138 | } |
| 139 | } |
| 140 | |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 141 | impl<'a> BorrowedParcel<'a> { |
| 142 | /// Create a borrowed reference to a parcel object from a raw pointer. |
| 143 | /// |
| 144 | /// # Safety |
| 145 | /// |
| 146 | /// This constructor is safe if the raw pointer parameter is either null |
| 147 | /// (resulting in `None`), or a valid pointer to an `AParcel` object. |
| 148 | /// |
| 149 | /// Since the raw pointer is not restricted by any lifetime, the lifetime on |
| 150 | /// the returned `BorrowedParcel` object can be chosen arbitrarily by the |
| 151 | /// caller. The caller must ensure it is valid to mutably borrow the AParcel |
| 152 | /// for the duration of the lifetime that the caller chooses. Note that |
| 153 | /// since this is a mutable borrow, it must have exclusive access to the |
| 154 | /// AParcel for the duration of the borrow. |
| 155 | pub unsafe fn from_raw(ptr: *mut sys::AParcel) -> Option<BorrowedParcel<'a>> { |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 156 | Some(Self { ptr: NonNull::new(ptr)?, _lifetime: PhantomData }) |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 157 | } |
| 158 | |
| 159 | /// Get a sub-reference to this reference to the parcel. |
| 160 | pub fn reborrow(&mut self) -> BorrowedParcel<'_> { |
| 161 | // Safety: The raw pointer is a valid pointer to an AParcel, and the |
| 162 | // lifetime of the returned `BorrowedParcel` is tied to `self`, so the |
| 163 | // borrow checker will ensure that the `AParcel` can only be accessed |
| 164 | // via the `BorrowParcel` until it goes out of scope. |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 165 | BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData } |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 166 | } |
| 167 | } |
| 168 | |
/// Safety: The `Parcel` constructors guarantee that a `Parcel` object will
/// always contain a valid pointer to an `AParcel`.
unsafe impl AsNative<sys::AParcel> for Parcel {
    // Expose the raw pointer for const FFI use; does not transfer ownership.
    fn as_native(&self) -> *const sys::AParcel {
        self.ptr.as_ptr()
    }

    // Expose the raw pointer for mutating FFI use; does not transfer ownership.
    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        self.ptr.as_ptr()
    }
}
| 180 | |
/// Safety: The `BorrowedParcel` constructors guarantee that a `BorrowedParcel`
/// object will always contain a valid pointer to an `AParcel`.
unsafe impl<'a> AsNative<sys::AParcel> for BorrowedParcel<'a> {
    // Expose the raw pointer for const FFI use; does not transfer ownership.
    fn as_native(&self) -> *const sys::AParcel {
        self.ptr.as_ptr()
    }

    // Expose the raw pointer for mutating FFI use; does not transfer ownership.
    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        self.ptr.as_ptr()
    }
}
| 192 | |
// Data serialization methods
impl<'a> BorrowedParcel<'a> {
    /// Data written to parcelable is zero'd before being deleted or reallocated.
    pub fn mark_sensitive(&mut self) {
        // Safety: guaranteed to have a parcel object, and this method never fails
        unsafe { sys::AParcel_markSensitive(self.as_native()) }
    }

    /// Write a type that implements [`Serialize`] to the parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(self)
    }

    /// Writes the length of a slice to the parcel.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
    ///
    /// `None` is encoded as the sentinel length `-1i32`; a slice length that
    /// does not fit in an `i32` yields `StatusCode::BAD_VALUE`.
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        if let Some(slice) = slice {
            let len: i32 = slice.len().try_into().or(Err(StatusCode::BAD_VALUE))?;
            self.write(&len)
        } else {
            self.write(&-1i32)
        }
    }

    /// Perform a series of writes to the parcel, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::{Binder, Interface, Parcel};
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(&'b mut WritableSubParcel<'b>) -> Result<()>,
    {
        // Remember where the length placeholder goes, then write 0 there.
        let start = self.get_data_position();
        self.write(&0i32)?;
        {
            let mut subparcel = WritableSubParcel(self.reborrow());
            f(&mut subparcel)?;
        }
        let end = self.get_data_position();
        // Safety: start is less than the current size of the parcel data
        // buffer, because we just got it with `get_data_position`.
        unsafe {
            self.set_data_position(start)?;
        }
        assert!(end >= start);
        // Overwrite the placeholder with the full length: callback data plus
        // the 4-byte length field itself (`end - start` includes both).
        self.write(&(end - start))?;
        // Safety: end is less than the current size of the parcel data
        // buffer, because we just got it with `get_data_position`.
        unsafe {
            self.set_data_position(end)?;
        }
        Ok(())
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        unsafe { sys::AParcel_getDataPosition(self.as_native()) }
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        unsafe { sys::AParcel_getDataSize(self.as_native()) }
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and the caller guarantees that `pos` is within bounds.
        status_result(unsafe { sys::AParcel_setDataPosition(self.as_native(), pos) })
    }

    /// Append a subset of another parcel.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current parcel, or returns an error if not possible.
    pub fn append_from(
        &mut self,
        other: &impl AsNative<sys::AParcel>,
        start: i32,
        size: i32,
    ) -> Result<()> {
        // Safety: `Parcel::appendFrom` from C++ checks that `start`
        // and `size` are in bounds, and returns an error otherwise.
        // Both `self` and `other` always contain valid pointers.
        let status = unsafe {
            sys::AParcel_appendFrom(other.as_native(), self.as_native_mut(), start, size)
        };
        status_result(status)
    }

    /// Append the contents of another parcel.
    pub fn append_all_from(&mut self, other: &impl AsNative<sys::AParcel>) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        let size = unsafe { sys::AParcel_getDataSize(other.as_native()) };
        self.append_from(other, 0, size)
    }
}
| 328 | |
/// A segment of a writable parcel, used for [`BorrowedParcel::sized_write`].
///
/// Wraps a reborrow of the parent parcel; writes land directly in the parent,
/// and `sized_write` patches the length prefix afterwards.
pub struct WritableSubParcel<'a>(BorrowedParcel<'a>);
Stephen Crane | aae7638 | 2020-08-03 14:12:15 -0700 | [diff] [blame] | 331 | |
| 332 | impl<'a> WritableSubParcel<'a> { |
| 333 | /// Write a type that implements [`Serialize`] to the sub-parcel. |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 334 | pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> { |
| 335 | parcelable.serialize(&mut self.0) |
| 336 | } |
| 337 | } |
| 338 | |
// Owned-parcel convenience wrappers: each method borrows `self` and forwards
// to the corresponding `BorrowedParcel` serialization method.
impl Parcel {
    /// Data written to parcelable is zero'd before being deleted or reallocated.
    pub fn mark_sensitive(&mut self) {
        self.borrowed().mark_sensitive()
    }

    /// Write a type that implements [`Serialize`] to the parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        self.borrowed().write(parcelable)
    }

    /// Writes the length of a slice to the parcel.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        self.borrowed().write_slice_size(slice)
    }

    /// Perform a series of writes to the parcel, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::{Binder, Interface, Parcel};
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(&'b mut WritableSubParcel<'b>) -> Result<()>,
    {
        self.borrowed().sized_write(f)
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        self.borrowed_ref().get_data_position()
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        self.borrowed_ref().get_data_size()
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        // Safety: We have the same safety requirements as
        // `BorrowedParcel::set_data_position`.
        unsafe { self.borrowed_ref().set_data_position(pos) }
    }

    /// Append a subset of another parcel.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current parcel, or returns an error if not possible.
    pub fn append_from(
        &mut self,
        other: &impl AsNative<sys::AParcel>,
        start: i32,
        size: i32,
    ) -> Result<()> {
        self.borrowed().append_from(other, start, size)
    }

    /// Append the contents of another parcel.
    pub fn append_all_from(&mut self, other: &impl AsNative<sys::AParcel>) -> Result<()> {
        self.borrowed().append_all_from(other)
    }
}
| 435 | |
// Data deserialization methods
impl<'a> BorrowedParcel<'a> {
    /// Attempt to read a type that implements [`Deserialize`] from this parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(self)
    }

    /// Attempt to read a type that implements [`Deserialize`] from this parcel
    /// onto an existing value. This operation will overwrite the old value
    /// partially or completely, depending on how much data is available.
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        x.deserialize_from(self)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling `ReadableSubParcel::has_more_data` to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// let mut parcelable = Default::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(ReadableSubParcel<'b>) -> Result<()>,
    {
        let start = self.get_data_position();
        let parcelable_size: i32 = self.read()?;
        // The size covers the 4-byte length field itself (matching
        // `sized_write`), so anything below 4 is malformed.
        if parcelable_size < 4 {
            return Err(StatusCode::BAD_VALUE);
        }

        // Guard against i32 overflow before bounds-checking against the
        // parcel's total size.
        let end = start.checked_add(parcelable_size).ok_or(StatusCode::BAD_VALUE)?;
        if end > self.get_data_size() {
            return Err(StatusCode::NOT_ENOUGH_DATA);
        }

        let subparcel = ReadableSubParcel {
            parcel: BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData },
            end_position: end,
        };
        f(subparcel)?;

        // Advance the data position to the actual end,
        // in case the closure read less data than was available.
        //
        // Safety: end must be less than the current size of the parcel, because
        // we checked above against `get_data_size`.
        unsafe {
            self.set_data_position(end)?;
        }

        Ok(())
    }

    /// Read a vector size from the parcel and resize the given output vector to
    /// be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        let len: i32 = self.read()?;

        // A negative length encodes a null array, which is not acceptable for
        // a non-nullable out parameter.
        if len < 0 {
            return Err(StatusCode::UNEXPECTED_NULL);
        }

        // usize in Rust may be 16-bit, so i32 may not fit
        let len = len.try_into().unwrap();
        out_vec.resize_with(len, Default::default);

        Ok(())
    }

    /// Read a vector size from the parcel and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            // Negative length encodes a null array.
            *out_vec = None;
        } else {
            // usize in Rust may be 16-bit, so i32 may not fit
            let len = len.try_into().unwrap();
            let mut vec = Vec::with_capacity(len);
            vec.resize_with(len, Default::default);
            *out_vec = Some(vec);
        }

        Ok(())
    }
}
| 555 | |
Andrei Homescu | b048744 | 2021-05-12 07:16:16 +0000 | [diff] [blame] | 556 | /// A segment of a readable parcel, used for [`Parcel::sized_read`]. |
| 557 | pub struct ReadableSubParcel<'a> { |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 558 | parcel: BorrowedParcel<'a>, |
Andrei Homescu | b048744 | 2021-05-12 07:16:16 +0000 | [diff] [blame] | 559 | end_position: i32, |
| 560 | } |
| 561 | |
| 562 | impl<'a> ReadableSubParcel<'a> { |
| 563 | /// Read a type that implements [`Deserialize`] from the sub-parcel. |
| 564 | pub fn read<D: Deserialize>(&self) -> Result<D> { |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 565 | D::deserialize(&self.parcel) |
Andrei Homescu | b048744 | 2021-05-12 07:16:16 +0000 | [diff] [blame] | 566 | } |
| 567 | |
| 568 | /// Check if the sub-parcel has more data to read |
| 569 | pub fn has_more_data(&self) -> bool { |
| 570 | self.parcel.get_data_position() < self.end_position |
| 571 | } |
| 572 | } |
| 573 | |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 574 | impl Parcel { |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 575 | /// Attempt to read a type that implements [`Deserialize`] from this parcel. |
| 576 | pub fn read<D: Deserialize>(&self) -> Result<D> { |
| 577 | self.borrowed_ref().read() |
| 578 | } |
| 579 | |
| 580 | /// Attempt to read a type that implements [`Deserialize`] from this parcel |
| 581 | /// onto an existing value. This operation will overwrite the old value |
| 582 | /// partially or completely, depending on how much data is available. |
| 583 | pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> { |
| 584 | self.borrowed_ref().read_onto(x) |
| 585 | } |
| 586 | |
| 587 | /// Safely read a sized parcelable. |
| 588 | /// |
| 589 | /// Read the size of a parcelable, compute the end position |
| 590 | /// of that parcelable, then build a sized readable sub-parcel |
| 591 | /// and call a closure with the sub-parcel as its parameter. |
| 592 | /// The closure can keep reading data from the sub-parcel |
| 593 | /// until it runs out of input data. The closure is responsible |
Andrei Homescu | 3e9c13a | 2023-05-09 02:48:22 +0000 | [diff] [blame] | 594 | /// for calling `ReadableSubParcel::has_more_data` to check for |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 595 | /// more data before every read, at least until Rust generators |
| 596 | /// are stabilized. |
| 597 | /// After the closure returns, skip to the end of the current |
| 598 | /// parcelable regardless of how much the closure has read. |
| 599 | /// |
| 600 | /// # Examples |
| 601 | /// |
| 602 | /// ```no_run |
| 603 | /// let mut parcelable = Default::default(); |
| 604 | /// parcel.sized_read(|subparcel| { |
| 605 | /// if subparcel.has_more_data() { |
| 606 | /// parcelable.a = subparcel.read()?; |
| 607 | /// } |
| 608 | /// if subparcel.has_more_data() { |
| 609 | /// parcelable.b = subparcel.read()?; |
| 610 | /// } |
| 611 | /// Ok(()) |
| 612 | /// }); |
| 613 | /// ``` |
| 614 | /// |
| 615 | pub fn sized_read<F>(&self, f: F) -> Result<()> |
| 616 | where |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 617 | for<'b> F: FnOnce(ReadableSubParcel<'b>) -> Result<()>, |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 618 | { |
| 619 | self.borrowed_ref().sized_read(f) |
| 620 | } |
| 621 | |
| 622 | /// Read a vector size from the parcel and resize the given output vector to |
| 623 | /// be correctly sized for that amount of data. |
| 624 | /// |
| 625 | /// This method is used in AIDL-generated server side code for methods that |
| 626 | /// take a mutable slice reference parameter. |
| 627 | pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> { |
| 628 | self.borrowed_ref().resize_out_vec(out_vec) |
| 629 | } |
| 630 | |
| 631 | /// Read a vector size from the parcel and either create a correctly sized |
| 632 | /// vector for that amount of data or set the output parameter to None if |
| 633 | /// the vector should be null. |
| 634 | /// |
| 635 | /// This method is used in AIDL-generated server side code for methods that |
| 636 | /// take a mutable slice reference parameter. |
| 637 | pub fn resize_nullable_out_vec<D: Default + Deserialize>( |
| 638 | &self, |
| 639 | out_vec: &mut Option<Vec<D>>, |
| 640 | ) -> Result<()> { |
| 641 | self.borrowed_ref().resize_nullable_out_vec(out_vec) |
| 642 | } |
| 643 | } |
| 644 | |
// Internal APIs
impl<'a> BorrowedParcel<'a> {
    /// Write a strong binder reference to the parcel, writing a null binder
    /// when `binder` is `None`.
    pub(crate) fn write_binder(&mut self, binder: Option<&SpIBinder>) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`. `AsNative` for `Option<SpIBinder>` will either return
        // null or a valid pointer to an `AIBinder`, both of which are
        // valid, safe inputs to `AParcel_writeStrongBinder`.
        //
        // This call does not take ownership of the binder. However, it does
        // require a mutable pointer, which we cannot extract from an
        // immutable reference, so we clone the binder, incrementing the
        // refcount before the call. The refcount will be immediately
        // decremented when this temporary is dropped.
        unsafe {
            status_result(sys::AParcel_writeStrongBinder(
                self.as_native_mut(),
                binder.cloned().as_native_mut(),
            ))
        }
    }

    /// Read a strong binder reference from the parcel, returning `Ok(None)`
    /// when a null binder was read.
    pub(crate) fn read_binder(&self) -> Result<Option<SpIBinder>> {
        let mut binder = ptr::null_mut();
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`. We pass a valid, mutable out pointer to the `binder`
        // parameter. After this call, `binder` will be either null or a
        // valid pointer to an `AIBinder` owned by the caller.
        let status = unsafe { sys::AParcel_readStrongBinder(self.as_native(), &mut binder) };

        status_result(status)?;

        // Safety: `binder` is either null or a valid, owned pointer at this
        // point, so can be safely passed to `SpIBinder::from_raw`.
        Ok(unsafe { SpIBinder::from_raw(binder) })
    }
}
| 681 | |
impl Drop for Parcel {
    fn drop(&mut self) {
        // Run the C++ Parcel complete object destructor
        //
        // Safety: `Parcel` always contains a valid pointer to an
        // `AParcel`. Since we own the parcel, we can safely delete it
        // here. Any `BorrowedParcel<'_>` views are lifetime-bound to this
        // `Parcel`, so none can still be alive when it is dropped.
        unsafe { sys::AParcel_delete(self.ptr.as_ptr()) }
    }
}
| 692 | |
Alice Ryhl | feba6ca | 2021-08-19 10:47:04 +0000 | [diff] [blame] | 693 | impl fmt::Debug for Parcel { |
| 694 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 695 | f.debug_struct("Parcel").finish() |
Alice Ryhl | feba6ca | 2021-08-19 10:47:04 +0000 | [diff] [blame] | 696 | } |
| 697 | } |
| 698 | |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 699 | impl<'a> fmt::Debug for BorrowedParcel<'a> { |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 700 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
Matthew Maurer | e268a9f | 2022-07-26 09:31:30 -0700 | [diff] [blame] | 701 | f.debug_struct("BorrowedParcel").finish() |
Alice Ryhl | 268458c | 2021-09-15 12:56:10 +0000 | [diff] [blame] | 702 | } |
| 703 | } |
| 704 | |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 705 | #[test] |
| 706 | fn test_read_write() { |
Andrei Homescu | 72b799d | 2021-09-04 01:39:23 +0000 | [diff] [blame] | 707 | let mut parcel = Parcel::new(); |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 708 | let start = parcel.get_data_position(); |
| 709 | |
| 710 | assert_eq!(parcel.read::<bool>(), Err(StatusCode::NOT_ENOUGH_DATA)); |
| 711 | assert_eq!(parcel.read::<i8>(), Err(StatusCode::NOT_ENOUGH_DATA)); |
| 712 | assert_eq!(parcel.read::<u16>(), Err(StatusCode::NOT_ENOUGH_DATA)); |
| 713 | assert_eq!(parcel.read::<i32>(), Err(StatusCode::NOT_ENOUGH_DATA)); |
| 714 | assert_eq!(parcel.read::<u32>(), Err(StatusCode::NOT_ENOUGH_DATA)); |
| 715 | assert_eq!(parcel.read::<i64>(), Err(StatusCode::NOT_ENOUGH_DATA)); |
| 716 | assert_eq!(parcel.read::<u64>(), Err(StatusCode::NOT_ENOUGH_DATA)); |
| 717 | assert_eq!(parcel.read::<f32>(), Err(StatusCode::NOT_ENOUGH_DATA)); |
| 718 | assert_eq!(parcel.read::<f64>(), Err(StatusCode::NOT_ENOUGH_DATA)); |
Stephen Crane | 76072e8 | 2020-08-03 13:09:36 -0700 | [diff] [blame] | 719 | assert_eq!(parcel.read::<Option<String>>(), Ok(None)); |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 720 | assert_eq!(parcel.read::<String>(), Err(StatusCode::UNEXPECTED_NULL)); |
| 721 | |
Alice Ryhl | 8618c48 | 2021-11-09 15:35:35 +0000 | [diff] [blame] | 722 | assert_eq!(parcel.borrowed_ref().read_binder().err(), Some(StatusCode::BAD_TYPE)); |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 723 | |
| 724 | parcel.write(&1i32).unwrap(); |
| 725 | |
Andrew Walbran | 4ed9d77 | 2023-07-21 18:21:05 +0100 | [diff] [blame] | 726 | // SAFETY: start is less than the current size of the parcel data buffer, because we haven't |
| 727 | // made it any shorter since we got the position. |
Stephen Crane | 2a3c250 | 2020-06-16 17:48:35 -0700 | [diff] [blame] | 728 | unsafe { |
| 729 | parcel.set_data_position(start).unwrap(); |
| 730 | } |
| 731 | |
| 732 | let i: i32 = parcel.read().unwrap(); |
| 733 | assert_eq!(i, 1i32); |
| 734 | } |
| 735 | |
#[test]
#[allow(clippy::float_cmp)]
fn test_read_data() {
    // Writes the bytes of "Hello, Binder!\0" once, then repeatedly rewinds
    // and reinterprets the same payload bytes as each primitive type.
    let mut parcel = Parcel::new();
    let str_start = parcel.get_data_position();

    parcel.write(&b"Hello, Binder!\0"[..]).unwrap();
    // Skip over string length
    // SAFETY: str_start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }
    // The byte slice is length-prefixed: b"Hello, Binder!\0" is 15 bytes.
    assert_eq!(parcel.read::<i32>().unwrap(), 15);
    let start = parcel.get_data_position();

    // `start` points at the first payload byte, b'H' (0x48); a bool read of
    // that data is true (non-zero).
    assert!(parcel.read::<bool>().unwrap());

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // b'H' == 72
    assert_eq!(parcel.read::<i8>().unwrap(), 72i8);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // "He" as a little-endian u16: 0x6548 == 25928
    assert_eq!(parcel.read::<u16>().unwrap(), 25928);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // "Hell" as a little-endian i32: 0x6C6C6548 == 1819043144
    assert_eq!(parcel.read::<i32>().unwrap(), 1819043144);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // Same four bytes, read unsigned.
    assert_eq!(parcel.read::<u32>().unwrap(), 1819043144);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // "Hello, B" as a little-endian i64.
    assert_eq!(parcel.read::<i64>().unwrap(), 4764857262830019912);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // Same eight bytes, read unsigned.
    assert_eq!(parcel.read::<u64>().unwrap(), 4764857262830019912);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // "Hell" then "o, B" as f32 bit patterns.
    assert_eq!(parcel.read::<f32>().unwrap(), 1143139100000000000000000000.0);
    assert_eq!(parcel.read::<f32>().unwrap(), 40.043392);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // "Hello, B" as an f64 bit pattern.
    assert_eq!(parcel.read::<f64>().unwrap(), 34732488246.197815);

    // Skip back to before the string length
    // SAFETY: str_start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }

    // Reading as Vec<u8> round-trips the original length-prefixed bytes.
    assert_eq!(parcel.read::<Vec<u8>>().unwrap(), b"Hello, Binder!\0");
}
| 828 | |
#[test]
fn test_utf8_utf16_conversions() {
    // Round-trips single strings, strings with embedded NULs, and string
    // arrays through the parcel, exercising the UTF-8 <-> UTF-16 string
    // conversion paths (per the test name).
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    // Simple string round-trip.
    assert!(parcel.write("Hello, Binder!").is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(parcel.read::<Option<String>>().unwrap().unwrap(), "Hello, Binder!",);
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // An embedded NUL must survive the round-trip; the string is not
    // silently truncated at the NUL.
    assert!(parcel.write("Embedded null \0 inside a string").is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Embedded null \0 inside a string",
    );
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // String arrays: a slice of &str, a slice of owned Strings, and another
    // slice of &str, written back to back.
    assert!(parcel.write(&["str1", "str2", "str3"][..]).is_ok());
    assert!(parcel
        .write(&[String::from("str4"), String::from("str5"), String::from("str6"),][..])
        .is_ok());

    let s1 = "Hello, Binder!";
    let s2 = "This is a utf8 string.";
    let s3 = "Some more text here.";

    assert!(parcel.write(&[s1, s2, s3][..]).is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // All three arrays read back as Vec<String> in write order.
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), ["str1", "str2", "str3"]);
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), ["str4", "str5", "str6"]);
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), [s1, s2, s3]);
}
Stephen Crane | aae7638 | 2020-08-03 14:12:15 -0700 | [diff] [blame] | 883 | |
#[test]
fn test_sized_write() {
    let mut parcel = Parcel::new();
    let start_pos = parcel.get_data_position();

    let values = [1i32, 2i32, 3i32];

    // Write the array inside a size-prefixed sub-parcel.
    parcel
        .sized_write(|sub| sub.write(&values[..]))
        .expect("Could not perform sized write");

    // i32 sub-parcel length + i32 array length + 3 i32 elements = 5 * 4 bytes.
    let expected_len = 20i32;

    assert_eq!(parcel.get_data_position(), start_pos + expected_len);

    // SAFETY: start_pos is less than the current size of the parcel data buffer, because we
    // haven't made it any shorter since we got the position.
    unsafe {
        parcel.set_data_position(start_pos).unwrap();
    }

    // The first value read back is the sub-parcel's length prefix...
    assert_eq!(expected_len, parcel.read().unwrap());

    // ...followed by the array itself.
    assert_eq!(parcel.read::<Vec<i32>>().unwrap(), &values);
}
Andrei Homescu | 72b799d | 2021-09-04 01:39:23 +0000 | [diff] [blame] | 910 | |
| 911 | #[test] |
| 912 | fn test_append_from() { |
| 913 | let mut parcel1 = Parcel::new(); |
| 914 | parcel1.write(&42i32).expect("Could not perform write"); |
| 915 | |
| 916 | let mut parcel2 = Parcel::new(); |
| 917 | assert_eq!(Ok(()), parcel2.append_all_from(&parcel1)); |
| 918 | assert_eq!(4, parcel2.get_data_size()); |
| 919 | assert_eq!(Ok(()), parcel2.append_all_from(&parcel1)); |
| 920 | assert_eq!(8, parcel2.get_data_size()); |
Andrew Walbran | 4ed9d77 | 2023-07-21 18:21:05 +0100 | [diff] [blame] | 921 | // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not |
| 922 | // empty. |
Andrei Homescu | 72b799d | 2021-09-04 01:39:23 +0000 | [diff] [blame] | 923 | unsafe { |
| 924 | parcel2.set_data_position(0).unwrap(); |
| 925 | } |
| 926 | assert_eq!(Ok(42), parcel2.read::<i32>()); |
| 927 | assert_eq!(Ok(42), parcel2.read::<i32>()); |
| 928 | |
| 929 | let mut parcel2 = Parcel::new(); |
| 930 | assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2)); |
| 931 | assert_eq!(Ok(()), parcel2.append_from(&parcel1, 2, 2)); |
| 932 | assert_eq!(4, parcel2.get_data_size()); |
Andrew Walbran | 4ed9d77 | 2023-07-21 18:21:05 +0100 | [diff] [blame] | 933 | // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not |
| 934 | // empty. |
Andrei Homescu | 72b799d | 2021-09-04 01:39:23 +0000 | [diff] [blame] | 935 | unsafe { |
| 936 | parcel2.set_data_position(0).unwrap(); |
| 937 | } |
| 938 | assert_eq!(Ok(42), parcel2.read::<i32>()); |
| 939 | |
| 940 | let mut parcel2 = Parcel::new(); |
| 941 | assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2)); |
| 942 | assert_eq!(2, parcel2.get_data_size()); |
Andrew Walbran | 4ed9d77 | 2023-07-21 18:21:05 +0100 | [diff] [blame] | 943 | // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not |
| 944 | // empty. |
Andrei Homescu | 72b799d | 2021-09-04 01:39:23 +0000 | [diff] [blame] | 945 | unsafe { |
| 946 | parcel2.set_data_position(0).unwrap(); |
| 947 | } |
| 948 | assert_eq!(Err(StatusCode::NOT_ENOUGH_DATA), parcel2.read::<i32>()); |
| 949 | |
| 950 | let mut parcel2 = Parcel::new(); |
| 951 | assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 4, 2)); |
| 952 | assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, 4)); |
| 953 | assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, -1, 4)); |
| 954 | assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, -1)); |
| 955 | } |