/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//! Container for messages that are sent via binder.

use crate::binder::AsNative;
use crate::error::{status_result, Result, StatusCode};
use crate::proxy::SpIBinder;
use crate::sys;
use std::cell::RefCell;
use std::convert::TryInto;
use std::fmt;
use std::marker::PhantomData;
use std::mem::ManuallyDrop;
use std::ptr;

mod file_descriptor;
mod parcelable;
mod parcelable_holder;

pub use self::file_descriptor::ParcelFileDescriptor;
pub use self::parcelable::{
    Deserialize, DeserializeArray, DeserializeOption, Serialize, SerializeArray, SerializeOption,
    Parcelable, NON_NULL_PARCELABLE_FLAG, NULL_PARCELABLE_FLAG,
};
pub use self::parcelable_holder::{ParcelableHolder, ParcelableMetadata};

/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// A Parcel can contain both serialized data that will be deserialized on the
/// other side of the IPC, and references to live Binder objects that will
/// result in the other side receiving a proxy Binder connected with the
/// original Binder in the Parcel.
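///
/// # Examples
///
/// A minimal sketch of a write/read round trip (error handling elided; the
/// `binder::Parcel` import follows the other doc examples in this module):
///
/// ```no_run
/// # use binder::Parcel;
/// let mut parcel = Parcel::new();
/// let start = parcel.get_data_position();
/// parcel.write("hello").expect("write failed");
/// // Rewind before reading; see `set_data_position` for why this is unsafe.
/// unsafe { parcel.set_data_position(start).expect("rewind failed"); }
/// let s: Option<String> = parcel.read().expect("read failed");
/// assert_eq!(s.as_deref(), Some("hello"));
/// ```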
pub enum Parcel {
    /// Owned parcel pointer
    Owned(*mut sys::AParcel),
    /// Borrowed parcel pointer (will not be destroyed on drop)
    Borrowed(*mut sys::AParcel),
}

/// A variant of Parcel that is known to be owned.
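///
/// # Examples
///
/// A brief sketch of typical use (marked `ignore` because the public
/// re-export path for `OwnedParcel` is not shown in this file):
///
/// ```ignore
/// let mut owned = OwnedParcel::new();
/// owned.borrowed().write(&42i32)?;
/// let parcel: Parcel = owned.into_parcel();
/// ```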
pub struct OwnedParcel {
    ptr: *mut sys::AParcel,
}

/// # Safety
///
/// This type guarantees that it owns the AParcel and that all access to
/// the AParcel happens through the OwnedParcel, so it is ok to send across
/// threads.
unsafe impl Send for OwnedParcel {}

/// A variant of Parcel that is known to be borrowed.
pub struct BorrowedParcel<'a> {
    inner: Parcel,
    _lifetime: PhantomData<&'a mut Parcel>,
}

impl OwnedParcel {
    /// Create a new empty `OwnedParcel`.
    pub fn new() -> OwnedParcel {
        let ptr = unsafe {
            // Safety: If `AParcel_create` succeeds, it always returns
            // a valid pointer. If it fails, the process will crash.
            sys::AParcel_create()
        };
        assert!(!ptr.is_null());
        Self { ptr }
    }

    /// Create an owned reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object. The
    /// parcel object must be owned by the caller prior to this call, as this
    /// constructor takes ownership of the parcel and will destroy it on drop.
    ///
    /// Additionally, the caller must guarantee that it is valid to take
    /// ownership of the AParcel object. All future access to the AParcel
    /// must happen through this `OwnedParcel`.
    ///
    /// Because `OwnedParcel` implements `Send`, the pointer must never point
    /// to any thread-local data, e.g., a variable on the stack, either directly
    /// or indirectly.
    pub unsafe fn from_raw(ptr: *mut sys::AParcel) -> Option<OwnedParcel> {
        ptr.as_mut().map(|ptr| Self { ptr })
    }

    /// Consume the parcel, transferring ownership to the caller.
    pub(crate) fn into_raw(self) -> *mut sys::AParcel {
        let ptr = self.ptr;
        let _ = ManuallyDrop::new(self);
        ptr
    }

    /// Convert this `OwnedParcel` into an owned `Parcel`.
    pub fn into_parcel(self) -> Parcel {
        Parcel::Owned(self.into_raw())
    }

    /// Get a borrowed view into the contents of this `OwnedParcel`.
    pub fn borrowed(&mut self) -> BorrowedParcel<'_> {
        BorrowedParcel {
            inner: Parcel::Borrowed(self.ptr),
            _lifetime: PhantomData,
        }
    }
}

impl Default for OwnedParcel {
    fn default() -> Self {
        Self::new()
    }
}

impl Clone for OwnedParcel {
    fn clone(&self) -> Self {
        let mut new_parcel = Self::new();
        new_parcel
            .borrowed()
            .append_all_from(&Parcel::Borrowed(self.ptr))
            .expect("Failed to append from Parcel");
        new_parcel
    }
}

impl<'a> std::ops::Deref for BorrowedParcel<'a> {
    type Target = Parcel;
    fn deref(&self) -> &Parcel {
        &self.inner
    }
}
impl<'a> std::ops::DerefMut for BorrowedParcel<'a> {
    fn deref_mut(&mut self) -> &mut Parcel {
        &mut self.inner
    }
}

/// # Safety
///
/// The `Parcel` constructors guarantee that a `Parcel` object will always
/// contain a valid pointer to an `AParcel`.
unsafe impl AsNative<sys::AParcel> for Parcel {
    fn as_native(&self) -> *const sys::AParcel {
        match *self {
            Self::Owned(x) | Self::Borrowed(x) => x,
        }
    }

    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        match *self {
            Self::Owned(x) | Self::Borrowed(x) => x,
        }
    }
}

impl Parcel {
    /// Create a new empty `Parcel`.
    ///
    /// Creates a new owned, empty parcel that can be written to using the
    /// serialization methods, and that can receive data from other parcels
    /// via `append_from` and `append_all_from`.
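    ///
    /// # Examples
    ///
    /// A short sketch of creating parcels and appending one to another
    /// (error handling elided):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut source = Parcel::new();
    /// source.write(&1i32).expect("write failed");
    ///
    /// let mut dest = Parcel::new();
    /// dest.append_all_from(&source).expect("append failed");
    /// assert_eq!(dest.get_data_size(), source.get_data_size());
    /// ```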
    pub fn new() -> Parcel {
        let parcel = unsafe {
            // Safety: If `AParcel_create` succeeds, it always returns
            // a valid pointer. If it fails, the process will crash.
            sys::AParcel_create()
        };
        assert!(!parcel.is_null());
        Self::Owned(parcel)
    }

    /// Create a borrowed reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object.
    pub(crate) unsafe fn borrowed(ptr: *mut sys::AParcel) -> Option<Parcel> {
        ptr.as_mut().map(|ptr| Self::Borrowed(ptr))
    }
}

impl Default for Parcel {
    fn default() -> Self {
        Self::new()
    }
}

impl Clone for Parcel {
    fn clone(&self) -> Self {
        let mut new_parcel = Self::new();
        new_parcel
            .append_all_from(self)
            .expect("Failed to append from Parcel");
        new_parcel
    }
}

// Data serialization methods
impl Parcel {
    /// Data written to this parcel will be zeroed before its backing memory
    /// is deleted or reallocated.
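    ///
    /// # Examples
    ///
    /// A brief sketch of marking a parcel before writing secret material
    /// (the payload below is purely illustrative):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// parcel.mark_sensitive();
    /// parcel.write(&b"secret bytes"[..]).expect("write failed");
    /// ```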
    pub fn mark_sensitive(&mut self) {
        unsafe {
            // Safety: guaranteed to have a parcel object, and this method never fails
            sys::AParcel_markSensitive(self.as_native())
        }
    }

    /// Write a type that implements [`Serialize`] to the `Parcel`.
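    ///
    /// # Examples
    ///
    /// A small sketch of writing a few values in sequence (the values are
    /// arbitrary):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// parcel.write(&true).expect("write bool");
    /// parcel.write(&7i32).expect("write i32");
    /// parcel.write("a string").expect("write str");
    /// ```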
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(self)
    }

    /// Writes the length of a slice to the `Parcel`.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
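    ///
    /// # Examples
    ///
    /// A minimal sketch of the two cases (slice contents are arbitrary):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// // Writes the length 3.
    /// parcel.write_slice_size(Some(&[0u8, 1, 2][..])).expect("write size");
    /// // Writes -1 to mark a null array.
    /// parcel.write_slice_size::<u8>(None).expect("write null size");
    /// ```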
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        if let Some(slice) = slice {
            let len: i32 = slice.len().try_into().or(Err(StatusCode::BAD_VALUE))?;
            self.write(&len)
        } else {
            self.write(&-1i32)
        }
    }

    /// Perform a series of writes to the `Parcel`, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::Parcel;
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'a> F: Fn(&'a WritableSubParcel<'a>) -> Result<()>,
    {
        let start = self.get_data_position();
        self.write(&0i32)?;
        {
            let subparcel = WritableSubParcel(RefCell::new(self));
            f(&subparcel)?;
        }
        let end = self.get_data_position();
        unsafe {
            self.set_data_position(start)?;
        }
        assert!(end >= start);
        self.write(&(end - start))?;
        unsafe {
            self.set_data_position(end)?;
        }
        Ok(())
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        unsafe {
            // Safety: `Parcel` always contains a valid pointer to an `AParcel`,
            // and this call is otherwise safe.
            sys::AParcel_getDataPosition(self.as_native())
        }
    }

    /// Returns the total size of the parcel.
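    ///
    /// # Examples
    ///
    /// A short sketch (an `i32` occupies four bytes of parcel data):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// assert_eq!(parcel.get_data_size(), 0);
    /// parcel.write(&1i32).expect("write failed");
    /// assert_eq!(parcel.get_data_size(), 4);
    /// ```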
    pub fn get_data_size(&self) -> i32 {
        unsafe {
            // Safety: `Parcel` always contains a valid pointer to an `AParcel`,
            // and this call is otherwise safe.
            sys::AParcel_getDataSize(self.as_native())
        }
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
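    ///
    /// # Examples
    ///
    /// The typical pattern (also used by the tests at the bottom of this
    /// file) is to record a position, write, and rewind before reading back:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// let start = parcel.get_data_position();
    /// parcel.write(&1i32).expect("write failed");
    /// unsafe {
    ///     // Safe because `start` came from `get_data_position` on this parcel.
    ///     parcel.set_data_position(start).expect("rewind failed");
    /// }
    /// assert_eq!(parcel.read::<i32>().expect("read failed"), 1);
    /// ```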
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        status_result(sys::AParcel_setDataPosition(self.as_native(), pos))
    }

    /// Append a subset of another `Parcel`.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current `Parcel`, or returns an error if not possible.
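    ///
    /// # Examples
    ///
    /// A brief sketch of copying the first four bytes of one parcel into
    /// another (offsets and sizes are in bytes):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut source = Parcel::new();
    /// source.write(&1i32).expect("write failed");
    ///
    /// let mut dest = Parcel::new();
    /// dest.append_from(&source, 0, 4).expect("append failed");
    /// ```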
    pub fn append_from(&mut self, other: &Self, start: i32, size: i32) -> Result<()> {
        let status = unsafe {
            // Safety: `Parcel::appendFrom` from C++ checks that `start`
            // and `size` are in bounds, and returns an error otherwise.
            // Both `self` and `other` always contain valid pointers.
            sys::AParcel_appendFrom(
                other.as_native(),
                self.as_native_mut(),
                start,
                size,
            )
        };
        status_result(status)
    }

    /// Append the contents of another `Parcel`.
    pub fn append_all_from(&mut self, other: &Self) -> Result<()> {
        self.append_from(other, 0, other.get_data_size())
    }
}

/// A segment of a writable parcel, used for [`Parcel::sized_write`].
pub struct WritableSubParcel<'a>(RefCell<&'a mut Parcel>);

impl<'a> WritableSubParcel<'a> {
    /// Write a type that implements [`Serialize`] to the sub-parcel.
    pub fn write<S: Serialize + ?Sized>(&self, parcelable: &S) -> Result<()> {
        parcelable.serialize(&mut *self.0.borrow_mut())
    }
}

// Data deserialization methods
impl Parcel {
    /// Attempt to read a type that implements [`Deserialize`] from this
    /// `Parcel`.
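    ///
    /// # Examples
    ///
    /// A minimal sketch of writing a value and reading it back (rewinding
    /// first, because reads start at the current data position):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// parcel.write(&25i64).expect("write failed");
    /// unsafe { parcel.set_data_position(0).expect("rewind failed"); }
    /// let x: i64 = parcel.read().expect("read failed");
    /// assert_eq!(x, 25);
    /// ```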
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(self)
    }

    /// Attempt to read a type that implements [`Deserialize`] from this
    /// `Parcel` onto an existing value. This operation will overwrite the old
    /// value partially or completely, depending on how much data is available.
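    ///
    /// # Examples
    ///
    /// A small sketch of overwriting an existing value in place (the initial
    /// value is arbitrary):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// parcel.write(&42i32).expect("write failed");
    /// unsafe { parcel.set_data_position(0).expect("rewind failed"); }
    ///
    /// let mut x = 0i32;
    /// parcel.read_onto(&mut x).expect("read failed");
    /// assert_eq!(x, 42);
    /// ```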
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        x.deserialize_from(self)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling [`ReadableSubParcel::has_more_data`] to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// # let parcel = Parcel::new();
    /// // An illustrative parcelable with two fields.
    /// #[derive(Default)]
    /// struct Example { a: i32, b: String }
    ///
    /// let mut parcelable = Example::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, mut f: F) -> Result<()>
    where
        for<'a> F: FnMut(ReadableSubParcel<'a>) -> Result<()>
    {
        let start = self.get_data_position();
        let parcelable_size: i32 = self.read()?;
        if parcelable_size < 0 {
            return Err(StatusCode::BAD_VALUE);
        }

        let end = start.checked_add(parcelable_size)
            .ok_or(StatusCode::BAD_VALUE)?;
        if end > self.get_data_size() {
            return Err(StatusCode::NOT_ENOUGH_DATA);
        }

        let subparcel = ReadableSubParcel {
            parcel: self,
            end_position: end,
        };
        f(subparcel)?;

        // Advance the data position to the actual end,
        // in case the closure read less data than was available
        unsafe {
            self.set_data_position(end)?;
        }

        Ok(())
    }

    /// Read a vector size from the `Parcel` and resize the given output vector
    /// to be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
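    ///
    /// # Examples
    ///
    /// A short sketch of the pattern the generated code follows: write a
    /// length, rewind, then size an output buffer from it (values arbitrary):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// parcel.write_slice_size(Some(&[0i32; 3][..])).expect("write size");
    /// unsafe { parcel.set_data_position(0).expect("rewind failed"); }
    ///
    /// let mut out = Vec::<i32>::new();
    /// parcel.resize_out_vec(&mut out).expect("resize failed");
    /// assert_eq!(out.len(), 3);
    /// ```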
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            return Err(StatusCode::UNEXPECTED_NULL);
        }

        // usize in Rust may be 16-bit, so i32 may not fit
        let len = len.try_into().unwrap();
        out_vec.resize_with(len, Default::default);

        Ok(())
    }

    /// Read a vector size from the `Parcel` and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            *out_vec = None;
        } else {
            // usize in Rust may be 16-bit, so i32 may not fit
            let len = len.try_into().unwrap();
            let mut vec = Vec::with_capacity(len);
            vec.resize_with(len, Default::default);
            *out_vec = Some(vec);
        }

        Ok(())
    }
}

/// A segment of a readable parcel, used for [`Parcel::sized_read`].
pub struct ReadableSubParcel<'a> {
    parcel: &'a Parcel,
    end_position: i32,
}

impl<'a> ReadableSubParcel<'a> {
    /// Read a type that implements [`Deserialize`] from the sub-parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        // The caller should have checked this,
        // but it can't hurt to double-check
        assert!(self.has_more_data());
        D::deserialize(self.parcel)
    }

    /// Check if the sub-parcel has more data to read
    pub fn has_more_data(&self) -> bool {
        self.parcel.get_data_position() < self.end_position
    }
}

// Internal APIs
impl Parcel {
    pub(crate) fn write_binder(&mut self, binder: Option<&SpIBinder>) -> Result<()> {
        unsafe {
            // Safety: `Parcel` always contains a valid pointer to an
            // `AParcel`. `AsNative` for `Option<SpIBinder>` will either return
            // null or a valid pointer to an `AIBinder`, both of which are
            // valid, safe inputs to `AParcel_writeStrongBinder`.
            //
            // This call does not take ownership of the binder. However, it does
            // require a mutable pointer, which we cannot extract from an
            // immutable reference, so we clone the binder, incrementing the
            // refcount before the call. The refcount will be immediately
            // decremented when this temporary is dropped.
            status_result(sys::AParcel_writeStrongBinder(
                self.as_native_mut(),
                binder.cloned().as_native_mut(),
            ))
        }
    }

    pub(crate) fn read_binder(&self) -> Result<Option<SpIBinder>> {
        let mut binder = ptr::null_mut();
        let status = unsafe {
            // Safety: `Parcel` always contains a valid pointer to an
            // `AParcel`. We pass a valid, mutable out pointer to the `binder`
            // parameter. After this call, `binder` will be either null or a
            // valid pointer to an `AIBinder` owned by the caller.
            sys::AParcel_readStrongBinder(self.as_native(), &mut binder)
        };

        status_result(status)?;

        Ok(unsafe {
            // Safety: `binder` is either null or a valid, owned pointer at this
            // point, so can be safely passed to `SpIBinder::from_raw`.
            SpIBinder::from_raw(binder)
        })
    }
}

impl Drop for Parcel {
    fn drop(&mut self) {
        // Run the C++ Parcel complete object destructor
        if let Self::Owned(ptr) = *self {
            unsafe {
                // Safety: `Parcel` always contains a valid pointer to an
                // `AParcel`. If we own the parcel, we can safely delete it
                // here.
                sys::AParcel_delete(ptr)
            }
        }
    }
}

impl Drop for OwnedParcel {
    fn drop(&mut self) {
        // Run the C++ Parcel complete object destructor
        unsafe {
            // Safety: `OwnedParcel` always contains a valid pointer to an
            // `AParcel`. Since we own the parcel, we can safely delete it
            // here.
            sys::AParcel_delete(self.ptr)
        }
    }
}

impl fmt::Debug for Parcel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Parcel")
            .finish()
    }
}

impl fmt::Debug for OwnedParcel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("OwnedParcel")
            .finish()
    }
}

#[test]
fn test_read_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert_eq!(parcel.read::<bool>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i8>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u16>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<Option<String>>(), Ok(None));
    assert_eq!(parcel.read::<String>(), Err(StatusCode::UNEXPECTED_NULL));

    assert_eq!(parcel.read_binder().err(), Some(StatusCode::BAD_TYPE));

    parcel.write(&1i32).unwrap();

    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    let i: i32 = parcel.read().unwrap();
    assert_eq!(i, 1i32);
}

#[test]
#[allow(clippy::float_cmp)]
fn test_read_data() {
    let mut parcel = Parcel::new();
    let str_start = parcel.get_data_position();

    parcel.write(&b"Hello, Binder!\0"[..]).unwrap();
    // Skip over string length
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }
    assert_eq!(parcel.read::<i32>().unwrap(), 15);
    let start = parcel.get_data_position();

    assert!(parcel.read::<bool>().unwrap());

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i8>().unwrap(), 72i8);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u16>().unwrap(), 25928);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i32>().unwrap(), 1819043144);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u32>().unwrap(), 1819043144);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i64>().unwrap(), 4764857262830019912);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u64>().unwrap(), 4764857262830019912);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(
        parcel.read::<f32>().unwrap(),
        1143139100000000000000000000.0
    );
    assert_eq!(parcel.read::<f32>().unwrap(), 40.043392);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<f64>().unwrap(), 34732488246.197815);

    // Skip back to before the string length
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }

    assert_eq!(parcel.read::<Vec<u8>>().unwrap(), b"Hello, Binder!\0");
}

#[test]
fn test_utf8_utf16_conversions() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert!(parcel.write("Hello, Binder!").is_ok());
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Hello, Binder!",
    );
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write("Embedded null \0 inside a string").is_ok());
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Embedded null \0 inside a string",
    );
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write(&["str1", "str2", "str3"][..]).is_ok());
    assert!(parcel
        .write(
            &[
                String::from("str4"),
                String::from("str5"),
                String::from("str6"),
            ][..]
        )
        .is_ok());

    let s1 = "Hello, Binder!";
    let s2 = "This is a utf8 string.";
    let s3 = "Some more text here.";

    assert!(parcel.write(&[s1, s2, s3][..]).is_ok());
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(
        parcel.read::<Vec<String>>().unwrap(),
        ["str1", "str2", "str3"]
    );
    assert_eq!(
        parcel.read::<Vec<String>>().unwrap(),
        ["str4", "str5", "str6"]
    );
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), [s1, s2, s3]);
}

#[test]
fn test_sized_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    let arr = [1i32, 2i32, 3i32];

    parcel.sized_write(|subparcel| {
        subparcel.write(&arr[..])
    }).expect("Could not perform sized write");

    // i32 sub-parcel length + i32 array length + 3 i32 elements
    let expected_len = 20i32;

    assert_eq!(parcel.get_data_position(), start + expected_len);

    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    assert_eq!(
        expected_len,
        parcel.read().unwrap(),
    );

    assert_eq!(
        parcel.read::<Vec<i32>>().unwrap(),
        &arr,
    );
}

#[test]
fn test_append_from() {
    let mut parcel1 = Parcel::new();
    parcel1.write(&42i32).expect("Could not perform write");

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(4, parcel2.get_data_size());
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(8, parcel2.get_data_size());
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 2, 2));
    assert_eq!(4, parcel2.get_data_size());
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(2, parcel2.get_data_size());
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Err(StatusCode::NOT_ENOUGH_DATA), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 4, 2));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, -1, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, -1));
| 815 | } |