/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//! Container for messages that are sent via binder.

use crate::binder::AsNative;
use crate::error::{status_result, Result, StatusCode};
use crate::proxy::SpIBinder;
use crate::sys;

use std::cell::RefCell;
use std::convert::TryInto;
use std::fmt;
use std::mem::ManuallyDrop;
use std::ptr;

mod file_descriptor;
mod parcelable;

pub use self::file_descriptor::ParcelFileDescriptor;
pub use self::parcelable::{
    Deserialize, DeserializeArray, DeserializeOption, Serialize, SerializeArray, SerializeOption,
    Parcelable, NON_NULL_PARCELABLE_FLAG, NULL_PARCELABLE_FLAG,
};
/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// A Parcel can contain both serialized data that will be deserialized on the
/// other side of the IPC, and references to live Binder objects that will
/// result in the other side receiving a proxy Binder connected with the
/// original Binder in the Parcel.
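///
/// A minimal round-trip sketch (the same pattern exercised by the unit
/// tests in this module): write a value into an owned parcel, rewind the
/// data position, and read the value back.
///
/// ```no_run
/// # use binder::Parcel;
/// let mut parcel = Parcel::new();
/// let start = parcel.get_data_position();
/// parcel.write(&42i32).unwrap();
/// unsafe {
///     // Safety: `start` was returned by `get_data_position` on this parcel.
///     parcel.set_data_position(start).unwrap();
/// }
/// assert_eq!(parcel.read::<i32>().unwrap(), 42);
/// ```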
pub enum Parcel {
    /// Owned parcel pointer
    Owned(*mut sys::AParcel),
    /// Borrowed parcel pointer (will not be destroyed on drop)
    Borrowed(*mut sys::AParcel),
}

/// # Safety
///
/// The `Parcel` constructors guarantee that a `Parcel` object will always
/// contain a valid pointer to an `AParcel`.
unsafe impl AsNative<sys::AParcel> for Parcel {
    fn as_native(&self) -> *const sys::AParcel {
        match *self {
            Self::Owned(x) | Self::Borrowed(x) => x,
        }
    }

    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        match *self {
            Self::Owned(x) | Self::Borrowed(x) => x,
        }
    }
}

impl Parcel {
    /// Create a new empty `Parcel`.
    ///
    /// Creates a new owned, empty parcel that can be written to using the
    /// serialization methods, and appended to from another parcel using
    /// `append_from` and `append_all_from`.
    pub fn new() -> Parcel {
        let parcel = unsafe {
            // Safety: If `AParcel_create` succeeds, it always returns
            // a valid pointer. If it fails, the process will crash.
            sys::AParcel_create()
        };
        assert!(!parcel.is_null());
        Self::Owned(parcel)
    }

    /// Create a borrowed reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object.
    pub(crate) unsafe fn borrowed(ptr: *mut sys::AParcel) -> Option<Parcel> {
        ptr.as_mut().map(|ptr| Self::Borrowed(ptr))
    }

    /// Create an owned reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object. The
    /// parcel object must be owned by the caller prior to this call, as this
    /// constructor takes ownership of the parcel and will destroy it on drop.
    pub(crate) unsafe fn owned(ptr: *mut sys::AParcel) -> Option<Parcel> {
        ptr.as_mut().map(|ptr| Self::Owned(ptr))
    }

    /// Consume the parcel, transferring ownership to the caller if the parcel
    /// was owned.
    pub(crate) fn into_raw(mut self) -> *mut sys::AParcel {
        let ptr = self.as_native_mut();
        let _ = ManuallyDrop::new(self);
        ptr
    }

    pub(crate) fn is_owned(&self) -> bool {
        match *self {
            Self::Owned(_) => true,
            Self::Borrowed(_) => false,
        }
    }
}

impl Default for Parcel {
    fn default() -> Self {
        Self::new()
    }
}

impl Clone for Parcel {
    fn clone(&self) -> Self {
        let mut new_parcel = Self::new();
        new_parcel
            .append_all_from(self)
            .expect("Failed to append from Parcel");
        new_parcel
    }
}

// Data serialization methods
impl Parcel {
    /// Data written to the parcel will be zeroed before the backing memory
    /// is deleted or reallocated.
    pub fn mark_sensitive(&mut self) {
        unsafe {
            // Safety: guaranteed to have a parcel object, and this method never fails
            sys::AParcel_markSensitive(self.as_native())
        }
    }

    /// Write a type that implements [`Serialize`] to the `Parcel`.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(self)
    }

    /// Writes the length of a slice to the `Parcel`.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
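    ///
    /// A short sketch of the length prefix this produces (mirroring the
    /// implementation below): a present slice writes its length as an `i32`,
    /// while `None` writes `-1`.
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// // Writes `3i32` for a three-element slice...
    /// parcel.write_slice_size(Some(&[1u8, 2u8, 3u8][..])).unwrap();
    /// // ...and `-1i32` for a null (`None`) slice.
    /// parcel.write_slice_size::<u8>(None).unwrap();
    /// ```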
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        if let Some(slice) = slice {
            let len: i32 = slice.len().try_into().or(Err(StatusCode::BAD_VALUE))?;
            self.write(&len)
        } else {
            self.write(&-1i32)
        }
    }

    /// Perform a series of writes to the `Parcel`, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::Parcel;
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'a> F: Fn(&'a WritableSubParcel<'a>) -> Result<()>,
    {
        let start = self.get_data_position();
        self.write(&0i32)?;
        {
            let subparcel = WritableSubParcel(RefCell::new(self));
            f(&subparcel)?;
        }
        let end = self.get_data_position();
        unsafe {
            self.set_data_position(start)?;
        }
        assert!(end >= start);
        self.write(&(end - start))?;
        unsafe {
            self.set_data_position(end)?;
        }
        Ok(())
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        unsafe {
            // Safety: `Parcel` always contains a valid pointer to an `AParcel`,
            // and this call is otherwise safe.
            sys::AParcel_getDataPosition(self.as_native())
        }
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        unsafe {
            // Safety: `Parcel` always contains a valid pointer to an `AParcel`,
            // and this call is otherwise safe.
            sys::AParcel_getDataSize(self.as_native())
        }
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        status_result(sys::AParcel_setDataPosition(self.as_native(), pos))
    }

    /// Append a subset of another `Parcel`.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current `Parcel`, or returns an error if not possible.
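    ///
    /// A brief sketch (mirroring the `test_append_from` test at the bottom
    /// of this module): copy the full contents of one parcel into another.
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut source = Parcel::new();
    /// source.write(&42i32).unwrap();
    ///
    /// let mut dest = Parcel::new();
    /// // Append all four bytes written above; `append_all_from` is the
    /// // whole-parcel shorthand for this call.
    /// dest.append_from(&source, 0, source.get_data_size()).unwrap();
    /// assert_eq!(dest.get_data_size(), 4);
    /// ```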
    pub fn append_from(&mut self, other: &Self, start: i32, size: i32) -> Result<()> {
        let status = unsafe {
            // Safety: `Parcel::appendFrom` from C++ checks that `start`
            // and `size` are in bounds, and returns an error otherwise.
            // Both `self` and `other` always contain valid pointers.
            sys::AParcel_appendFrom(
                other.as_native(),
                self.as_native_mut(),
                start,
                size,
            )
        };
        status_result(status)
    }

    /// Append the contents of another `Parcel`.
    pub fn append_all_from(&mut self, other: &Self) -> Result<()> {
        self.append_from(other, 0, other.get_data_size())
    }
}

/// A segment of a writable parcel, used for [`Parcel::sized_write`].
pub struct WritableSubParcel<'a>(RefCell<&'a mut Parcel>);

impl<'a> WritableSubParcel<'a> {
    /// Write a type that implements [`Serialize`] to the sub-parcel.
    pub fn write<S: Serialize + ?Sized>(&self, parcelable: &S) -> Result<()> {
        parcelable.serialize(&mut *self.0.borrow_mut())
    }
}

// Data deserialization methods
impl Parcel {
    /// Attempt to read a type that implements [`Deserialize`] from this
    /// `Parcel`.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(self)
    }

    /// Attempt to read a type that implements [`Deserialize`] from this
    /// `Parcel` onto an existing value. This operation will overwrite the old
    /// value partially or completely, depending on how much data is available.
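    ///
    /// A minimal sketch: deserialize into an existing variable instead of
    /// constructing a new value.
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// let start = parcel.get_data_position();
    /// parcel.write(&7i32).unwrap();
    /// unsafe {
    ///     // Safety: rewinding to a position obtained from this parcel.
    ///     parcel.set_data_position(start).unwrap();
    /// }
    /// let mut value = 0i32;
    /// parcel.read_onto(&mut value).unwrap();
    /// assert_eq!(value, 7);
    /// ```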
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        x.deserialize_from(self)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling [`ReadableSubParcel::has_more_data`] to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```ignore
    /// let mut parcelable = Default::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, mut f: F) -> Result<()>
    where
        for<'a> F: FnMut(ReadableSubParcel<'a>) -> Result<()>,
    {
        let start = self.get_data_position();
        let parcelable_size: i32 = self.read()?;
        if parcelable_size < 0 {
            return Err(StatusCode::BAD_VALUE);
        }

        let end = start.checked_add(parcelable_size)
            .ok_or(StatusCode::BAD_VALUE)?;
        if end > self.get_data_size() {
            return Err(StatusCode::NOT_ENOUGH_DATA);
        }

        let subparcel = ReadableSubParcel {
            parcel: self,
            end_position: end,
        };
        f(subparcel)?;

        // Advance the data position to the actual end,
        // in case the closure read less data than was available
        unsafe {
            self.set_data_position(end)?;
        }

        Ok(())
    }

    /// Read a vector size from the `Parcel` and resize the given output vector
    /// to be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
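    ///
    /// A short sketch of the server-side pattern, assuming the client wrote
    /// a slice length first (as `write_slice_size` does):
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// let start = parcel.get_data_position();
    /// parcel.write_slice_size(Some(&[0i32, 0, 0][..])).unwrap();
    /// unsafe {
    ///     // Safety: rewinding to a position obtained from this parcel.
    ///     parcel.set_data_position(start).unwrap();
    /// }
    /// let mut out: Vec<i32> = Vec::new();
    /// parcel.resize_out_vec(&mut out).unwrap();
    /// assert_eq!(out.len(), 3);
    /// ```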
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            return Err(StatusCode::UNEXPECTED_NULL);
        }

        // usize in Rust may be 16-bit, so i32 may not fit
        let len = len.try_into().unwrap();
        out_vec.resize_with(len, Default::default);

        Ok(())
    }

    /// Read a vector size from the `Parcel` and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
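    ///
    /// A brief sketch of the nullable case: a negative length (as written by
    /// `write_slice_size(None)`) sets the output to `None`.
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// let start = parcel.get_data_position();
    /// parcel.write_slice_size::<i32>(None).unwrap();
    /// unsafe {
    ///     // Safety: rewinding to a position obtained from this parcel.
    ///     parcel.set_data_position(start).unwrap();
    /// }
    /// let mut out: Option<Vec<i32>> = Some(vec![1, 2, 3]);
    /// parcel.resize_nullable_out_vec(&mut out).unwrap();
    /// assert!(out.is_none());
    /// ```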
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            *out_vec = None;
        } else {
            // usize in Rust may be 16-bit, so i32 may not fit
            let len = len.try_into().unwrap();
            let mut vec = Vec::with_capacity(len);
            vec.resize_with(len, Default::default);
            *out_vec = Some(vec);
        }

        Ok(())
    }
}

/// A segment of a readable parcel, used for [`Parcel::sized_read`].
pub struct ReadableSubParcel<'a> {
    parcel: &'a Parcel,
    end_position: i32,
}

impl<'a> ReadableSubParcel<'a> {
    /// Read a type that implements [`Deserialize`] from the sub-parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        // The caller should have checked this,
        // but it can't hurt to double-check
        assert!(self.has_more_data());
        D::deserialize(self.parcel)
    }

    /// Check if the sub-parcel has more data to read
    pub fn has_more_data(&self) -> bool {
        self.parcel.get_data_position() < self.end_position
    }
}

// Internal APIs
impl Parcel {
    pub(crate) fn write_binder(&mut self, binder: Option<&SpIBinder>) -> Result<()> {
        unsafe {
            // Safety: `Parcel` always contains a valid pointer to an
            // `AParcel`. `AsNative` for `Option<SpIBinder>` will either return
            // null or a valid pointer to an `AIBinder`, both of which are
            // valid, safe inputs to `AParcel_writeStrongBinder`.
            //
            // This call does not take ownership of the binder. However, it does
            // require a mutable pointer, which we cannot extract from an
            // immutable reference, so we clone the binder, incrementing the
            // refcount before the call. The refcount will be immediately
            // decremented when this temporary is dropped.
            status_result(sys::AParcel_writeStrongBinder(
                self.as_native_mut(),
                binder.cloned().as_native_mut(),
            ))
        }
    }

    pub(crate) fn read_binder(&self) -> Result<Option<SpIBinder>> {
        let mut binder = ptr::null_mut();
        let status = unsafe {
            // Safety: `Parcel` always contains a valid pointer to an
            // `AParcel`. We pass a valid, mutable out pointer to the `binder`
            // parameter. After this call, `binder` will be either null or a
            // valid pointer to an `AIBinder` owned by the caller.
            sys::AParcel_readStrongBinder(self.as_native(), &mut binder)
        };

        status_result(status)?;

        Ok(unsafe {
            // Safety: `binder` is either null or a valid, owned pointer at this
            // point, so can be safely passed to `SpIBinder::from_raw`.
            SpIBinder::from_raw(binder)
        })
    }
}

impl Drop for Parcel {
    fn drop(&mut self) {
        // Run the C++ Parcel complete object destructor
        if let Self::Owned(ptr) = *self {
            unsafe {
                // Safety: `Parcel` always contains a valid pointer to an
                // `AParcel`. If we own the parcel, we can safely delete it
                // here.
                sys::AParcel_delete(ptr)
            }
        }
    }
}

impl fmt::Debug for Parcel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Parcel")
            .finish()
    }
}

#[test]
fn test_read_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert_eq!(parcel.read::<bool>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i8>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u16>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<Option<String>>(), Ok(None));
    assert_eq!(parcel.read::<String>(), Err(StatusCode::UNEXPECTED_NULL));

    assert_eq!(parcel.read_binder().err(), Some(StatusCode::BAD_TYPE));

    parcel.write(&1i32).unwrap();

    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    let i: i32 = parcel.read().unwrap();
    assert_eq!(i, 1i32);
}

#[test]
#[allow(clippy::float_cmp)]
fn test_read_data() {
    let mut parcel = Parcel::new();
    let str_start = parcel.get_data_position();

    parcel.write(&b"Hello, Binder!\0"[..]).unwrap();
    // Skip over string length
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }
    assert_eq!(parcel.read::<i32>().unwrap(), 15);
    let start = parcel.get_data_position();

    assert!(parcel.read::<bool>().unwrap());

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i8>().unwrap(), 72i8);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u16>().unwrap(), 25928);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i32>().unwrap(), 1819043144);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u32>().unwrap(), 1819043144);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i64>().unwrap(), 4764857262830019912);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u64>().unwrap(), 4764857262830019912);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(
        parcel.read::<f32>().unwrap(),
        1143139100000000000000000000.0
    );
    assert_eq!(parcel.read::<f32>().unwrap(), 40.043392);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<f64>().unwrap(), 34732488246.197815);

    // Skip back to before the string length
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }

    assert_eq!(parcel.read::<Vec<u8>>().unwrap(), b"Hello, Binder!\0");
}

#[test]
fn test_utf8_utf16_conversions() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert!(parcel.write("Hello, Binder!").is_ok());
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Hello, Binder!",
    );
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write("Embedded null \0 inside a string").is_ok());
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Embedded null \0 inside a string",
    );
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write(&["str1", "str2", "str3"][..]).is_ok());
    assert!(parcel
        .write(
            &[
                String::from("str4"),
                String::from("str5"),
                String::from("str6"),
            ][..]
        )
        .is_ok());

    let s1 = "Hello, Binder!";
    let s2 = "This is a utf8 string.";
    let s3 = "Some more text here.";

    assert!(parcel.write(&[s1, s2, s3][..]).is_ok());
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(
        parcel.read::<Vec<String>>().unwrap(),
        ["str1", "str2", "str3"]
    );
    assert_eq!(
        parcel.read::<Vec<String>>().unwrap(),
        ["str4", "str5", "str6"]
    );
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), [s1, s2, s3]);
}

#[test]
fn test_sized_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    let arr = [1i32, 2i32, 3i32];

    parcel.sized_write(|subparcel| {
        subparcel.write(&arr[..])
    }).expect("Could not perform sized write");

    // i32 sub-parcel length + i32 array length + 3 i32 elements
    let expected_len = 20i32;

    assert_eq!(parcel.get_data_position(), start + expected_len);

    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    assert_eq!(
        expected_len,
        parcel.read().unwrap(),
    );

    assert_eq!(
        parcel.read::<Vec<i32>>().unwrap(),
        &arr,
    );
}

#[test]
fn test_append_from() {
    let mut parcel1 = Parcel::new();
    parcel1.write(&42i32).expect("Could not perform write");

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(4, parcel2.get_data_size());
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(8, parcel2.get_data_size());
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 2, 2));
    assert_eq!(4, parcel2.get_data_size());
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(2, parcel2.get_data_size());
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Err(StatusCode::NOT_ENOUGH_DATA), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 4, 2));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, -1, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, -1));
| 720 | } |