blob: 73915610d8b950dd1e0d87f815e56c3e595eb0e6 [file] [log] [blame]
Stephen Crane2a3c2502020-06-16 17:48:35 -07001/*
2 * Copyright (C) 2020 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//! Container for messages that are sent via binder.
18
19use crate::binder::AsNative;
20use crate::error::{status_result, Result, StatusCode};
21use crate::proxy::SpIBinder;
22use crate::sys;
23
Stephen Craneaae76382020-08-03 14:12:15 -070024use std::cell::RefCell;
Stephen Crane2a3c2502020-06-16 17:48:35 -070025use std::convert::TryInto;
26use std::mem::ManuallyDrop;
27use std::ptr;
Alice Ryhlfeba6ca2021-08-19 10:47:04 +000028use std::fmt;
Stephen Crane2a3c2502020-06-16 17:48:35 -070029
30mod file_descriptor;
31mod parcelable;
Andrei Homescuea406212021-09-03 02:55:00 +000032mod parcelable_holder;
Stephen Crane2a3c2502020-06-16 17:48:35 -070033
34pub use self::file_descriptor::ParcelFileDescriptor;
35pub use self::parcelable::{
36 Deserialize, DeserializeArray, DeserializeOption, Serialize, SerializeArray, SerializeOption,
Andrei Homescu083e3532021-09-08 00:36:18 +000037 Parcelable, NON_NULL_PARCELABLE_FLAG, NULL_PARCELABLE_FLAG,
Stephen Crane2a3c2502020-06-16 17:48:35 -070038};
Andrei Homescuea406212021-09-03 02:55:00 +000039pub use self::parcelable_holder::{ParcelableHolder, ParcelableMetadata};
Stephen Crane2a3c2502020-06-16 17:48:35 -070040
/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// A Parcel can contain both serialized data that will be deserialized on the
/// other side of the IPC, and references to live Binder objects that will
/// result in the other side receiving a proxy Binder connected with the
/// original Binder in the Parcel.
///
/// Invariant: both variants always hold a valid, non-null `AParcel` pointer
/// (see the safety comment on the `AsNative` impl); the constructors
/// (`new`, `borrowed`, `owned`) enforce this.
pub enum Parcel {
    /// Owned parcel pointer; the `Drop` impl calls `AParcel_delete` on it.
    Owned(*mut sys::AParcel),
    /// Borrowed parcel pointer (will not be destroyed on drop)
    Borrowed(*mut sys::AParcel),
}
54
55/// # Safety
56///
57/// The `Parcel` constructors guarantee that a `Parcel` object will always
58/// contain a valid pointer to an `AParcel`.
59unsafe impl AsNative<sys::AParcel> for Parcel {
60 fn as_native(&self) -> *const sys::AParcel {
61 match *self {
62 Self::Owned(x) | Self::Borrowed(x) => x,
63 }
64 }
65
66 fn as_native_mut(&mut self) -> *mut sys::AParcel {
67 match *self {
68 Self::Owned(x) | Self::Borrowed(x) => x,
69 }
70 }
71}
72
73impl Parcel {
Andrei Homescu72b799d2021-09-04 01:39:23 +000074 /// Create a new empty `Parcel`.
75 ///
76 /// Creates a new owned empty parcel that can be written to
77 /// using the serialization methods and appended to and
78 /// from using `append_from` and `append_from_all`.
79 pub fn new() -> Parcel {
80 let parcel = unsafe {
81 // Safety: If `AParcel_create` succeeds, it always returns
82 // a valid pointer. If it fails, the process will crash.
83 sys::AParcel_create()
84 };
85 assert!(!parcel.is_null());
86 Self::Owned(parcel)
87 }
88
Stephen Crane2a3c2502020-06-16 17:48:35 -070089 /// Create a borrowed reference to a parcel object from a raw pointer.
90 ///
91 /// # Safety
92 ///
93 /// This constructor is safe if the raw pointer parameter is either null
94 /// (resulting in `None`), or a valid pointer to an `AParcel` object.
95 pub(crate) unsafe fn borrowed(ptr: *mut sys::AParcel) -> Option<Parcel> {
96 ptr.as_mut().map(|ptr| Self::Borrowed(ptr))
97 }
98
99 /// Create an owned reference to a parcel object from a raw pointer.
100 ///
101 /// # Safety
102 ///
103 /// This constructor is safe if the raw pointer parameter is either null
104 /// (resulting in `None`), or a valid pointer to an `AParcel` object. The
105 /// parcel object must be owned by the caller prior to this call, as this
106 /// constructor takes ownership of the parcel and will destroy it on drop.
107 pub(crate) unsafe fn owned(ptr: *mut sys::AParcel) -> Option<Parcel> {
108 ptr.as_mut().map(|ptr| Self::Owned(ptr))
109 }
110
111 /// Consume the parcel, transferring ownership to the caller if the parcel
112 /// was owned.
113 pub(crate) fn into_raw(mut self) -> *mut sys::AParcel {
114 let ptr = self.as_native_mut();
115 let _ = ManuallyDrop::new(self);
116 ptr
117 }
Alice Ryhlfeba6ca2021-08-19 10:47:04 +0000118
119 pub(crate) fn is_owned(&self) -> bool {
120 match *self {
121 Self::Owned(_) => true,
122 Self::Borrowed(_) => false,
123 }
124 }
Stephen Crane2a3c2502020-06-16 17:48:35 -0700125}
126
Andrei Homescu72b799d2021-09-04 01:39:23 +0000127impl Default for Parcel {
128 fn default() -> Self {
129 Self::new()
130 }
131}
132
133impl Clone for Parcel {
134 fn clone(&self) -> Self {
135 let mut new_parcel = Self::new();
136 new_parcel
137 .append_all_from(self)
138 .expect("Failed to append from Parcel");
139 new_parcel
140 }
141}
142
// Data serialization methods
impl Parcel {
    /// Data written to parcelable is zero'd before being deleted or reallocated.
    pub fn mark_sensitive(&mut self) {
        unsafe {
            // Safety: guaranteed to have a parcel object, and this method never fails
            sys::AParcel_markSensitive(self.as_native())
        }
    }

    /// Write a type that implements [`Serialize`] to the `Parcel`.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(self)
    }

    /// Writes the length of a slice to the `Parcel`.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
    ///
    /// `None` is encoded as the sentinel length `-1`; a slice whose length
    /// does not fit in an `i32` yields `BAD_VALUE`.
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        if let Some(slice) = slice {
            let len: i32 = slice.len().try_into().or(Err(StatusCode::BAD_VALUE))?;
            self.write(&len)
        } else {
            self.write(&-1i32)
        }
    }

    /// Perform a series of writes to the `Parcel`, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length elemement itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::{Binder, Interface, Parcel};
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where for<'a>
        F: Fn(&'a WritableSubParcel<'a>) -> Result<()>
    {
        // Remember where the length placeholder goes, then write it as 0.
        let start = self.get_data_position();
        self.write(&0i32)?;
        {
            // The sub-parcel borrows self mutably via RefCell; the scope
            // ensures the borrow ends before we reposition below.
            let subparcel = WritableSubParcel(RefCell::new(self));
            f(&subparcel)?;
        }
        let end = self.get_data_position();
        // Safety: `start` was returned by get_data_position earlier, so it is
        // within the current data buffer.
        unsafe {
            self.set_data_position(start)?;
        }
        // Positions only move forward during the callback's writes, so the
        // subtraction below cannot underflow.
        assert!(end >= start);
        // Back-patch the placeholder with the real length (includes the 4
        // bytes of the length field itself), then restore the position.
        self.write(&(end - start))?;
        unsafe {
            self.set_data_position(end)?;
        }
        Ok(())
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        unsafe {
            // Safety: `Parcel` always contains a valid pointer to an `AParcel`,
            // and this call is otherwise safe.
            sys::AParcel_getDataPosition(self.as_native())
        }
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        unsafe {
            // Safety: `Parcel` always contains a valid pointer to an `AParcel`,
            // and this call is otherwise safe.
            sys::AParcel_getDataSize(self.as_native())
        }
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        status_result(sys::AParcel_setDataPosition(self.as_native(), pos))
    }

    /// Append a subset of another `Parcel`.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current `Parcel`, or returns an error if not possible.
    pub fn append_from(&mut self, other: &Self, start: i32, size: i32) -> Result<()> {
        let status = unsafe {
            // Safety: `Parcel::appendFrom` from C++ checks that `start`
            // and `size` are in bounds, and returns an error otherwise.
            // Both `self` and `other` always contain valid pointers.
            sys::AParcel_appendFrom(
                other.as_native(),
                self.as_native_mut(),
                start,
                size,
            )
        };
        status_result(status)
    }

    /// Append the contents of another `Parcel`.
    pub fn append_all_from(&mut self, other: &Self) -> Result<()> {
        self.append_from(other, 0, other.get_data_size())
    }
}
275
Stephen Craneaae76382020-08-03 14:12:15 -0700276/// A segment of a writable parcel, used for [`Parcel::sized_write`].
277pub struct WritableSubParcel<'a>(RefCell<&'a mut Parcel>);
278
279impl<'a> WritableSubParcel<'a> {
280 /// Write a type that implements [`Serialize`] to the sub-parcel.
281 pub fn write<S: Serialize + ?Sized>(&self, parcelable: &S) -> Result<()> {
282 parcelable.serialize(&mut *self.0.borrow_mut())
283 }
284}
285
// Data deserialization methods
impl Parcel {
    /// Attempt to read a type that implements [`Deserialize`] from this
    /// `Parcel`.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(self)
    }

    /// Attempt to read a type that implements [`Deserialize`] from this
    /// `Parcel` onto an existing value. This operation will overwrite the old
    /// value partially or completely, depending on how much data is available.
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        x.deserialize_from(self)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling [`ReadableSubParcel::has_more_data`] to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// let mut parcelable = Default::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, mut f: F) -> Result<()>
    where
        for<'a> F: FnMut(ReadableSubParcel<'a>) -> Result<()>
    {
        let start = self.get_data_position();
        let parcelable_size: i32 = self.read()?;
        // A negative size is malformed input, not "null" — reject it.
        if parcelable_size < 0 {
            return Err(StatusCode::BAD_VALUE);
        }

        // checked_add guards against i32 overflow from a huge claimed size.
        let end = start.checked_add(parcelable_size)
            .ok_or(StatusCode::BAD_VALUE)?;
        // The declared payload must fit inside the data actually present.
        if end > self.get_data_size() {
            return Err(StatusCode::NOT_ENOUGH_DATA);
        }

        let subparcel = ReadableSubParcel {
            parcel: self,
            end_position: end,
        };
        f(subparcel)?;

        // Advance the data position to the actual end,
        // in case the closure read less data than was available
        unsafe {
            self.set_data_position(end)?;
        }

        Ok(())
    }

    /// Read a vector size from the `Parcel` and resize the given output vector
    /// to be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        let len: i32 = self.read()?;

        // A negative length would mean "null", which is not allowed here.
        if len < 0 {
            return Err(StatusCode::UNEXPECTED_NULL);
        }

        // usize in Rust may be 16-bit, so i32 may not fit
        let len = len.try_into().unwrap();
        out_vec.resize_with(len, Default::default);

        Ok(())
    }

    /// Read a vector size from the `Parcel` and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            // Negative length encodes a null vector.
            *out_vec = None;
        } else {
            // usize in Rust may be 16-bit, so i32 may not fit
            let len = len.try_into().unwrap();
            let mut vec = Vec::with_capacity(len);
            vec.resize_with(len, Default::default);
            *out_vec = Some(vec);
        }

        Ok(())
    }
}
404
Andrei Homescub0487442021-05-12 07:16:16 +0000405/// A segment of a readable parcel, used for [`Parcel::sized_read`].
406pub struct ReadableSubParcel<'a> {
407 parcel: &'a Parcel,
408 end_position: i32,
409}
410
411impl<'a> ReadableSubParcel<'a> {
412 /// Read a type that implements [`Deserialize`] from the sub-parcel.
413 pub fn read<D: Deserialize>(&self) -> Result<D> {
414 // The caller should have checked this,
415 // but it can't hurt to double-check
416 assert!(self.has_more_data());
417 D::deserialize(self.parcel)
418 }
419
420 /// Check if the sub-parcel has more data to read
421 pub fn has_more_data(&self) -> bool {
422 self.parcel.get_data_position() < self.end_position
423 }
424}
425
// Internal APIs
impl Parcel {
    /// Write a strong binder reference (or null for `None`) into the parcel.
    pub(crate) fn write_binder(&mut self, binder: Option<&SpIBinder>) -> Result<()> {
        unsafe {
            // Safety: `Parcel` always contains a valid pointer to an
            // `AParcel`. `AsNative` for `Option<SpIBinder`> will either return
            // null or a valid pointer to an `AIBinder`, both of which are
            // valid, safe inputs to `AParcel_writeStrongBinder`.
            //
            // This call does not take ownership of the binder. However, it does
            // require a mutable pointer, which we cannot extract from an
            // immutable reference, so we clone the binder, incrementing the
            // refcount before the call. The refcount will be immediately
            // decremented when this temporary is dropped.
            status_result(sys::AParcel_writeStrongBinder(
                self.as_native_mut(),
                binder.cloned().as_native_mut(),
            ))
        }
    }

    /// Read a strong binder reference from the parcel, returning `None` if a
    /// null binder was encoded.
    pub(crate) fn read_binder(&self) -> Result<Option<SpIBinder>> {
        let mut binder = ptr::null_mut();
        let status = unsafe {
            // Safety: `Parcel` always contains a valid pointer to an
            // `AParcel`. We pass a valid, mutable out pointer to the `binder`
            // parameter. After this call, `binder` will be either null or a
            // valid pointer to an `AIBinder` owned by the caller.
            sys::AParcel_readStrongBinder(self.as_native(), &mut binder)
        };

        status_result(status)?;

        Ok(unsafe {
            // Safety: `binder` is either null or a valid, owned pointer at this
            // point, so can be safely passed to `SpIBinder::from_raw`.
            SpIBinder::from_raw(binder)
        })
    }
}
466
467impl Drop for Parcel {
468 fn drop(&mut self) {
469 // Run the C++ Parcel complete object destructor
470 if let Self::Owned(ptr) = *self {
471 unsafe {
472 // Safety: `Parcel` always contains a valid pointer to an
473 // `AParcel`. If we own the parcel, we can safely delete it
474 // here.
475 sys::AParcel_delete(ptr)
476 }
477 }
478 }
479}
480
Alice Ryhlfeba6ca2021-08-19 10:47:04 +0000481impl fmt::Debug for Parcel {
482 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
483 f.debug_struct("Parcel")
484 .finish()
485 }
486}
487
#[test]
fn test_read_write() {
    // An empty parcel: every primitive read should report NOT_ENOUGH_DATA.
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert_eq!(parcel.read::<bool>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i8>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u16>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    // A nullable string on an empty parcel deserializes as None...
    assert_eq!(parcel.read::<Option<String>>(), Ok(None));
    // ...but a non-nullable string treats that as an unexpected null.
    assert_eq!(parcel.read::<String>(), Err(StatusCode::UNEXPECTED_NULL));

    assert_eq!(parcel.read_binder().err(), Some(StatusCode::BAD_TYPE));

    // Round-trip a single i32: write, rewind, read back.
    parcel.write(&1i32).unwrap();

    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    let i: i32 = parcel.read().unwrap();
    assert_eq!(i, 1i32);
}
516
#[test]
#[allow(clippy::float_cmp)]
fn test_read_data() {
    // Write a byte slice, then repeatedly rewind to the same offset and
    // reinterpret the raw payload bytes as each primitive type in turn.
    let mut parcel = Parcel::new();
    let str_start = parcel.get_data_position();

    parcel.write(&b"Hello, Binder!\0"[..]).unwrap();
    // Skip over string length
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }
    assert_eq!(parcel.read::<i32>().unwrap(), 15);
    let start = parcel.get_data_position();

    // All expected values below are the little-endian reinterpretation of
    // the leading bytes of "Hello, Binder!\0" ('H' = 0x48, 'e' = 0x65, ...).
    assert!(parcel.read::<bool>().unwrap());

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i8>().unwrap(), 72i8);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u16>().unwrap(), 25928);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i32>().unwrap(), 1819043144);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u32>().unwrap(), 1819043144);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i64>().unwrap(), 4764857262830019912);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u64>().unwrap(), 4764857262830019912);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(
        parcel.read::<f32>().unwrap(),
        1143139100000000000000000000.0
    );
    assert_eq!(parcel.read::<f32>().unwrap(), 40.043392);

    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<f64>().unwrap(), 34732488246.197815);

    // Skip back to before the string length
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }

    assert_eq!(parcel.read::<Vec<u8>>().unwrap(), b"Hello, Binder!\0");
}
592
#[test]
fn test_utf8_utf16_conversions() {
    // Strings cross the parcel boundary as UTF-16; verify they round-trip
    // back to the original UTF-8, including embedded NUL bytes.
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert!(parcel.write("Hello, Binder!").is_ok());
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Hello, Binder!",
    );
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // An interior NUL must survive the round trip unchanged.
    assert!(parcel.write("Embedded null \0 inside a string").is_ok());
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Embedded null \0 inside a string",
    );
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    // String slices and owned Strings should serialize identically.
    assert!(parcel.write(&["str1", "str2", "str3"][..]).is_ok());
    assert!(parcel
        .write(
            &[
                String::from("str4"),
                String::from("str5"),
                String::from("str6"),
            ][..]
        )
        .is_ok());

    let s1 = "Hello, Binder!";
    let s2 = "This is a utf8 string.";
    let s3 = "Some more text here.";

    assert!(parcel.write(&[s1, s2, s3][..]).is_ok());
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(
        parcel.read::<Vec<String>>().unwrap(),
        ["str1", "str2", "str3"]
    );
    assert_eq!(
        parcel.read::<Vec<String>>().unwrap(),
        ["str4", "str5", "str6"]
    );
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), [s1, s2, s3]);
}
Stephen Craneaae76382020-08-03 14:12:15 -0700652
#[test]
fn test_sized_write() {
    // Verify the layout produced by sized_write: a leading i32 byte count
    // (which includes itself) followed by the callback's own writes.
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    let arr = [1i32, 2i32, 3i32];

    parcel.sized_write(|subparcel| {
        subparcel.write(&arr[..])
    }).expect("Could not perform sized write");

    // i32 sub-parcel length + i32 array length + 3 i32 elements
    let expected_len = 20i32;

    assert_eq!(parcel.get_data_position(), start + expected_len);

    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    // The back-patched length prefix must equal the total bytes written.
    assert_eq!(
        expected_len,
        parcel.read().unwrap(),
    );

    assert_eq!(
        parcel.read::<Vec<i32>>().unwrap(),
        &arr,
    );
}
Andrei Homescu72b799d2021-09-04 01:39:23 +0000683
#[test]
fn test_append_from() {
    // Source parcel holding a single i32 (4 bytes).
    let mut parcel1 = Parcel::new();
    parcel1.write(&42i32).expect("Could not perform write");

    // Appending the whole parcel twice yields two readable copies.
    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(4, parcel2.get_data_size());
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(8, parcel2.get_data_size());
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());
    assert_eq!(Ok(42), parcel2.read::<i32>());

    // Appending the value in two 2-byte halves reassembles it intact.
    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 2, 2));
    assert_eq!(4, parcel2.get_data_size());
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());

    // Only half the value present: reading a full i32 must fail.
    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(2, parcel2.get_data_size());
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Err(StatusCode::NOT_ENOUGH_DATA), parcel2.read::<i32>());

    // Out-of-bounds or negative ranges are rejected with BAD_VALUE.
    let mut parcel2 = Parcel::new();
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 4, 2));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, -1, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, -1));
}