Update layout/PageTable memory range to Range<VirtualAddress>
This CL updates the return type of the layout functions and the
parameter type in PageTable memory mapping functions from
Range<usize> to Range<VirtualAddress>. This makes it explicit
that the ranges used here are virtual memory ranges.
Test: atest vmbase_example.integration_test rialto_test
Test: m pvmfw_img
Bug: 284462758
Change-Id: I19d4859a03edffedb00ab2831f43929befcb98d8
diff --git a/pvmfw/src/entry.rs b/pvmfw/src/entry.rs
index 6f96fc0..3d2fea8 100644
--- a/pvmfw/src/entry.rs
+++ b/pvmfw/src/entry.rs
@@ -278,19 +278,19 @@
let scratch = layout::scratch_range();
- assert_ne!(scratch.len(), 0, "scratch memory is empty.");
- assert_eq!(scratch.start % ASM_STP_ALIGN, 0, "scratch memory is misaligned.");
- assert_eq!(scratch.end % ASM_STP_ALIGN, 0, "scratch memory is misaligned.");
+ assert_ne!(scratch.end - scratch.start, 0, "scratch memory is empty.");
+ assert_eq!(scratch.start.0 % ASM_STP_ALIGN, 0, "scratch memory is misaligned.");
+ assert_eq!(scratch.end.0 % ASM_STP_ALIGN, 0, "scratch memory is misaligned.");
- assert!(bcc.is_within(&scratch));
+ assert!(bcc.is_within(&(scratch.start.0..scratch.end.0)));
assert_eq!(bcc.start % ASM_STP_ALIGN, 0, "Misaligned guest BCC.");
assert_eq!(bcc.end % ASM_STP_ALIGN, 0, "Misaligned guest BCC.");
let stack = memory::stack_range();
- assert_ne!(stack.len(), 0, "stack region is empty.");
- assert_eq!(stack.start % ASM_STP_ALIGN, 0, "Misaligned stack region.");
- assert_eq!(stack.end % ASM_STP_ALIGN, 0, "Misaligned stack region.");
+ assert_ne!(stack.end - stack.start, 0, "stack region is empty.");
+ assert_eq!(stack.start.0 % ASM_STP_ALIGN, 0, "Misaligned stack region.");
+ assert_eq!(stack.end.0 % ASM_STP_ALIGN, 0, "Misaligned stack region.");
// Zero all memory that could hold secrets and that can't be safely written to from Rust.
// Disable the exception vector, caches and page table and then jump to the payload at the
@@ -375,11 +375,11 @@
sctlr_el1_val = in(reg) SCTLR_EL1_VAL,
bcc = in(reg) u64::try_from(bcc.start).unwrap(),
bcc_end = in(reg) u64::try_from(bcc.end).unwrap(),
- cache_line = in(reg) u64::try_from(scratch.start).unwrap(),
- scratch = in(reg) u64::try_from(scratch.start).unwrap(),
- scratch_end = in(reg) u64::try_from(scratch.end).unwrap(),
- stack = in(reg) u64::try_from(stack.start).unwrap(),
- stack_end = in(reg) u64::try_from(stack.end).unwrap(),
+ cache_line = in(reg) u64::try_from(scratch.start.0).unwrap(),
+ scratch = in(reg) u64::try_from(scratch.start.0).unwrap(),
+ scratch_end = in(reg) u64::try_from(scratch.end.0).unwrap(),
+ stack = in(reg) u64::try_from(stack.start.0).unwrap(),
+ stack_end = in(reg) u64::try_from(stack.end.0).unwrap(),
dcache_line_size = in(reg) u64::try_from(min_dcache_line_size()).unwrap(),
in("x0") fdt_address,
in("x30") payload_start,
@@ -396,7 +396,7 @@
let range = memory::appended_payload_range();
// SAFETY: This region is mapped and the linker script prevents it from overlapping with other
// objects.
- unsafe { slice::from_raw_parts_mut(range.start as *mut u8, range.len()) }
+ unsafe { slice::from_raw_parts_mut(range.start.0 as *mut u8, range.end - range.start) }
}
enum AppendedConfigType {