Add randomized padding around shared library mappings.
Improve ASLR by increasing the randomly sized gaps between shared
library mappings, keeping them mapped PROT_NONE.
Bug: 158113540
Test: look at /proc/$$/maps
Change-Id: Ie72c84047fb624fe2ac8b7744b2a2d0d255ea974
diff --git a/linker/linker.cpp b/linker/linker.cpp
index edf0329..9301f8c 100644
--- a/linker/linker.cpp
+++ b/linker/linker.cpp
@@ -301,13 +301,14 @@
return;
}
- if (si->base != 0 && si->size != 0) {
+ void* start = reinterpret_cast<void*>(si->has_min_version(6) ? si->get_map_start() : si->base);
+ size_t size = si->has_min_version(6) ? si->get_map_size() : si->size;
+ if (start != nullptr && size != 0) {
if (!si->is_mapped_by_caller()) {
- munmap(reinterpret_cast<void*>(si->base), si->size);
+ munmap(start, size);
} else {
// remap the region as PROT_NONE, MAP_ANONYMOUS | MAP_NORESERVE
- mmap(reinterpret_cast<void*>(si->base), si->size, PROT_NONE,
- MAP_FIXED | MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
+ mmap(start, size, PROT_NONE, MAP_FIXED | MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
}
}
@@ -599,6 +600,8 @@
si_->load_bias = elf_reader.load_bias();
si_->phnum = elf_reader.phdr_count();
si_->phdr = elf_reader.loaded_phdr();
+ si_->set_map_start(elf_reader.map_start());
+ si_->set_map_size(elf_reader.map_size());
return true;
}
diff --git a/linker/linker_phdr.cpp b/linker/linker_phdr.cpp
index 9b7a461..74d7a31 100644
--- a/linker/linker_phdr.cpp
+++ b/linker/linker_phdr.cpp
@@ -520,7 +520,8 @@
// Reserve a virtual address range such that if it's limits were extended to the next 2**align
// boundary, it would not overlap with any existing mappings.
-static void* ReserveAligned(size_t size, size_t align) {
+static void* ReserveWithAlignmentPadding(size_t size, size_t align, void** map_start,
+ size_t* map_size) {
int mmap_flags = MAP_PRIVATE | MAP_ANONYMOUS;
if (align == PAGE_SIZE) {
void* mmap_ptr = mmap(nullptr, size, PROT_NONE, mmap_flags, -1, 0);
@@ -532,7 +533,12 @@
// Allocate enough space so that the end of the desired region aligned up is still inside the
// mapping.
- size_t mmap_size = align_up(size, align) + align - PAGE_SIZE;
+#if defined(__LP64__)
+ constexpr size_t kLibraryPadding = 1ul << 24;
+#else
+ constexpr size_t kLibraryPadding = 0;
+#endif
+ size_t mmap_size = align_up(size + kLibraryPadding, align) + align - PAGE_SIZE;
uint8_t* mmap_ptr =
reinterpret_cast<uint8_t*>(mmap(nullptr, mmap_size, PROT_NONE, mmap_flags, -1, 0));
if (mmap_ptr == MAP_FAILED) {
@@ -546,8 +552,18 @@
// created. Don't randomize then.
size_t n = is_first_stage_init() ? 0 : arc4random_uniform((last - first) / PAGE_SIZE + 1);
uint8_t* start = first + n * PAGE_SIZE;
+ // Unmap the extra space around the allocation.
+ // Keep it mapped PROT_NONE on 64-bit targets where address space is plentiful to make it harder
+ // to defeat ASLR by probing for readable memory mappings.
+#if defined(__LP64__)
+ *map_start = mmap_ptr;
+ *map_size = mmap_size;
+#else
munmap(mmap_ptr, start - mmap_ptr);
munmap(start + size, mmap_ptr + mmap_size - (start + size));
+ *map_start = start;
+ *map_size = size;
+#endif
return start;
}
@@ -571,13 +587,15 @@
load_size_ - address_space->reserved_size, load_size_, name_.c_str());
return false;
}
- start = ReserveAligned(load_size_, kLibraryAlignment);
+ start = ReserveWithAlignmentPadding(load_size_, kLibraryAlignment, &map_start_, &map_size_);
if (start == nullptr) {
DL_ERR("couldn't reserve %zd bytes of address space for \"%s\"", load_size_, name_.c_str());
return false;
}
} else {
start = address_space->start_addr;
+ map_start_ = start;
+ map_size_ = load_size_;
mapped_by_caller_ = true;
// Update the reserved address space to subtract the space used by this library.
diff --git a/linker/linker_phdr.h b/linker/linker_phdr.h
index 5d1cfc2..18881ca 100644
--- a/linker/linker_phdr.h
+++ b/linker/linker_phdr.h
@@ -49,6 +49,8 @@
size_t phdr_count() const { return phdr_num_; }
ElfW(Addr) load_start() const { return reinterpret_cast<ElfW(Addr)>(load_start_); }
size_t load_size() const { return load_size_; }
+ ElfW(Addr) map_start() const { return reinterpret_cast<ElfW(Addr)>(map_start_); }
+ size_t map_size() const { return map_size_; }
ElfW(Addr) load_bias() const { return load_bias_; }
const ElfW(Phdr)* loaded_phdr() const { return loaded_phdr_; }
const ElfW(Dyn)* dynamic() const { return dynamic_; }
@@ -96,6 +98,10 @@
void* load_start_;
// Size in bytes of reserved address space.
size_t load_size_;
+ // First page of reserved address space including randomized padding.
+ void* map_start_;
+ // Size in bytes of reserved address space including randomized padding.
+ size_t map_size_;
// Load bias.
ElfW(Addr) load_bias_;
diff --git a/linker/linker_soinfo.cpp b/linker/linker_soinfo.cpp
index 4f67003..088c17d 100644
--- a/linker/linker_soinfo.cpp
+++ b/linker/linker_soinfo.cpp
@@ -900,6 +900,24 @@
g_soinfo_handles_map[handle_] = this;
}
+void soinfo::set_map_start(ElfW(Addr) map_start) {
+ CHECK(has_min_version(6));
+ map_start_ = map_start;
+}
+ElfW(Addr) soinfo::get_map_start() const {
+ CHECK(has_min_version(6));
+ return map_start_;
+}
+
+void soinfo::set_map_size(size_t map_size) {
+ CHECK(has_min_version(6));
+ map_size_ = map_size;
+}
+size_t soinfo::get_map_size() const {
+ CHECK(has_min_version(6));
+ return map_size_;
+}
+
// TODO(dimitry): Move SymbolName methods to a separate file.
uint32_t calculate_elf_hash(const char* name) {
diff --git a/linker/linker_soinfo.h b/linker/linker_soinfo.h
index e1a3c30..0ecf8d8 100644
--- a/linker/linker_soinfo.h
+++ b/linker/linker_soinfo.h
@@ -66,7 +66,7 @@
#define FLAG_PRELINKED 0x00000400 // prelink_image has successfully processed this soinfo
#define FLAG_NEW_SOINFO 0x40000000 // new soinfo format
-#define SOINFO_VERSION 5
+#define SOINFO_VERSION 6
ElfW(Addr) call_ifunc_resolver(ElfW(Addr) resolver_addr);
@@ -345,6 +345,12 @@
SymbolLookupLib get_lookup_lib();
+ void set_map_start(ElfW(Addr) map_start);
+ ElfW(Addr) get_map_start() const;
+
+ void set_map_size(size_t map_size);
+ size_t get_map_size() const;
+
private:
bool is_image_linked() const;
void set_image_linked();
@@ -423,6 +429,10 @@
// version >= 5
std::unique_ptr<soinfo_tls> tls_;
std::vector<TlsDynamicResolverArg> tlsdesc_args_;
+
+ // version >= 6
+ ElfW(Addr) map_start_;
+ size_t map_size_;
};
// This function is used by dlvsym() to calculate hash of sym_ver