Revert "Add randomized padding around shared library mappings."

This reverts commit a8cf3fef2a.

Reason for revert: memory regression due to the fragmentation of the page tables
Bug: 159810641
Bug: 158113540

Change-Id: I6212c623ff440c7f6889f0a1e82cf7a96200a411
This commit is contained in:
Evgenii Stepanov 2020-07-06 19:25:43 +00:00
parent a8cf3fef2a
commit 474f2f5c8b
5 changed files with 8 additions and 63 deletions

View file

@ -301,14 +301,13 @@ static void soinfo_free(soinfo* si) {
return; return;
} }
void* start = reinterpret_cast<void*>(si->has_min_version(6) ? si->get_map_start() : si->base); if (si->base != 0 && si->size != 0) {
size_t size = si->has_min_version(6) ? si->get_map_size() : si->size;
if (start != nullptr && size != 0) {
if (!si->is_mapped_by_caller()) { if (!si->is_mapped_by_caller()) {
munmap(start, size); munmap(reinterpret_cast<void*>(si->base), si->size);
} else { } else {
// remap the region as PROT_NONE, MAP_ANONYMOUS | MAP_NORESERVE // remap the region as PROT_NONE, MAP_ANONYMOUS | MAP_NORESERVE
mmap(start, size, PROT_NONE, MAP_FIXED | MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0); mmap(reinterpret_cast<void*>(si->base), si->size, PROT_NONE,
MAP_FIXED | MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
} }
} }
@ -600,8 +599,6 @@ class LoadTask {
si_->load_bias = elf_reader.load_bias(); si_->load_bias = elf_reader.load_bias();
si_->phnum = elf_reader.phdr_count(); si_->phnum = elf_reader.phdr_count();
si_->phdr = elf_reader.loaded_phdr(); si_->phdr = elf_reader.loaded_phdr();
si_->set_map_start(elf_reader.map_start());
si_->set_map_size(elf_reader.map_size());
return true; return true;
} }

View file

@ -520,8 +520,7 @@ size_t phdr_table_get_load_size(const ElfW(Phdr)* phdr_table, size_t phdr_count,
// Reserve a virtual address range such that if its limits were extended to the next 2**align // Reserve a virtual address range such that if its limits were extended to the next 2**align
// boundary, it would not overlap with any existing mappings. // boundary, it would not overlap with any existing mappings.
static void* ReserveWithAlignmentPadding(size_t size, size_t align, void** map_start, static void* ReserveAligned(size_t size, size_t align) {
size_t* map_size) {
int mmap_flags = MAP_PRIVATE | MAP_ANONYMOUS; int mmap_flags = MAP_PRIVATE | MAP_ANONYMOUS;
if (align == PAGE_SIZE) { if (align == PAGE_SIZE) {
void* mmap_ptr = mmap(nullptr, size, PROT_NONE, mmap_flags, -1, 0); void* mmap_ptr = mmap(nullptr, size, PROT_NONE, mmap_flags, -1, 0);
@ -533,12 +532,7 @@ static void* ReserveWithAlignmentPadding(size_t size, size_t align, void** map_s
// Allocate enough space so that the end of the desired region aligned up is still inside the // Allocate enough space so that the end of the desired region aligned up is still inside the
// mapping. // mapping.
#if defined(__LP64__) size_t mmap_size = align_up(size, align) + align - PAGE_SIZE;
constexpr size_t kLibraryPadding = 1ul << 24;
#else
constexpr size_t kLibraryPadding = 0;
#endif
size_t mmap_size = align_up(size + kLibraryPadding, align) + align - PAGE_SIZE;
uint8_t* mmap_ptr = uint8_t* mmap_ptr =
reinterpret_cast<uint8_t*>(mmap(nullptr, mmap_size, PROT_NONE, mmap_flags, -1, 0)); reinterpret_cast<uint8_t*>(mmap(nullptr, mmap_size, PROT_NONE, mmap_flags, -1, 0));
if (mmap_ptr == MAP_FAILED) { if (mmap_ptr == MAP_FAILED) {
@ -552,18 +546,8 @@ static void* ReserveWithAlignmentPadding(size_t size, size_t align, void** map_s
// created. Don't randomize then. // created. Don't randomize then.
size_t n = is_first_stage_init() ? 0 : arc4random_uniform((last - first) / PAGE_SIZE + 1); size_t n = is_first_stage_init() ? 0 : arc4random_uniform((last - first) / PAGE_SIZE + 1);
uint8_t* start = first + n * PAGE_SIZE; uint8_t* start = first + n * PAGE_SIZE;
// Unmap the extra space around the allocation.
// Keep it mapped PROT_NONE on 64-bit targets where address space is plentiful to make it harder
// to defeat ASLR by probing for readable memory mappings.
#if defined(__LP64__)
*map_start = mmap_ptr;
*map_size = mmap_size;
#else
munmap(mmap_ptr, start - mmap_ptr); munmap(mmap_ptr, start - mmap_ptr);
munmap(start + size, mmap_ptr + mmap_size - (start + size)); munmap(start + size, mmap_ptr + mmap_size - (start + size));
*map_start = start;
*map_size = size;
#endif
return start; return start;
} }
@ -587,15 +571,13 @@ bool ElfReader::ReserveAddressSpace(address_space_params* address_space) {
load_size_ - address_space->reserved_size, load_size_, name_.c_str()); load_size_ - address_space->reserved_size, load_size_, name_.c_str());
return false; return false;
} }
start = ReserveWithAlignmentPadding(load_size_, kLibraryAlignment, &map_start_, &map_size_); start = ReserveAligned(load_size_, kLibraryAlignment);
if (start == nullptr) { if (start == nullptr) {
DL_ERR("couldn't reserve %zd bytes of address space for \"%s\"", load_size_, name_.c_str()); DL_ERR("couldn't reserve %zd bytes of address space for \"%s\"", load_size_, name_.c_str());
return false; return false;
} }
} else { } else {
start = address_space->start_addr; start = address_space->start_addr;
map_start_ = start;
map_size_ = load_size_;
mapped_by_caller_ = true; mapped_by_caller_ = true;
// Update the reserved address space to subtract the space used by this library. // Update the reserved address space to subtract the space used by this library.

View file

@ -49,8 +49,6 @@ class ElfReader {
size_t phdr_count() const { return phdr_num_; } size_t phdr_count() const { return phdr_num_; }
ElfW(Addr) load_start() const { return reinterpret_cast<ElfW(Addr)>(load_start_); } ElfW(Addr) load_start() const { return reinterpret_cast<ElfW(Addr)>(load_start_); }
size_t load_size() const { return load_size_; } size_t load_size() const { return load_size_; }
ElfW(Addr) map_start() const { return reinterpret_cast<ElfW(Addr)>(map_start_); }
size_t map_size() const { return map_size_; }
ElfW(Addr) load_bias() const { return load_bias_; } ElfW(Addr) load_bias() const { return load_bias_; }
const ElfW(Phdr)* loaded_phdr() const { return loaded_phdr_; } const ElfW(Phdr)* loaded_phdr() const { return loaded_phdr_; }
const ElfW(Dyn)* dynamic() const { return dynamic_; } const ElfW(Dyn)* dynamic() const { return dynamic_; }
@ -98,10 +96,6 @@ class ElfReader {
void* load_start_; void* load_start_;
// Size in bytes of reserved address space. // Size in bytes of reserved address space.
size_t load_size_; size_t load_size_;
// First page of reserved address space including randomized padding.
void* map_start_;
// Size in bytes of reserved address space including randomized padding.
size_t map_size_;
// Load bias. // Load bias.
ElfW(Addr) load_bias_; ElfW(Addr) load_bias_;

View file

@ -900,24 +900,6 @@ void soinfo::generate_handle() {
g_soinfo_handles_map[handle_] = this; g_soinfo_handles_map[handle_] = this;
} }
void soinfo::set_map_start(ElfW(Addr) map_start) {
CHECK(has_min_version(6));
map_start_ = map_start;
}
ElfW(Addr) soinfo::get_map_start() const {
CHECK(has_min_version(6));
return map_start_;
}
void soinfo::set_map_size(size_t map_size) {
CHECK(has_min_version(6));
map_size_ = map_size;
}
size_t soinfo::get_map_size() const {
CHECK(has_min_version(6));
return map_size_;
}
// TODO(dimitry): Move SymbolName methods to a separate file. // TODO(dimitry): Move SymbolName methods to a separate file.
uint32_t calculate_elf_hash(const char* name) { uint32_t calculate_elf_hash(const char* name) {

View file

@ -66,7 +66,7 @@
#define FLAG_PRELINKED 0x00000400 // prelink_image has successfully processed this soinfo #define FLAG_PRELINKED 0x00000400 // prelink_image has successfully processed this soinfo
#define FLAG_NEW_SOINFO 0x40000000 // new soinfo format #define FLAG_NEW_SOINFO 0x40000000 // new soinfo format
#define SOINFO_VERSION 6 #define SOINFO_VERSION 5
ElfW(Addr) call_ifunc_resolver(ElfW(Addr) resolver_addr); ElfW(Addr) call_ifunc_resolver(ElfW(Addr) resolver_addr);
@ -345,12 +345,6 @@ struct soinfo {
SymbolLookupLib get_lookup_lib(); SymbolLookupLib get_lookup_lib();
void set_map_start(ElfW(Addr) map_start);
ElfW(Addr) get_map_start() const;
void set_map_size(size_t map_size);
size_t get_map_size() const;
private: private:
bool is_image_linked() const; bool is_image_linked() const;
void set_image_linked(); void set_image_linked();
@ -429,10 +423,6 @@ struct soinfo {
// version >= 5 // version >= 5
std::unique_ptr<soinfo_tls> tls_; std::unique_ptr<soinfo_tls> tls_;
std::vector<TlsDynamicResolverArg> tlsdesc_args_; std::vector<TlsDynamicResolverArg> tlsdesc_args_;
// version >= 6
ElfW(Addr) map_start_;
size_t map_size_;
}; };
// This function is used by dlvsym() to calculate hash of sym_ver // This function is used by dlvsym() to calculate hash of sym_ver