/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <elf.h>
#include <unistd.h>

#include <android-base/file.h>
#include <android-base/test_utils.h>

#include <gtest/gtest.h>

#include <unwindstack/Elf.h>
#include <unwindstack/MapInfo.h>

#include "ElfTestUtils.h"
#include "MemoryFake.h"

namespace unwindstack {

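// Fixture that shares one fake Memory object across all tests and enables the
// global Elf cache for each test, disabling it again in TearDown.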
class ElfCacheTest : public ::testing::Test {
 protected:
  static void SetUpTestCase() { memory_.reset(new MemoryFake); }

  void SetUp() override { Elf::SetCachingEnabled(true); }

  void TearDown() override { Elf::SetCachingEnabled(false); }

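  // Writes a minimal 32-bit or 64-bit ELF header for the given machine type
  // at the given offset in the temporary file.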
  void WriteElfFile(uint64_t offset, TemporaryFile* tf, uint32_t type) {
    ASSERT_TRUE(type == EM_ARM || type == EM_386 || type == EM_X86_64);
    size_t ehdr_size;
    Elf32_Ehdr ehdr32;
    Elf64_Ehdr ehdr64;
    void* ptr;
    if (type == EM_ARM || type == EM_386) {
      ehdr_size = sizeof(ehdr32);
      ptr = &ehdr32;
      TestInitEhdr(&ehdr32, ELFCLASS32, type);
    } else {
      ehdr_size = sizeof(ehdr64);
      ptr = &ehdr64;
      TestInitEhdr(&ehdr64, ELFCLASS64, type);
    }

    ASSERT_EQ(offset, static_cast<uint64_t>(lseek(tf->fd, offset, SEEK_SET)));
    ASSERT_TRUE(android::base::WriteFully(tf->fd, ptr, ehdr_size));
  }

  void VerifyWithinSameMap(bool cache_enabled);
  void VerifySameMap(bool cache_enabled);

  static std::shared_ptr<Memory> memory_;
};

std::shared_ptr<Memory> ElfCacheTest::memory_;

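// Two MapInfo entries that reference the same file and offset should share a
// single cached Elf object when caching is enabled, and get separate objects
// when it is disabled.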
void ElfCacheTest::VerifySameMap(bool cache_enabled) {
  if (!cache_enabled) {
    Elf::SetCachingEnabled(false);
  }

  TemporaryFile tf;
  ASSERT_TRUE(tf.fd != -1);
  WriteElfFile(0, &tf, EM_ARM);
  close(tf.fd);

  uint64_t start = 0x1000;
  uint64_t end = 0x20000;
  MapInfo info1(start, end, 0, 0x5, tf.path);
  MapInfo info2(start, end, 0, 0x5, tf.path);

  Elf* elf1 = info1.GetElf(memory_, true);
  ASSERT_TRUE(elf1->valid());
  Elf* elf2 = info2.GetElf(memory_, true);
  ASSERT_TRUE(elf2->valid());

  if (cache_enabled) {
    EXPECT_EQ(elf1, elf2);
  } else {
    EXPECT_NE(elf1, elf2);
  }
}

TEST_F(ElfCacheTest, no_caching) {
  VerifySameMap(false);
}

TEST_F(ElfCacheTest, caching_invalid_elf) {
  VerifySameMap(true);
}

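// Verifies caching behavior for maps that reference ELF objects embedded at
// different offsets within the same file.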
void ElfCacheTest::VerifyWithinSameMap(bool cache_enabled) {
  if (!cache_enabled) {
    Elf::SetCachingEnabled(false);
  }

  TemporaryFile tf;
  ASSERT_TRUE(tf.fd != -1);
  WriteElfFile(0, &tf, EM_ARM);
  WriteElfFile(0x100, &tf, EM_386);
  WriteElfFile(0x200, &tf, EM_X86_64);
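  // Pad the file out to 0x501 bytes so it extends well past the last ELF
  // header written above.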
  lseek(tf.fd, 0x500, SEEK_SET);
  uint8_t value = 0;
  write(tf.fd, &value, 1);
  close(tf.fd);

  uint64_t start = 0x1000;
  uint64_t end = 0x20000;
  // Will have an elf at offset 0 in file.
  MapInfo info0_1(start, end, 0, 0x5, tf.path);
  MapInfo info0_2(start, end, 0, 0x5, tf.path);
  // Will have an elf at offset 0x100 in file.
  MapInfo info100_1(start, end, 0x100, 0x5, tf.path);
  MapInfo info100_2(start, end, 0x100, 0x5, tf.path);
  // Will have an elf at offset 0x200 in file.
  MapInfo info200_1(start, end, 0x200, 0x5, tf.path);
  MapInfo info200_2(start, end, 0x200, 0x5, tf.path);
  // Will have an elf at offset 0 in file.
  MapInfo info300_1(start, end, 0x300, 0x5, tf.path);
  MapInfo info300_2(start, end, 0x300, 0x5, tf.path);

  Elf* elf0_1 = info0_1.GetElf(memory_, true);
  ASSERT_TRUE(elf0_1->valid());
  EXPECT_EQ(ARCH_ARM, elf0_1->arch());
  Elf* elf0_2 = info0_2.GetElf(memory_, true);
  ASSERT_TRUE(elf0_2->valid());
  EXPECT_EQ(ARCH_ARM, elf0_2->arch());
  EXPECT_EQ(0U, info0_1.elf_offset);
  EXPECT_EQ(0U, info0_2.elf_offset);
  if (cache_enabled) {
    EXPECT_EQ(elf0_1, elf0_2);
  } else {
    EXPECT_NE(elf0_1, elf0_2);
  }

  Elf* elf100_1 = info100_1.GetElf(memory_, true);
  ASSERT_TRUE(elf100_1->valid());
  EXPECT_EQ(ARCH_X86, elf100_1->arch());
  Elf* elf100_2 = info100_2.GetElf(memory_, true);
  ASSERT_TRUE(elf100_2->valid());
  EXPECT_EQ(ARCH_X86, elf100_2->arch());
  EXPECT_EQ(0U, info100_1.elf_offset);
  EXPECT_EQ(0U, info100_2.elf_offset);
  if (cache_enabled) {
    EXPECT_EQ(elf100_1, elf100_2);
  } else {
    EXPECT_NE(elf100_1, elf100_2);
  }

  Elf* elf200_1 = info200_1.GetElf(memory_, true);
  ASSERT_TRUE(elf200_1->valid());
  EXPECT_EQ(ARCH_X86_64, elf200_1->arch());
  Elf* elf200_2 = info200_2.GetElf(memory_, true);
  ASSERT_TRUE(elf200_2->valid());
  EXPECT_EQ(ARCH_X86_64, elf200_2->arch());
  EXPECT_EQ(0U, info200_1.elf_offset);
  EXPECT_EQ(0U, info200_2.elf_offset);
  if (cache_enabled) {
    EXPECT_EQ(elf200_1, elf200_2);
  } else {
    EXPECT_NE(elf200_1, elf200_2);
  }

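  // There is no ELF header at file offset 0x300, so these maps resolve to the
  // ELF at offset 0 and keep 0x300 as the elf_offset.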
  Elf* elf300_1 = info300_1.GetElf(memory_, true);
  ASSERT_TRUE(elf300_1->valid());
  EXPECT_EQ(ARCH_ARM, elf300_1->arch());
  Elf* elf300_2 = info300_2.GetElf(memory_, true);
  ASSERT_TRUE(elf300_2->valid());
  EXPECT_EQ(ARCH_ARM, elf300_2->arch());
  EXPECT_EQ(0x300U, info300_1.elf_offset);
  EXPECT_EQ(0x300U, info300_2.elf_offset);
  if (cache_enabled) {
    EXPECT_EQ(elf300_1, elf300_2);
    EXPECT_EQ(elf0_1, elf300_1);
  } else {
    EXPECT_NE(elf300_1, elf300_2);
    EXPECT_NE(elf0_1, elf300_1);
  }
}

TEST_F(ElfCacheTest, no_caching_valid_elf_offset_non_zero) {
  VerifyWithinSameMap(false);
}

TEST_F(ElfCacheTest, caching_valid_elf_offset_non_zero) {
  VerifyWithinSameMap(true);
}

}  // namespace unwindstack