// Copyright 2018 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/profiler/module_cache.h"

#include <iomanip>
#include <map>
#include <memory>
#include <string_view>
#include <utility>
#include <vector>

#include "base/containers/adapters.h"
#include "base/functional/callback.h"
#include "base/functional/callback_helpers.h"
#include "base/ranges/algorithm.h"
#include "base/test/bind.h"
#include "build/build_config.h"
#include "testing/gtest/include/gtest/gtest.h"

#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
#include "base/debug/proc_maps_linux.h"
#endif

// Note: The special-case IS_CHROMEOS code inside GetDebugBasenameForModule to
// handle the interaction between that function and
// SetProcessTitleFromCommandLine() is tested in
// base/process/set_process_title_linux_unittest.cc due to dependency issues.

namespace base {
namespace {

int AFunctionForTest() {
  return 42;
}

// Provides a module that is guaranteed to be isolated from (and non-contiguous
// with) any other module, by placing the module in the middle of a block of
// heap memory.
class IsolatedModule : public ModuleCache::Module {
 public:
  explicit IsolatedModule(bool is_native = true)
      : is_native_(is_native), memory_region_(new char[kRegionSize]) {}

  // ModuleCache::Module
  uintptr_t GetBaseAddress() const override {
    // Place the module in the middle of the region.
    return reinterpret_cast<uintptr_t>(&memory_region_[kRegionSize / 4]);
  }

  std::string GetId() const override { return ""; }
  FilePath GetDebugBasename() const override { return FilePath(); }
  size_t GetSize() const override { return kRegionSize / 2; }
  bool IsNative() const override { return is_native_; }

 private:
  static const int kRegionSize = 100;

  bool is_native_;
  std::unique_ptr<char[]> memory_region_;
};

// Provides a fake module with configurable base address and size.
class FakeModule : public ModuleCache::Module {
 public:
  FakeModule(uintptr_t base_address,
             size_t size,
             bool is_native = true,
             OnceClosure destruction_closure = OnceClosure())
      : base_address_(base_address),
        size_(size),
        is_native_(is_native),
        destruction_closure_runner_(std::move(destruction_closure)) {}

  FakeModule(const FakeModule&) = delete;
  FakeModule& operator=(const FakeModule&) = delete;

  uintptr_t GetBaseAddress() const override { return base_address_; }
  std::string GetId() const override { return ""; }
  FilePath GetDebugBasename() const override { return FilePath(); }
  size_t GetSize() const override { return size_; }
  bool IsNative() const override { return is_native_; }

 private:
  uintptr_t base_address_;
  size_t size_;
  bool is_native_;
  ScopedClosureRunner destruction_closure_runner_;
};

// Utility function to add a single non-native module during test setup. Returns
// a pointer to the provided module.
const ModuleCache::Module* AddNonNativeModule(
    ModuleCache* cache,
    std::unique_ptr<const ModuleCache::Module> module) {
  const ModuleCache::Module* module_ptr = module.get();
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
  modules.push_back(std::move(module));
  cache->UpdateNonNativeModules({}, std::move(modules));
  return module_ptr;
}

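// MAYBE_TEST expands to a regular TEST on the platforms selected below, and to
// a DISABLED_ test elsewhere.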
#if (BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_IOS) && !defined(ARCH_CPU_ARM64)) || \
    BUILDFLAG(IS_FUCHSIA) || BUILDFLAG(IS_WIN)
#define MAYBE_TEST(TestSuite, TestName) TEST(TestSuite, TestName)
#else
#define MAYBE_TEST(TestSuite, TestName) TEST(TestSuite, DISABLED_##TestName)
#endif

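// Checks that the debug basename of the module containing this test code
// matches the expected per-platform value.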
MAYBE_TEST(ModuleCacheTest, GetDebugBasename) {
  ModuleCache cache;
  const ModuleCache::Module* module =
      cache.GetModuleForAddress(reinterpret_cast<uintptr_t>(&AFunctionForTest));
  ASSERT_NE(nullptr, module);
#if BUILDFLAG(IS_ANDROID)
  EXPECT_EQ("libbase_unittests__library",
            module->GetDebugBasename().RemoveFinalExtension().value());
#elif BUILDFLAG(IS_POSIX)
  EXPECT_EQ("base_unittests", module->GetDebugBasename().value());
#elif BUILDFLAG(IS_WIN)
  EXPECT_EQ(L"base_unittests.exe.pdb", module->GetDebugBasename().value());
#endif
}

// Checks that ModuleCache returns the same module instance for
// addresses within the module.
MAYBE_TEST(ModuleCacheTest, LookupCodeAddresses) {
  uintptr_t ptr1 = reinterpret_cast<uintptr_t>(&AFunctionForTest);
  uintptr_t ptr2 = ptr1 + 1;
  ModuleCache cache;
  const ModuleCache::Module* module1 = cache.GetModuleForAddress(ptr1);
  const ModuleCache::Module* module2 = cache.GetModuleForAddress(ptr2);
  EXPECT_EQ(module1, module2);
  EXPECT_NE(nullptr, module1);
  EXPECT_GT(module1->GetSize(), 0u);
  EXPECT_LE(module1->GetBaseAddress(), ptr1);
  EXPECT_GT(module1->GetBaseAddress() + module1->GetSize(), ptr2);
}

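// Checks that addresses just inside a module's range resolve to the module,
// while addresses just outside it resolve to nullptr.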
MAYBE_TEST(ModuleCacheTest, LookupRange) {
  ModuleCache cache;
  auto to_inject = std::make_unique<IsolatedModule>();
  const ModuleCache::Module* module = to_inject.get();
  cache.AddCustomNativeModule(std::move(to_inject));

  EXPECT_EQ(nullptr, cache.GetModuleForAddress(module->GetBaseAddress() - 1));
  EXPECT_EQ(module, cache.GetModuleForAddress(module->GetBaseAddress()));
  EXPECT_EQ(module, cache.GetModuleForAddress(module->GetBaseAddress() +
                                              module->GetSize() - 1));
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(module->GetBaseAddress() +
                                               module->GetSize()));
}

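// Checks that the same range behavior holds for a non-native module.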
MAYBE_TEST(ModuleCacheTest, LookupNonNativeModule) {
  ModuleCache cache;
  const ModuleCache::Module* module =
      AddNonNativeModule(&cache, std::make_unique<IsolatedModule>(false));

  EXPECT_EQ(nullptr, cache.GetModuleForAddress(module->GetBaseAddress() - 1));
  EXPECT_EQ(module, cache.GetModuleForAddress(module->GetBaseAddress()));
  EXPECT_EQ(module, cache.GetModuleForAddress(module->GetBaseAddress() +
                                              module->GetSize() - 1));
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(module->GetBaseAddress() +
                                               module->GetSize()));
}

MAYBE_TEST(ModuleCacheTest, LookupOverlaidNonNativeModule) {
  ModuleCache cache;

  auto native_module_to_inject = std::make_unique<IsolatedModule>();
  const ModuleCache::Module* native_module = native_module_to_inject.get();
  cache.AddCustomNativeModule(std::move(native_module_to_inject));

  // Overlay the native module with the non-native module, starting 8 bytes
  // into the native module and ending 8 bytes before its end.
  const ModuleCache::Module* non_native_module = AddNonNativeModule(
      &cache,
      std::make_unique<FakeModule>(native_module->GetBaseAddress() + 8,
                                   native_module->GetSize() - 16, false));

  EXPECT_EQ(native_module,
            cache.GetModuleForAddress(non_native_module->GetBaseAddress() - 1));
  EXPECT_EQ(non_native_module,
            cache.GetModuleForAddress(non_native_module->GetBaseAddress()));
  EXPECT_EQ(non_native_module,
            cache.GetModuleForAddress(non_native_module->GetBaseAddress() +
                                      non_native_module->GetSize() - 1));
  EXPECT_EQ(native_module,
            cache.GetModuleForAddress(non_native_module->GetBaseAddress() +
                                      non_native_module->GetSize()));
}

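// Checks that a module added via UpdateNonNativeModules() can be looked up by
// address.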
MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesAdd) {
  ModuleCache cache;
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
  modules.push_back(std::make_unique<FakeModule>(1, 1, false));
  const ModuleCache::Module* module = modules.back().get();
  cache.UpdateNonNativeModules({}, std::move(modules));

  EXPECT_EQ(module, cache.GetModuleForAddress(1));
}

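// Checks that a module removed via UpdateNonNativeModules() is no longer
// returned by address lookup.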
MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesRemove) {
  ModuleCache cache;
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
  modules.push_back(std::make_unique<FakeModule>(1, 1, false));
  const ModuleCache::Module* module = modules.back().get();
  cache.UpdateNonNativeModules({}, std::move(modules));
  cache.UpdateNonNativeModules({module}, {});

  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));
}

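// Checks that a removed module is not destroyed until the ModuleCache itself
// is destroyed.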
MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesRemoveModuleIsNotDestroyed) {
  bool was_destroyed = false;
  {
    ModuleCache cache;
    std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
    modules.push_back(std::make_unique<FakeModule>(
        1, 1, false,
        BindLambdaForTesting([&was_destroyed]() { was_destroyed = true; })));
    const ModuleCache::Module* module = modules.back().get();
    cache.UpdateNonNativeModules({}, std::move(modules));
    cache.UpdateNonNativeModules({module}, {});

    EXPECT_FALSE(was_destroyed);
  }
  EXPECT_TRUE(was_destroyed);
}

// Regression test to validate that when modules are partitioned into modules to
// keep and modules to remove, the modules to remove are not destroyed.
// https://crbug.com/1127466 case 2.
MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesPartitioning) {
  int destroyed_count = 0;
  const auto record_destroyed = [&destroyed_count]() { ++destroyed_count; };
  {
    ModuleCache cache;
    std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
    modules.push_back(std::make_unique<FakeModule>(
        1, 1, false, BindLambdaForTesting(record_destroyed)));
    const ModuleCache::Module* module1 = modules.back().get();
    modules.push_back(std::make_unique<FakeModule>(
        2, 1, false, BindLambdaForTesting(record_destroyed)));
    cache.UpdateNonNativeModules({}, std::move(modules));
    cache.UpdateNonNativeModules({module1}, {});

    EXPECT_EQ(0, destroyed_count);
  }
  EXPECT_EQ(2, destroyed_count);
}

MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesReplace) {
  ModuleCache cache;
  // Replace a module with another larger module at the same base address.
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules1;
  modules1.push_back(std::make_unique<FakeModule>(1, 1, false));
  const ModuleCache::Module* module1 = modules1.back().get();
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules2;
  modules2.push_back(std::make_unique<FakeModule>(1, 2, false));
  const ModuleCache::Module* module2 = modules2.back().get();

  cache.UpdateNonNativeModules({}, std::move(modules1));
  cache.UpdateNonNativeModules({module1}, std::move(modules2));

  EXPECT_EQ(module2, cache.GetModuleForAddress(2));
}

MAYBE_TEST(ModuleCacheTest,
           UpdateNonNativeModulesMultipleRemovedModulesAtSameAddress) {
  int destroyed_count = 0;
  const auto record_destroyed = [&destroyed_count]() { ++destroyed_count; };
  ModuleCache cache;

  // Checks that non-native modules can be repeatedly added and removed at the
  // same addresses, and that all are retained in the cache.
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules1;
  modules1.push_back(std::make_unique<FakeModule>(
      1, 1, false, BindLambdaForTesting(record_destroyed)));
  const ModuleCache::Module* module1 = modules1.back().get();

  std::vector<std::unique_ptr<const ModuleCache::Module>> modules2;
  modules2.push_back(std::make_unique<FakeModule>(
      1, 1, false, BindLambdaForTesting(record_destroyed)));
  const ModuleCache::Module* module2 = modules2.back().get();

  cache.UpdateNonNativeModules({}, std::move(modules1));
  cache.UpdateNonNativeModules({module1}, std::move(modules2));
  cache.UpdateNonNativeModules({module2}, {});

  EXPECT_EQ(0, destroyed_count);
}

MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesCorrectModulesRemoved) {
  ModuleCache cache;

  std::vector<std::unique_ptr<const ModuleCache::Module>> to_add;
  for (int i = 0; i < 5; ++i) {
    to_add.push_back(std::make_unique<FakeModule>(i + 1, 1, false));
  }

  std::vector<const ModuleCache::Module*> to_remove = {to_add[1].get(),
                                                       to_add[3].get()};

  // Checks that the correct modules are removed when removing some but not all
  // modules.
  cache.UpdateNonNativeModules({}, std::move(to_add));
  cache.UpdateNonNativeModules({to_remove}, {});

  DCHECK_NE(nullptr, cache.GetModuleForAddress(1));
  DCHECK_EQ(nullptr, cache.GetModuleForAddress(2));
  DCHECK_NE(nullptr, cache.GetModuleForAddress(3));
  DCHECK_EQ(nullptr, cache.GetModuleForAddress(4));
  DCHECK_NE(nullptr, cache.GetModuleForAddress(5));
}

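// Checks that GetModules() returns both the native and the non-native module,
// with the native module listed first.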
MAYBE_TEST(ModuleCacheTest, ModulesList) {
  ModuleCache cache;
  uintptr_t ptr = reinterpret_cast<uintptr_t>(&AFunctionForTest);
  const ModuleCache::Module* native_module = cache.GetModuleForAddress(ptr);
  const ModuleCache::Module* non_native_module =
      AddNonNativeModule(&cache, std::make_unique<FakeModule>(1, 2, false));

  EXPECT_NE(nullptr, native_module);
  std::vector<const ModuleCache::Module*> modules = cache.GetModules();
  ASSERT_EQ(2u, modules.size());
  EXPECT_EQ(native_module, modules[0]);
  EXPECT_EQ(non_native_module, modules[1]);
}

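// Checks that looking up an address that belongs to no module returns nullptr.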
MAYBE_TEST(ModuleCacheTest, InvalidModule) {
  ModuleCache cache;
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));
}

// arm64 module support is not implemented.
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || \
    (BUILDFLAG(IS_ANDROID) && !defined(ARCH_CPU_ARM64))
// Validates that, for the memory regions listed in /proc/self/maps, the modules
// found via ModuleCache are consistent with those regions' extents.
TEST(ModuleCacheTest, CheckAgainstProcMaps) {
  std::string proc_maps;
  debug::ReadProcMaps(&proc_maps);
  std::vector<debug::MappedMemoryRegion> regions;
  ASSERT_TRUE(debug::ParseProcMaps(proc_maps, &regions));

  // Map distinct paths to lists of regions for the path in increasing memory
  // order.
  using RegionVector = std::vector<const debug::MappedMemoryRegion*>;
  using PathRegionsMap = std::map<std::string_view, RegionVector>;
  PathRegionsMap path_regions;
  for (const debug::MappedMemoryRegion& region : regions)
    path_regions[region.path].push_back(&region);

  const auto find_last_executable_region = [](const RegionVector& regions) {
    const auto rloc = base::ranges::find_if(
        base::Reversed(regions), [](const debug::MappedMemoryRegion* region) {
          return static_cast<bool>(region->permissions &
                                   debug::MappedMemoryRegion::EXECUTE);
        });
    return rloc == regions.rend() ? nullptr : *rloc;
  };

  int module_count = 0;

  // Loop through each distinct path.
  for (const auto& path_regions_pair : path_regions) {
    // Regions that aren't associated with absolute paths are unlikely to be
    // part of modules.
    if (path_regions_pair.first.empty() || path_regions_pair.first[0] != '/')
      continue;

    const debug::MappedMemoryRegion* const last_executable_region =
        find_last_executable_region(path_regions_pair.second);
    // The region isn't part of a module if no executable regions are associated
    // with the same path.
    if (!last_executable_region)
      continue;

    // Loop through all the regions associated with the path, checking that
    // modules created for addresses in each region have the expected extents.
    const uintptr_t expected_base_address =
        path_regions_pair.second.front()->start;
    for (const auto* region : path_regions_pair.second) {
      ModuleCache cache;
      const ModuleCache::Module* module =
          cache.GetModuleForAddress(region->start);
      // Not all regions matching the prior conditions are necessarily modules;
      // things like resources are also mmapped into memory from files. Ignore
      // any region that isn't part of a module.
      if (!module)
        continue;

      ++module_count;

      EXPECT_EQ(expected_base_address, module->GetBaseAddress());
      // This needs an inequality comparison because the module size is computed
      // based on the ELF section's actual extent, while the |proc_maps| region
      // is aligned to a larger boundary.
      EXPECT_LE(module->GetSize(),
                last_executable_region->end - expected_base_address)
          << "base address: " << std::hex << module->GetBaseAddress()
          << std::endl
          << "region start: " << std::hex << region->start << std::endl
          << "region end: " << std::hex << region->end << std::endl;
    }
  }

  // Linux should have at least this module and ld-linux.so. Android should have
  // at least this module and system libraries.
  EXPECT_GE(module_count, 2);
}
#endif

// Module provider that always returns a fake module of configurable size
// (1 by default) for any given |address|.
class MockModuleProvider : public ModuleCache::AuxiliaryModuleProvider {
 public:
  explicit MockModuleProvider(size_t module_size = 1)
      : module_size_(module_size) {}

  std::unique_ptr<const ModuleCache::Module> TryCreateModuleForAddress(
      uintptr_t address) override {
    return std::make_unique<FakeModule>(address, module_size_);
  }

 private:
  size_t module_size_;
};

// Checks that the auxiliary provider can inject new modules when registered.
TEST(ModuleCacheTest, RegisterAuxiliaryModuleProvider) {
  ModuleCache cache;
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));

  MockModuleProvider auxiliary_provider;
  cache.RegisterAuxiliaryModuleProvider(&auxiliary_provider);
  auto* module = cache.GetModuleForAddress(1);
  EXPECT_NE(nullptr, module);
  EXPECT_EQ(1U, module->GetBaseAddress());
  cache.UnregisterAuxiliaryModuleProvider(&auxiliary_provider);

  // Even when unregistered, the module remains in the cache.
  EXPECT_EQ(module, cache.GetModuleForAddress(1));
}

// Checks that ModuleCache's own module creator is used in preference to the
// auxiliary provider when possible.
MAYBE_TEST(ModuleCacheTest, NativeModuleOverAuxiliaryModuleProvider) {
  ModuleCache cache;

  MockModuleProvider auxiliary_provider(/*module_size=*/100);
  cache.RegisterAuxiliaryModuleProvider(&auxiliary_provider);

  const ModuleCache::Module* module =
      cache.GetModuleForAddress(reinterpret_cast<uintptr_t>(&AFunctionForTest));
  ASSERT_NE(nullptr, module);

  // The module should be a native module, which will have size greater than 100
  // bytes.
  EXPECT_NE(100u, module->GetSize());
  cache.UnregisterAuxiliaryModuleProvider(&auxiliary_provider);
}

// Checks that the auxiliary provider is no longer used after being
// unregistered.
TEST(ModuleCacheTest, UnregisterAuxiliaryModuleProvider) {
  ModuleCache cache;

  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));

  MockModuleProvider auxiliary_provider;
  cache.RegisterAuxiliaryModuleProvider(&auxiliary_provider);
  cache.UnregisterAuxiliaryModuleProvider(&auxiliary_provider);

  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));
}

#if BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_LINUX)
TEST(ModuleCacheTest, TransformELFToSymbolServerFormat) {
  // See explanation for the module_id mangling in
  // base::TransformModuleIDToSymbolServerFormat implementation.
  EXPECT_EQ(TransformModuleIDToSymbolServerFormat(
                "7F0715C286F8B16C10E4AD349CDA3B9B56C7A773"),
            "C215077FF8866CB110E4AD349CDA3B9B0");
}
#endif

}  // namespace
}  // namespace base