/*
 * Copyright (C) 2024 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "../faulty_memory_accesses.h"

#include <cstdint>
#include <utility>

#include "berberis/base/checks.h"
#include "berberis/runtime_primitives/recovery_code.h"

namespace berberis {

namespace {

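// Each FaultyLoadN helper below performs a single N-bit load and returns both
// the loaded value and a fault flag (in w1). On a successful load the flag is
// zero; if the access faults, the fault handler is expected to resume
// execution at g_faulty_load_recovery, which sets the flag to one.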
extern "C" FaultyLoadResult FaultyLoad8(const void*);
extern "C" FaultyLoadResult FaultyLoad16(const void*);
extern "C" FaultyLoadResult FaultyLoad32(const void*);
extern "C" FaultyLoadResult FaultyLoad64(const void*);
extern "C" char g_faulty_load_recovery;

__asm__(
    R"(
  .globl FaultyLoad8
  .balign 16
FaultyLoad8:
  ldrb w0, [x0]  // Load 1 byte from the memory pointed to by x0 into w0.
  mov w1, #0     // Clear the fault flag returned in w1.
  ret

  .globl FaultyLoad16
  .balign 16
FaultyLoad16:
  ldrh w0, [x0]  // Load 2 bytes (halfword) from the memory pointed to by x0 into w0.
  mov w1, #0
  ret

  .globl FaultyLoad32
  .balign 16
FaultyLoad32:
  ldr w0, [x0]   // Load 4 bytes (word) from the memory pointed to by x0 into w0.
  mov w1, #0
  ret

  .globl FaultyLoad64
  .balign 16
FaultyLoad64:
  ldr x0, [x0]   // Load 8 bytes (doubleword) from the memory pointed to by x0 into x0.
  mov w1, #0
  ret

  // Recovery code: execution resumes here if one of the loads above faults.
  .globl g_faulty_load_recovery
g_faulty_load_recovery:
  mov w1, #1     // Report that the load faulted.
  ret
)");

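// The FaultyStoreN helpers mirror the loads: each performs a single store and
// returns false (w0 == 0) on success. If the store faults, execution is
// expected to resume at g_faulty_store_recovery, which returns true instead.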
extern "C" bool FaultyStore8(void*, uint64_t);
extern "C" bool FaultyStore16(void*, uint64_t);
extern "C" bool FaultyStore32(void*, uint64_t);
extern "C" bool FaultyStore64(void*, uint64_t);
extern "C" char g_faulty_store_recovery;

__asm__(
    R"(
  .globl FaultyStore8
  .balign 16
FaultyStore8:
  strb w1, [x0]  // Store the lower 8 bits of w1 into the memory pointed to by x0.
  mov w0, #0     // Return false: the store did not fault.
  ret

  .globl FaultyStore16
  .balign 16
FaultyStore16:
  strh w1, [x0]  // Store the lower 16 bits of w1 into the memory pointed to by x0.
  mov w0, #0
  ret

  .globl FaultyStore32
  .balign 16
FaultyStore32:
  str w1, [x0]   // Store the lower 32 bits of w1 into the memory pointed to by x0.
  mov w0, #0
  ret

  .globl FaultyStore64
  .balign 16
FaultyStore64:
  str x1, [x0]   // Store all 64 bits of x1 into the memory pointed to by x0.
  mov w0, #0
  ret

  // Recovery code: execution resumes here if one of the stores above faults.
  .globl g_faulty_store_recovery
g_faulty_store_recovery:
  mov w0, #1     // Return true: the store faulted.
  ret
)");

// Packs a (fault address, recovery address) pair in the form expected by
// InitExtraRecoveryCodeUnsafe.
template <typename FaultyAccessPointer>
std::pair<uintptr_t, uintptr_t> MakePairAdapter(FaultyAccessPointer fault_addr,
                                                void* recovery_addr) {
  return {reinterpret_cast<uintptr_t>(fault_addr), reinterpret_cast<uintptr_t>(recovery_addr)};
}

}  // namespace

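// Dispatches to the assembly helper that matches the requested access width.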
FaultyLoadResult FaultyLoad(const void* addr, uint8_t data_bytes) {
  CHECK_LE(data_bytes, 8);

  FaultyLoadResult result;
  switch (data_bytes) {
    case 1:
      result = FaultyLoad8(addr);
      break;
    case 2:
      result = FaultyLoad16(addr);
      break;
    case 4:
      result = FaultyLoad32(addr);
      break;
    case 8:
      result = FaultyLoad64(addr);
      break;
    default:
      LOG_ALWAYS_FATAL("Unexpected FaultyLoad access size");
  }

  return result;
}

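// Dispatches to the matching store helper; returns true if the store faulted.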
bool FaultyStore(void* addr, uint8_t data_bytes, uint64_t value) {
  CHECK_LE(data_bytes, 8);

  bool is_fault;
  switch (data_bytes) {
    case 1:
      is_fault = FaultyStore8(addr, value);
      break;
    case 2:
      is_fault = FaultyStore16(addr, value);
      break;
    case 4:
      is_fault = FaultyStore32(addr, value);
      break;
    case 8:
      is_fault = FaultyStore64(addr, value);
      break;
    default:
      LOG_ALWAYS_FATAL("Unexpected FaultyStore access size");
  }

  return is_fault;
}

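// Registers the (fault PC, recovery PC) pairs for the helpers above with the
// runtime's recovery-code table, so that a fault inside a helper resumes at
// the corresponding recovery stub.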
void AddFaultyMemoryAccessRecoveryCode() {
  InitExtraRecoveryCodeUnsafe({
      MakePairAdapter(&FaultyLoad8, &g_faulty_load_recovery),
      MakePairAdapter(&FaultyLoad16, &g_faulty_load_recovery),
      MakePairAdapter(&FaultyLoad32, &g_faulty_load_recovery),
      MakePairAdapter(&FaultyLoad64, &g_faulty_load_recovery),
      MakePairAdapter(&FaultyStore8, &g_faulty_store_recovery),
      MakePairAdapter(&FaultyStore16, &g_faulty_store_recovery),
      MakePairAdapter(&FaultyStore32, &g_faulty_store_recovery),
      MakePairAdapter(&FaultyStore64, &g_faulty_store_recovery),
  });
}

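// Test-only lookup that mirrors the registration above without consulting the
// global recovery table.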
void* FindFaultyMemoryAccessRecoveryAddrForTesting(void* fault_addr) {
  if (fault_addr == &FaultyLoad8 || fault_addr == &FaultyLoad16 || fault_addr == &FaultyLoad32 ||
      fault_addr == &FaultyLoad64) {
    return &g_faulty_load_recovery;
  }
  if (fault_addr == &FaultyStore8 || fault_addr == &FaultyStore16 ||
      fault_addr == &FaultyStore32 || fault_addr == &FaultyStore64) {
    return &g_faulty_store_recovery;
  }
  return nullptr;
}

}  // namespace berberis