// SPDX-License-Identifier: GPL-2.0
/*
 * This file contains core generic KASAN code.
 *
 * Copyright (c) 2014 Samsung Electronics Co., Ltd.
 * Author: Andrey Ryabinin <ryabinin.a.a@gmail.com>
 *
 * Some code borrowed from https://github.com/xairy/kasan-prototype by
 *        Andrey Konovalov <andreyknvl@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
#define DISABLE_BRANCH_PROFILING

#include <linux/export.h>
#include <linux/interrupt.h>
#include <linux/init.h>
#include <linux/kasan.h>
#include <linux/kernel.h>
#include <linux/kmemleak.h>
#include <linux/linkage.h>
#include <linux/memblock.h>
#include <linux/memory.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/printk.h>
#include <linux/sched.h>
#include <linux/sched/task_stack.h>
#include <linux/slab.h>
#include <linux/stacktrace.h>
#include <linux/string.h>
#include <linux/types.h>
#include <linux/vmalloc.h>
#include <linux/bug.h>

#include "kasan.h"
#include "../slab.h"

/*
 * All functions below are always inlined so that the compiler can
 * perform better optimizations in each of __asan_loadX/__asan_storeX
 * depending on the memory access size X.
 */

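/*
 * Shadow encoding: a zero shadow byte means the whole 8-byte granule is
 * accessible, a value 1..7 means only that many leading bytes are, and
 * negative values mark redzones and freed memory. A 1-byte access is
 * therefore bad once its offset within the granule reaches the encoded
 * value.
 */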
static __always_inline bool memory_is_poisoned_1(unsigned long addr)
{
	s8 shadow_value = *(s8 *)kasan_mem_to_shadow((void *)addr);

	if (unlikely(shadow_value)) {
		s8 last_accessible_byte = addr & KASAN_SHADOW_MASK;
		return unlikely(last_accessible_byte >= shadow_value);
	}

	return false;
}

static __always_inline bool memory_is_poisoned_2_4_8(unsigned long addr,
						unsigned long size)
{
	u8 *shadow_addr = (u8 *)kasan_mem_to_shadow((void *)addr);

	/*
	 * Access crosses an 8-byte (shadow granule) boundary. Such an
	 * access maps into two shadow bytes, so we need to check both.
	 */
	if (unlikely(((addr + size - 1) & KASAN_SHADOW_MASK) < size - 1))
		return *shadow_addr || memory_is_poisoned_1(addr + size - 1);

	return memory_is_poisoned_1(addr + size - 1);
}

static __always_inline bool memory_is_poisoned_16(unsigned long addr)
{
	u16 *shadow_addr = (u16 *)kasan_mem_to_shadow((void *)addr);

	/* An unaligned 16-byte access maps into 3 shadow bytes. */
	if (unlikely(!IS_ALIGNED(addr, KASAN_SHADOW_SCALE_SIZE)))
		return *shadow_addr || memory_is_poisoned_1(addr + 15);

	return *shadow_addr;
}

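/*
 * Helpers for the variable-size case: return the address of the first
 * nonzero shadow byte in [start, end), or 0 if the range is all zero.
 * memory_is_nonzero() aligns to 8 bytes and scans word-at-a-time.
 */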
static __always_inline unsigned long bytes_is_nonzero(const u8 *start,
					size_t size)
{
	while (size) {
		if (unlikely(*start))
			return (unsigned long)start;
		start++;
		size--;
	}

	return 0;
}

static __always_inline unsigned long memory_is_nonzero(const void *start,
						const void *end)
{
	unsigned int words;
	unsigned long ret;
	unsigned int prefix = (unsigned long)start % 8;

	if (end - start <= 16)
		return bytes_is_nonzero(start, end - start);

	if (prefix) {
		prefix = 8 - prefix;
		ret = bytes_is_nonzero(start, prefix);
		if (unlikely(ret))
			return ret;
		start += prefix;
	}

	words = (end - start) / 8;
	while (words) {
		if (unlikely(*(u64 *)start))
			return bytes_is_nonzero(start, 8);
		start += 8;
		words--;
	}

	return bytes_is_nonzero(start, (end - start) % 8);
}

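/*
 * Arbitrary-size check: scan the whole shadow range; if a nonzero byte is
 * found, the access is poisoned unless that byte is the last one and it
 * still covers the final accessed byte.
 */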
static __always_inline bool memory_is_poisoned_n(unsigned long addr,
						size_t size)
{
	unsigned long ret;

	ret = memory_is_nonzero(kasan_mem_to_shadow((void *)addr),
			kasan_mem_to_shadow((void *)addr + size - 1) + 1);

	if (unlikely(ret)) {
		unsigned long last_byte = addr + size - 1;
		s8 *last_shadow = (s8 *)kasan_mem_to_shadow((void *)last_byte);

		if (unlikely(ret != (unsigned long)last_shadow ||
			((long)(last_byte & KASAN_SHADOW_MASK) >= *last_shadow)))
			return true;
	}
	return false;
}

static __always_inline bool memory_is_poisoned(unsigned long addr, size_t size)
{
	if (__builtin_constant_p(size)) {
		switch (size) {
		case 1:
			return memory_is_poisoned_1(addr);
		case 2:
		case 4:
		case 8:
			return memory_is_poisoned_2_4_8(addr, size);
		case 16:
			return memory_is_poisoned_16(addr);
		default:
			BUILD_BUG();
		}
	}

	return memory_is_poisoned_n(addr, size);
}

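/*
 * Core access check: report and return false for accesses below the
 * shadow-mapped range or into poisoned memory, return true otherwise.
 * Zero-sized accesses are always considered valid.
 */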
static __always_inline bool check_memory_region_inline(unsigned long addr,
						size_t size, bool write,
						unsigned long ret_ip)
{
	if (unlikely(size == 0))
		return true;

	if (unlikely((void *)addr <
		kasan_shadow_to_mem((void *)KASAN_SHADOW_START))) {
		kasan_report(addr, size, write, ret_ip);
		return false;
	}

	if (likely(!memory_is_poisoned(addr, size)))
		return true;

	kasan_report(addr, size, write, ret_ip);
	return false;
}

bool check_memory_region(unsigned long addr, size_t size, bool write,
				unsigned long ret_ip)
{
	return check_memory_region_inline(addr, size, write, ret_ip);
}

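/*
 * Cache lifecycle hooks: when a cache is shrunk or destroyed, drop any of
 * its objects still sitting in the KASAN quarantine so their memory can
 * actually be freed.
 */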
void kasan_cache_shrink(struct kmem_cache *cache)
{
	quarantine_remove_cache(cache);
}

void kasan_cache_shutdown(struct kmem_cache *cache)
{
	if (!__kmem_cache_empty(cache))
		quarantine_remove_cache(cache);
}

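/*
 * Unpoison the global variable itself and poison the redzone the compiler
 * placed after it (size_with_redzone minus the aligned size).
 */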
static void register_global(struct kasan_global *global)
{
	size_t aligned_size = round_up(global->size, KASAN_SHADOW_SCALE_SIZE);

	kasan_unpoison_shadow(global->beg, global->size);

	kasan_poison_shadow(global->beg + aligned_size,
		global->size_with_redzone - aligned_size,
		KASAN_GLOBAL_REDZONE);
}

void __asan_register_globals(struct kasan_global *globals, size_t size)
{
	int i;

	for (i = 0; i < size; i++)
		register_global(&globals[i]);
}
EXPORT_SYMBOL(__asan_register_globals);

void __asan_unregister_globals(struct kasan_global *globals, size_t size)
{
}
EXPORT_SYMBOL(__asan_unregister_globals);

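/*
 * Instrumentation entry points: the compiler emits __asan_loadX()/
 * __asan_storeX() calls around memory accesses of constant size X.
 * The _noabort variants are plain aliases, since the kernel reports
 * and continues instead of aborting.
 */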
#define DEFINE_ASAN_LOAD_STORE(size)					\
	void __asan_load##size(unsigned long addr)			\
	{								\
		check_memory_region_inline(addr, size, false, _RET_IP_);\
	}								\
	EXPORT_SYMBOL(__asan_load##size);				\
	__alias(__asan_load##size)					\
	void __asan_load##size##_noabort(unsigned long);		\
	EXPORT_SYMBOL(__asan_load##size##_noabort);			\
	void __asan_store##size(unsigned long addr)			\
	{								\
		check_memory_region_inline(addr, size, true, _RET_IP_);	\
	}								\
	EXPORT_SYMBOL(__asan_store##size);				\
	__alias(__asan_store##size)					\
	void __asan_store##size##_noabort(unsigned long);		\
	EXPORT_SYMBOL(__asan_store##size##_noabort)

DEFINE_ASAN_LOAD_STORE(1);
DEFINE_ASAN_LOAD_STORE(2);
DEFINE_ASAN_LOAD_STORE(4);
DEFINE_ASAN_LOAD_STORE(8);
DEFINE_ASAN_LOAD_STORE(16);

void __asan_loadN(unsigned long addr, size_t size)
{
	check_memory_region(addr, size, false, _RET_IP_);
}
EXPORT_SYMBOL(__asan_loadN);

__alias(__asan_loadN)
void __asan_loadN_noabort(unsigned long, size_t);
EXPORT_SYMBOL(__asan_loadN_noabort);

void __asan_storeN(unsigned long addr, size_t size)
{
	check_memory_region(addr, size, true, _RET_IP_);
}
EXPORT_SYMBOL(__asan_storeN);

__alias(__asan_storeN)
void __asan_storeN_noabort(unsigned long, size_t);
EXPORT_SYMBOL(__asan_storeN_noabort);

/* to shut up compiler complaints */
void __asan_handle_no_return(void) {}
EXPORT_SYMBOL(__asan_handle_no_return);

/* Emitted by compiler to poison alloca()ed objects. */
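/*
 * The object at addr has a KASAN_ALLOCA_REDZONE_SIZE-byte redzone below it
 * and padding plus another redzone above it: unpoison the object's trailing
 * partial granule and poison both redzones.
 */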
void __asan_alloca_poison(unsigned long addr, size_t size)
{
	size_t rounded_up_size = round_up(size, KASAN_SHADOW_SCALE_SIZE);
	size_t padding_size = round_up(size, KASAN_ALLOCA_REDZONE_SIZE) -
			rounded_up_size;
	size_t rounded_down_size = round_down(size, KASAN_SHADOW_SCALE_SIZE);

	const void *left_redzone = (const void *)(addr -
			KASAN_ALLOCA_REDZONE_SIZE);
	const void *right_redzone = (const void *)(addr + rounded_up_size);

	WARN_ON(!IS_ALIGNED(addr, KASAN_ALLOCA_REDZONE_SIZE));

	kasan_unpoison_shadow((const void *)(addr + rounded_down_size),
			      size - rounded_down_size);
	kasan_poison_shadow(left_redzone, KASAN_ALLOCA_REDZONE_SIZE,
			KASAN_ALLOCA_LEFT);
	kasan_poison_shadow(right_redzone,
			padding_size + KASAN_ALLOCA_REDZONE_SIZE,
			KASAN_ALLOCA_RIGHT);
}
EXPORT_SYMBOL(__asan_alloca_poison);

/* Emitted by compiler to unpoison alloca()ed areas when the stack unwinds. */
void __asan_allocas_unpoison(const void *stack_top, const void *stack_bottom)
{
	if (unlikely(!stack_top || stack_top > stack_bottom))
		return;

	kasan_unpoison_shadow(stack_top, stack_bottom - stack_top);
}
EXPORT_SYMBOL(__asan_allocas_unpoison);

/* Emitted by the compiler to [un]poison local variables. */
#define DEFINE_ASAN_SET_SHADOW(byte) \
	void __asan_set_shadow_##byte(const void *addr, size_t size)	\
	{								\
		__memset((void *)addr, 0x##byte, size);			\
	}								\
	EXPORT_SYMBOL(__asan_set_shadow_##byte)

DEFINE_ASAN_SET_SHADOW(00);
DEFINE_ASAN_SET_SHADOW(f1);
DEFINE_ASAN_SET_SHADOW(f2);
DEFINE_ASAN_SET_SHADOW(f3);
DEFINE_ASAN_SET_SHADOW(f5);
DEFINE_ASAN_SET_SHADOW(f8);