lib/overflow_kunit.c

// SPDX-License-Identifier: GPL-2.0 OR MIT
/*
 * Test cases for arithmetic overflow checks. See:
 * "Running tests with kunit_tool" at Documentation/dev-tools/kunit/start.rst
 *	./tools/testing/kunit/kunit.py run overflow [--raw_output]
 */
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <kunit/device.h>
#include <kunit/test.h>
#include <linux/device.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/overflow.h>
#include <linux/slab.h>
#include <linux/types.h>
#include <linux/vmalloc.h>

#define SKIP(cond, reason)	do {	\
	if (cond) {			\
		kunit_skip(test, reason); \
		return;			\
	}				\
} while (0)

/*
 * Clang 11 and earlier generate unwanted libcalls for signed output
 * on unsigned input.
 */
#if defined(CONFIG_CC_IS_CLANG) && __clang_major__ <= 11
# define SKIP_SIGN_MISMATCH(t)	SKIP(t, "Clang 11 unwanted libcalls")
#else
# define SKIP_SIGN_MISMATCH(t)	do { } while (0)
#endif

/*
 * Clang 13 and earlier generate unwanted libcalls for 64-bit tests on
 * 32-bit hosts.
 */
#if defined(CONFIG_CC_IS_CLANG) && __clang_major__ <= 13 && \
    BITS_PER_LONG != 64
# define SKIP_64_ON_32(t)	SKIP(t, "Clang 13 unwanted libcalls")
#else
# define SKIP_64_ON_32(t)	do { } while (0)
#endif

#define DEFINE_TEST_ARRAY_TYPED(t1, t2, t) \
	static const struct test_ ## t1 ## _ ## t2 ## __ ## t { \
		t1 a; \
		t2 b; \
		t sum, diff, prod; \
		bool s_of, d_of, p_of; \
	} t1 ## _ ## t2 ## __ ## t ## _tests[]

#define DEFINE_TEST_ARRAY(t)	DEFINE_TEST_ARRAY_TYPED(t, t, t)
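
/*
 * Each per-type table below lists operands "a" and "b", the expected
 * (possibly wrapped) sum, difference, and product, and whether each of
 * those three operations is expected to report overflow.
 */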
DEFINE_TEST_ARRAY(u8) = {
	{0, 0, 0, 0, 0, false, false, false},
	{1, 1, 2, 0, 1, false, false, false},
	{0, 1, 1, U8_MAX, 0, false, true, false},
	{1, 0, 1, 1, 0, false, false, false},
	{0, U8_MAX, U8_MAX, 1, 0, false, true, false},
	{U8_MAX, 0, U8_MAX, U8_MAX, 0, false, false, false},
	{1, U8_MAX, 0, 2, U8_MAX, true, true, false},
	{U8_MAX, 1, 0, U8_MAX-1, U8_MAX, true, false, false},
	{U8_MAX, U8_MAX, U8_MAX-1, 0, 1, true, false, true},

	{U8_MAX, U8_MAX-1, U8_MAX-2, 1, 2, true, false, true},
	{U8_MAX-1, U8_MAX, U8_MAX-2, U8_MAX, 2, true, true, true},

	{1U << 3, 1U << 3, 1U << 4, 0, 1U << 6, false, false, false},
	{1U << 4, 1U << 4, 1U << 5, 0, 0, false, false, true},
	{1U << 4, 1U << 3, 3*(1U << 3), 1U << 3, 1U << 7, false, false, false},
	{1U << 7, 1U << 7, 0, 0, 0, true, false, true},

	{48, 32, 80, 16, 0, false, false, true},
	{128, 128, 0, 0, 0, true, false, true},
	{123, 234, 101, 145, 110, true, true, true},
};
DEFINE_TEST_ARRAY(u16) = {
	{0, 0, 0, 0, 0, false, false, false},
	{1, 1, 2, 0, 1, false, false, false},
	{0, 1, 1, U16_MAX, 0, false, true, false},
	{1, 0, 1, 1, 0, false, false, false},
	{0, U16_MAX, U16_MAX, 1, 0, false, true, false},
	{U16_MAX, 0, U16_MAX, U16_MAX, 0, false, false, false},
	{1, U16_MAX, 0, 2, U16_MAX, true, true, false},
	{U16_MAX, 1, 0, U16_MAX-1, U16_MAX, true, false, false},
	{U16_MAX, U16_MAX, U16_MAX-1, 0, 1, true, false, true},

	{U16_MAX, U16_MAX-1, U16_MAX-2, 1, 2, true, false, true},
	{U16_MAX-1, U16_MAX, U16_MAX-2, U16_MAX, 2, true, true, true},

	{1U << 7, 1U << 7, 1U << 8, 0, 1U << 14, false, false, false},
	{1U << 8, 1U << 8, 1U << 9, 0, 0, false, false, true},
	{1U << 8, 1U << 7, 3*(1U << 7), 1U << 7, 1U << 15, false, false, false},
	{1U << 15, 1U << 15, 0, 0, 0, true, false, true},

	{123, 234, 357, 65425, 28782, false, true, false},
	{1234, 2345, 3579, 64425, 10146, false, true, true},
};
DEFINE_TEST_ARRAY(u32) = {
	{0, 0, 0, 0, 0, false, false, false},
	{1, 1, 2, 0, 1, false, false, false},
	{0, 1, 1, U32_MAX, 0, false, true, false},
	{1, 0, 1, 1, 0, false, false, false},
	{0, U32_MAX, U32_MAX, 1, 0, false, true, false},
	{U32_MAX, 0, U32_MAX, U32_MAX, 0, false, false, false},
	{1, U32_MAX, 0, 2, U32_MAX, true, true, false},
	{U32_MAX, 1, 0, U32_MAX-1, U32_MAX, true, false, false},
	{U32_MAX, U32_MAX, U32_MAX-1, 0, 1, true, false, true},

	{U32_MAX, U32_MAX-1, U32_MAX-2, 1, 2, true, false, true},
	{U32_MAX-1, U32_MAX, U32_MAX-2, U32_MAX, 2, true, true, true},

	{1U << 15, 1U << 15, 1U << 16, 0, 1U << 30, false, false, false},
	{1U << 16, 1U << 16, 1U << 17, 0, 0, false, false, true},
	{1U << 16, 1U << 15, 3*(1U << 15), 1U << 15, 1U << 31, false, false, false},
	{1U << 31, 1U << 31, 0, 0, 0, true, false, true},

	{-2U, 1U, -1U, -3U, -2U, false, false, false},
	{-4U, 5U, 1U, -9U, -20U, true, false, true},
};

DEFINE_TEST_ARRAY(u64) = {
	{0, 0, 0, 0, 0, false, false, false},
	{1, 1, 2, 0, 1, false, false, false},
	{0, 1, 1, U64_MAX, 0, false, true, false},
	{1, 0, 1, 1, 0, false, false, false},
	{0, U64_MAX, U64_MAX, 1, 0, false, true, false},
	{U64_MAX, 0, U64_MAX, U64_MAX, 0, false, false, false},
	{1, U64_MAX, 0, 2, U64_MAX, true, true, false},
	{U64_MAX, 1, 0, U64_MAX-1, U64_MAX, true, false, false},
	{U64_MAX, U64_MAX, U64_MAX-1, 0, 1, true, false, true},

	{U64_MAX, U64_MAX-1, U64_MAX-2, 1, 2, true, false, true},
	{U64_MAX-1, U64_MAX, U64_MAX-2, U64_MAX, 2, true, true, true},

	{1ULL << 31, 1ULL << 31, 1ULL << 32, 0, 1ULL << 62, false, false, false},
	{1ULL << 32, 1ULL << 32, 1ULL << 33, 0, 0, false, false, true},
	{1ULL << 32, 1ULL << 31, 3*(1ULL << 31), 1ULL << 31, 1ULL << 63, false, false, false},
	{1ULL << 63, 1ULL << 63, 0, 0, 0, true, false, true},
	{1000000000ULL /* 10^9 */, 10000000000ULL /* 10^10 */,
	 11000000000ULL, 18446744064709551616ULL, 10000000000000000000ULL,
	 false, true, false},
	{-15ULL, 10ULL, -5ULL, -25ULL, -150ULL, false, false, true},
};

DEFINE_TEST_ARRAY(s8) = {
	{0, 0, 0, 0, 0, false, false, false},

	{0, S8_MAX, S8_MAX, -S8_MAX, 0, false, false, false},
	{S8_MAX, 0, S8_MAX, S8_MAX, 0, false, false, false},
	{0, S8_MIN, S8_MIN, S8_MIN, 0, false, true, false},
	{S8_MIN, 0, S8_MIN, S8_MIN, 0, false, false, false},

	{-1, S8_MIN, S8_MAX, S8_MAX, S8_MIN, true, false, true},
	{S8_MIN, -1, S8_MAX, -S8_MAX, S8_MIN, true, false, true},
	{-1, S8_MAX, S8_MAX-1, S8_MIN, -S8_MAX, false, false, false},
	{S8_MAX, -1, S8_MAX-1, S8_MIN, -S8_MAX, false, true, false},
	{-1, -S8_MAX, S8_MIN, S8_MAX-1, S8_MAX, false, false, false},
	{-S8_MAX, -1, S8_MIN, S8_MIN+2, S8_MAX, false, false, false},

	{1, S8_MIN, -S8_MAX, -S8_MAX, S8_MIN, false, true, false},
	{S8_MIN, 1, -S8_MAX, S8_MAX, S8_MIN, false, true, false},
	{1, S8_MAX, S8_MIN, S8_MIN+2, S8_MAX, true, false, false},
	{S8_MAX, 1, S8_MIN, S8_MAX-1, S8_MAX, true, false, false},

	{S8_MIN, S8_MIN, 0, 0, 0, true, false, true},
	{S8_MAX, S8_MAX, -2, 0, 1, true, false, true},

	{-4, -32, -36, 28, -128, false, false, true},
	{-4, 32, 28, -36, -128, false, false, false},
};

DEFINE_TEST_ARRAY(s16) = {
	{0, 0, 0, 0, 0, false, false, false},

	{0, S16_MAX, S16_MAX, -S16_MAX, 0, false, false, false},
	{S16_MAX, 0, S16_MAX, S16_MAX, 0, false, false, false},
	{0, S16_MIN, S16_MIN, S16_MIN, 0, false, true, false},
	{S16_MIN, 0, S16_MIN, S16_MIN, 0, false, false, false},

	{-1, S16_MIN, S16_MAX, S16_MAX, S16_MIN, true, false, true},
	{S16_MIN, -1, S16_MAX, -S16_MAX, S16_MIN, true, false, true},
	{-1, S16_MAX, S16_MAX-1, S16_MIN, -S16_MAX, false, false, false},
	{S16_MAX, -1, S16_MAX-1, S16_MIN, -S16_MAX, false, true, false},
	{-1, -S16_MAX, S16_MIN, S16_MAX-1, S16_MAX, false, false, false},
	{-S16_MAX, -1, S16_MIN, S16_MIN+2, S16_MAX, false, false, false},

	{1, S16_MIN, -S16_MAX, -S16_MAX, S16_MIN, false, true, false},
	{S16_MIN, 1, -S16_MAX, S16_MAX, S16_MIN, false, true, false},
	{1, S16_MAX, S16_MIN, S16_MIN+2, S16_MAX, true, false, false},
	{S16_MAX, 1, S16_MIN, S16_MAX-1, S16_MAX, true, false, false},

	{S16_MIN, S16_MIN, 0, 0, 0, true, false, true},
	{S16_MAX, S16_MAX, -2, 0, 1, true, false, true},
};
DEFINE_TEST_ARRAY(s32) = {
	{0, 0, 0, 0, 0, false, false, false},

	{0, S32_MAX, S32_MAX, -S32_MAX, 0, false, false, false},
	{S32_MAX, 0, S32_MAX, S32_MAX, 0, false, false, false},
	{0, S32_MIN, S32_MIN, S32_MIN, 0, false, true, false},
	{S32_MIN, 0, S32_MIN, S32_MIN, 0, false, false, false},

	{-1, S32_MIN, S32_MAX, S32_MAX, S32_MIN, true, false, true},
	{S32_MIN, -1, S32_MAX, -S32_MAX, S32_MIN, true, false, true},
	{-1, S32_MAX, S32_MAX-1, S32_MIN, -S32_MAX, false, false, false},
	{S32_MAX, -1, S32_MAX-1, S32_MIN, -S32_MAX, false, true, false},
	{-1, -S32_MAX, S32_MIN, S32_MAX-1, S32_MAX, false, false, false},
	{-S32_MAX, -1, S32_MIN, S32_MIN+2, S32_MAX, false, false, false},

	{1, S32_MIN, -S32_MAX, -S32_MAX, S32_MIN, false, true, false},
	{S32_MIN, 1, -S32_MAX, S32_MAX, S32_MIN, false, true, false},
	{1, S32_MAX, S32_MIN, S32_MIN+2, S32_MAX, true, false, false},
	{S32_MAX, 1, S32_MIN, S32_MAX-1, S32_MAX, true, false, false},

	{S32_MIN, S32_MIN, 0, 0, 0, true, false, true},
	{S32_MAX, S32_MAX, -2, 0, 1, true, false, true},
};

DEFINE_TEST_ARRAY(s64) = {
	{0, 0, 0, 0, 0, false, false, false},

	{0, S64_MAX, S64_MAX, -S64_MAX, 0, false, false, false},
	{S64_MAX, 0, S64_MAX, S64_MAX, 0, false, false, false},
	{0, S64_MIN, S64_MIN, S64_MIN, 0, false, true, false},
	{S64_MIN, 0, S64_MIN, S64_MIN, 0, false, false, false},

	{-1, S64_MIN, S64_MAX, S64_MAX, S64_MIN, true, false, true},
	{S64_MIN, -1, S64_MAX, -S64_MAX, S64_MIN, true, false, true},
	{-1, S64_MAX, S64_MAX-1, S64_MIN, -S64_MAX, false, false, false},
	{S64_MAX, -1, S64_MAX-1, S64_MIN, -S64_MAX, false, true, false},
	{-1, -S64_MAX, S64_MIN, S64_MAX-1, S64_MAX, false, false, false},
	{-S64_MAX, -1, S64_MIN, S64_MIN+2, S64_MAX, false, false, false},

	{1, S64_MIN, -S64_MAX, -S64_MAX, S64_MIN, false, true, false},
	{S64_MIN, 1, -S64_MAX, S64_MAX, S64_MIN, false, true, false},
	{1, S64_MAX, S64_MIN, S64_MIN+2, S64_MAX, true, false, false},
	{S64_MAX, 1, S64_MIN, S64_MAX-1, S64_MAX, true, false, false},

	{S64_MIN, S64_MIN, 0, 0, 0, true, false, true},
	{S64_MAX, S64_MAX, -2, 0, 1, true, false, true},

	{-1, -1, -2, 0, 1, false, false, false},
	{-1, -128, -129, 127, 128, false, false, false},
	{-128, -1, -129, -127, 128, false, false, false},
	{0, -S64_MAX, -S64_MAX, S64_MAX, 0, false, false, false},
};

#define check_one_op(t, fmt, op, sym, a, b, r, of) do { \
	int _a_orig = a, _a_bump = a + 1; \
	int _b_orig = b, _b_bump = b + 1; \
	bool _of; \
	t _r; \
	\
	_of = check_ ## op ## _overflow(a, b, &_r); \
	KUNIT_EXPECT_EQ_MSG(test, _of, of, \
		"expected check "fmt" "sym" "fmt" to%s overflow (type %s)\n", \
		a, b, of ? "" : " not", #t); \
	KUNIT_EXPECT_EQ_MSG(test, _r, r, \
		"expected check "fmt" "sym" "fmt" == "fmt", got "fmt" (type %s)\n", \
		a, b, r, _r, #t); \
	/* Check for internal macro side-effects. */ \
	_of = check_ ## op ## _overflow(_a_orig++, _b_orig++, &_r); \
	KUNIT_EXPECT_EQ_MSG(test, _a_orig, _a_bump, \
		"Unexpected check " #op " macro side-effect!\n"); \
	KUNIT_EXPECT_EQ_MSG(test, _b_orig, _b_bump, \
		"Unexpected check " #op " macro side-effect!\n"); \
	\
	_r = wrapping_ ## op(t, a, b); \
	KUNIT_EXPECT_TRUE_MSG(test, _r == r, \
		"expected wrap "fmt" "sym" "fmt" == "fmt", got "fmt" (type %s)\n", \
		a, b, r, _r, #t); \
	/* Check for internal macro side-effects. */ \
	_a_orig = a; \
	_b_orig = b; \
	_r = wrapping_ ## op(t, _a_orig++, _b_orig++); \
	KUNIT_EXPECT_EQ_MSG(test, _a_orig, _a_bump, \
		"Unexpected wrap " #op " macro side-effect!\n"); \
	KUNIT_EXPECT_EQ_MSG(test, _b_orig, _b_bump, \
		"Unexpected wrap " #op " macro side-effect!\n"); \
} while (0)
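
/*
 * bump_counter()/get_index() exist to catch double evaluation:
 * check_self_op() uses get_index() as an array subscript and then verifies
 * that global_counter was bumped exactly once.
 */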
static int global_counter;
static void bump_counter(void)
{
	global_counter++;
}

static int get_index(void)
{
	volatile int index = 0;
	bump_counter();
	return index;
}

#define check_self_op(fmt, op, sym, a, b) do { \
	typeof(a + 0) _a = a; \
	typeof(b + 0) _b = b; \
	typeof(a + 0) _a_sym = a; \
	typeof(a + 0) _a_orig[1] = { a }; \
	typeof(b + 0) _b_orig = b; \
	typeof(b + 0) _b_bump = b + 1; \
	typeof(a + 0) _r; \
	\
	_a_sym sym _b; \
	_r = wrapping_ ## op(_a, _b); \
	KUNIT_EXPECT_TRUE_MSG(test, _r == _a_sym, \
		"expected "fmt" "#op" "fmt" == "fmt", got "fmt"\n", \
		a, b, _a_sym, _r); \
	KUNIT_EXPECT_TRUE_MSG(test, _a == _a_sym, \
		"expected "fmt" "#op" "fmt" == "fmt", got "fmt"\n", \
		a, b, _a_sym, _a); \
	/* Check for internal macro side-effects. */ \
	global_counter = 0; \
	wrapping_ ## op(_a_orig[get_index()], _b_orig++); \
	KUNIT_EXPECT_EQ_MSG(test, global_counter, 1, \
		"Unexpected wrapping_" #op " macro side-effect on arg1!\n"); \
	KUNIT_EXPECT_EQ_MSG(test, _b_orig, _b_bump, \
		"Unexpected wrapping_" #op " macro side-effect on arg2!\n"); \
} while (0)

#define DEFINE_TEST_FUNC_TYPED(n, t, fmt) \
static void do_test_ ## n(struct kunit *test, const struct test_ ## n *p) \
{ \
	/* check_{add,sub,mul}_overflow() and wrapping_{add,sub,mul} */ \
	check_one_op(t, fmt, add, "+", p->a, p->b, p->sum, p->s_of); \
	check_one_op(t, fmt, add, "+", p->b, p->a, p->sum, p->s_of); \
	check_one_op(t, fmt, sub, "-", p->a, p->b, p->diff, p->d_of); \
	check_one_op(t, fmt, mul, "*", p->a, p->b, p->prod, p->p_of); \
	check_one_op(t, fmt, mul, "*", p->b, p->a, p->prod, p->p_of); \
	/* wrapping_assign_{add,sub}() */ \
	check_self_op(fmt, assign_add, +=, p->a, p->b); \
	check_self_op(fmt, assign_add, +=, p->b, p->a); \
	check_self_op(fmt, assign_sub, -=, p->a, p->b); \
} \
	\
static void n ## _overflow_test(struct kunit *test) { \
	unsigned i; \
	\
	SKIP_64_ON_32(__same_type(t, u64)); \
	SKIP_64_ON_32(__same_type(t, s64)); \
	SKIP_SIGN_MISMATCH(__same_type(n ## _tests[0].a, u32) && \
			   __same_type(n ## _tests[0].b, u32) && \
			   __same_type(n ## _tests[0].sum, int)); \
	\
	for (i = 0; i < ARRAY_SIZE(n ## _tests); ++i) \
		do_test_ ## n(test, &n ## _tests[i]); \
	kunit_info(test, "%zu %s arithmetic tests finished\n", \
		ARRAY_SIZE(n ## _tests), #n); \
}

#define DEFINE_TEST_FUNC(t, fmt) \
	DEFINE_TEST_FUNC_TYPED(t ## _ ## t ## __ ## t, t, fmt)

DEFINE_TEST_FUNC(u8, "%d");
DEFINE_TEST_FUNC(s8, "%d");
DEFINE_TEST_FUNC(u16, "%d");
DEFINE_TEST_FUNC(s16, "%d");
DEFINE_TEST_FUNC(u32, "%u");
DEFINE_TEST_FUNC(s32, "%d");
DEFINE_TEST_FUNC(u64, "%llu");
DEFINE_TEST_FUNC(s64, "%lld");
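
/*
 * Mixed-type cases: the operands and the result below use different types,
 * so truncation of the stored result is also expected to be reported as
 * overflow (see e.g. the {U8_MAX + 1, 0, ...} entry).
 */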
DEFINE_TEST_ARRAY_TYPED(u32, u32, u8) = {
	{0, 0, 0, 0, 0, false, false, false},
	{U8_MAX, 2, 1, U8_MAX - 2, U8_MAX - 1, true, false, true},
	{U8_MAX + 1, 0, 0, 0, 0, true, true, false},
};
DEFINE_TEST_FUNC_TYPED(u32_u32__u8, u8, "%d");

DEFINE_TEST_ARRAY_TYPED(u32, u32, int) = {
	{0, 0, 0, 0, 0, false, false, false},
	{U32_MAX, 0, -1, -1, 0, true, true, false},
};
DEFINE_TEST_FUNC_TYPED(u32_u32__int, int, "%d");

DEFINE_TEST_ARRAY_TYPED(u8, u8, int) = {
	{0, 0, 0, 0, 0, false, false, false},
	{U8_MAX, U8_MAX, 2 * U8_MAX, 0, U8_MAX * U8_MAX, false, false, false},
	{1, 2, 3, -1, 2, false, false, false},
};
DEFINE_TEST_FUNC_TYPED(u8_u8__int, int, "%d");

DEFINE_TEST_ARRAY_TYPED(int, int, u8) = {
	{0, 0, 0, 0, 0, false, false, false},
	{1, 2, 3, U8_MAX, 2, false, true, false},
	{-1, 0, U8_MAX, U8_MAX, 0, true, true, false},
};
DEFINE_TEST_FUNC_TYPED(int_int__u8, u8, "%d");

/* Args are: value, shift, type, expected result, overflow expected */
#define TEST_ONE_SHIFT(a, s, t, expect, of) do { \
	typeof(a) __a = (a); \
	typeof(s) __s = (s); \
	t __e = (expect); \
	t __d; \
	bool __of = check_shl_overflow(__a, __s, &__d); \
	if (__of != of) { \
		KUNIT_EXPECT_EQ_MSG(test, __of, of, \
			"expected (%s)(%s << %s) to%s overflow\n", \
			#t, #a, #s, of ? "" : " not"); \
	} else if (!__of && __d != __e) { \
		KUNIT_EXPECT_EQ_MSG(test, __d, __e, \
			"expected (%s)(%s << %s) == %s\n", \
			#t, #a, #s, #expect); \
		if ((t)-1 < 0) \
			kunit_info(test, "got %lld\n", (s64)__d); \
		else \
			kunit_info(test, "got %llu\n", (u64)__d); \
	} \
	count++; \
} while (0)
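
/*
 * Each of the shift_*_test() functions below declares a local "count" that
 * TEST_ONE_SHIFT() increments; it is only used for the summary kunit_info()
 * line printed at the end of each function.
 */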
static void shift_sane_test(struct kunit *test)
{
	int count = 0;

	/* Sane shifts. */
	TEST_ONE_SHIFT(1, 0, u8, 1 << 0, false);
	TEST_ONE_SHIFT(1, 4, u8, 1 << 4, false);
	TEST_ONE_SHIFT(1, 7, u8, 1 << 7, false);
	TEST_ONE_SHIFT(0xF, 4, u8, 0xF << 4, false);
	TEST_ONE_SHIFT(1, 0, u16, 1 << 0, false);
	TEST_ONE_SHIFT(1, 10, u16, 1 << 10, false);
	TEST_ONE_SHIFT(1, 15, u16, 1 << 15, false);
	TEST_ONE_SHIFT(0xFF, 8, u16, 0xFF << 8, false);
	TEST_ONE_SHIFT(1, 0, int, 1 << 0, false);
	TEST_ONE_SHIFT(1, 16, int, 1 << 16, false);
	TEST_ONE_SHIFT(1, 30, int, 1 << 30, false);
	TEST_ONE_SHIFT(1, 0, s32, 1 << 0, false);
	TEST_ONE_SHIFT(1, 16, s32, 1 << 16, false);
	TEST_ONE_SHIFT(1, 30, s32, 1 << 30, false);
	TEST_ONE_SHIFT(1, 0, unsigned int, 1U << 0, false);
	TEST_ONE_SHIFT(1, 20, unsigned int, 1U << 20, false);
	TEST_ONE_SHIFT(1, 31, unsigned int, 1U << 31, false);
	TEST_ONE_SHIFT(0xFFFFU, 16, unsigned int, 0xFFFFU << 16, false);
	TEST_ONE_SHIFT(1, 0, u32, 1U << 0, false);
	TEST_ONE_SHIFT(1, 20, u32, 1U << 20, false);
	TEST_ONE_SHIFT(1, 31, u32, 1U << 31, false);
	TEST_ONE_SHIFT(0xFFFFU, 16, u32, 0xFFFFU << 16, false);
	TEST_ONE_SHIFT(1, 0, u64, 1ULL << 0, false);
	TEST_ONE_SHIFT(1, 40, u64, 1ULL << 40, false);
	TEST_ONE_SHIFT(1, 63, u64, 1ULL << 63, false);
	TEST_ONE_SHIFT(0xFFFFFFFFULL, 32, u64, 0xFFFFFFFFULL << 32, false);

	/* Sane shift: start and end with 0, without a too-wide shift. */
	TEST_ONE_SHIFT(0, 7, u8, 0, false);
	TEST_ONE_SHIFT(0, 15, u16, 0, false);
	TEST_ONE_SHIFT(0, 31, unsigned int, 0, false);
	TEST_ONE_SHIFT(0, 31, u32, 0, false);
	TEST_ONE_SHIFT(0, 63, u64, 0, false);

	/* Sane shift: start and end with 0, without reaching signed bit. */
	TEST_ONE_SHIFT(0, 6, s8, 0, false);
	TEST_ONE_SHIFT(0, 14, s16, 0, false);
	TEST_ONE_SHIFT(0, 30, int, 0, false);
	TEST_ONE_SHIFT(0, 30, s32, 0, false);
	TEST_ONE_SHIFT(0, 62, s64, 0, false);

	kunit_info(test, "%d sane shift tests finished\n", count);
}

static void shift_overflow_test(struct kunit *test)
{
	int count = 0;

	/* Overflow: shifted the bit off the end. */
	TEST_ONE_SHIFT(1, 8, u8, 0, true);
	TEST_ONE_SHIFT(1, 16, u16, 0, true);
	TEST_ONE_SHIFT(1, 32, unsigned int, 0, true);
	TEST_ONE_SHIFT(1, 32, u32, 0, true);
	TEST_ONE_SHIFT(1, 64, u64, 0, true);

	/* Overflow: shifted into the signed bit. */
	TEST_ONE_SHIFT(1, 7, s8, 0, true);
	TEST_ONE_SHIFT(1, 15, s16, 0, true);
	TEST_ONE_SHIFT(1, 31, int, 0, true);
	TEST_ONE_SHIFT(1, 31, s32, 0, true);
	TEST_ONE_SHIFT(1, 63, s64, 0, true);

	/* Overflow: high bit falls off unsigned types. */
	/* 10010110 */
	TEST_ONE_SHIFT(150, 1, u8, 0, true);
	/* 1000100010010110 */
	TEST_ONE_SHIFT(34966, 1, u16, 0, true);
	/* 10000100000010001000100010010110 */
	TEST_ONE_SHIFT(2215151766U, 1, u32, 0, true);
	TEST_ONE_SHIFT(2215151766U, 1, unsigned int, 0, true);
	/* 1000001000010000010000000100000010000100000010001000100010010110 */
	TEST_ONE_SHIFT(9372061470395238550ULL, 1, u64, 0, true);

	/* Overflow: bit shifted into signed bit on signed types. */
	/* 01001011 */
	TEST_ONE_SHIFT(75, 1, s8, 0, true);
	/* 0100010001001011 */
	TEST_ONE_SHIFT(17483, 1, s16, 0, true);
	/* 01000010000001000100010001001011 */
	TEST_ONE_SHIFT(1107575883, 1, s32, 0, true);
	TEST_ONE_SHIFT(1107575883, 1, int, 0, true);
	/* 0100000100001000001000000010000001000010000001000100010001001011 */
	TEST_ONE_SHIFT(4686030735197619275LL, 1, s64, 0, true);

	/* Overflow: bit shifted past signed bit on signed types. */
	/* 01001011 */
	TEST_ONE_SHIFT(75, 2, s8, 0, true);
	/* 0100010001001011 */
	TEST_ONE_SHIFT(17483, 2, s16, 0, true);
	/* 01000010000001000100010001001011 */
	TEST_ONE_SHIFT(1107575883, 2, s32, 0, true);
	TEST_ONE_SHIFT(1107575883, 2, int, 0, true);
	/* 0100000100001000001000000010000001000010000001000100010001001011 */
	TEST_ONE_SHIFT(4686030735197619275LL, 2, s64, 0, true);

	kunit_info(test, "%d overflow shift tests finished\n", count);
}

static void shift_truncate_test(struct kunit *test)
{
	int count = 0;

	/* Overflow: values larger than destination type. */
	TEST_ONE_SHIFT(0x100, 0, u8, 0, true);
	TEST_ONE_SHIFT(0xFF, 0, s8, 0, true);
	TEST_ONE_SHIFT(0x10000U, 0, u16, 0, true);
	TEST_ONE_SHIFT(0xFFFFU, 0, s16, 0, true);
	TEST_ONE_SHIFT(0x100000000ULL, 0, u32, 0, true);
	TEST_ONE_SHIFT(0x100000000ULL, 0, unsigned int, 0, true);
	TEST_ONE_SHIFT(0xFFFFFFFFUL, 0, s32, 0, true);
	TEST_ONE_SHIFT(0xFFFFFFFFUL, 0, int, 0, true);
	TEST_ONE_SHIFT(0xFFFFFFFFFFFFFFFFULL, 0, s64, 0, true);

	/* Overflow: shifted at or beyond entire type's bit width. */
	TEST_ONE_SHIFT(0, 8, u8, 0, true);
	TEST_ONE_SHIFT(0, 9, u8, 0, true);
	TEST_ONE_SHIFT(0, 8, s8, 0, true);
	TEST_ONE_SHIFT(0, 9, s8, 0, true);
	TEST_ONE_SHIFT(0, 16, u16, 0, true);
	TEST_ONE_SHIFT(0, 17, u16, 0, true);
	TEST_ONE_SHIFT(0, 16, s16, 0, true);
	TEST_ONE_SHIFT(0, 17, s16, 0, true);
	TEST_ONE_SHIFT(0, 32, u32, 0, true);
	TEST_ONE_SHIFT(0, 33, u32, 0, true);
	TEST_ONE_SHIFT(0, 32, int, 0, true);
	TEST_ONE_SHIFT(0, 33, int, 0, true);
	TEST_ONE_SHIFT(0, 32, s32, 0, true);
	TEST_ONE_SHIFT(0, 33, s32, 0, true);
	TEST_ONE_SHIFT(0, 64, u64, 0, true);
	TEST_ONE_SHIFT(0, 65, u64, 0, true);
	TEST_ONE_SHIFT(0, 64, s64, 0, true);
	TEST_ONE_SHIFT(0, 65, s64, 0, true);

	kunit_info(test, "%d truncate shift tests finished\n", count);
}

static void shift_nonsense_test(struct kunit *test)
{
	int count = 0;

	/* Nonsense: negative initial value. */
	TEST_ONE_SHIFT(-1, 0, s8, 0, true);
	TEST_ONE_SHIFT(-1, 0, u8, 0, true);
	TEST_ONE_SHIFT(-5, 0, s16, 0, true);
	TEST_ONE_SHIFT(-5, 0, u16, 0, true);
	TEST_ONE_SHIFT(-10, 0, int, 0, true);
	TEST_ONE_SHIFT(-10, 0, unsigned int, 0, true);
	TEST_ONE_SHIFT(-100, 0, s32, 0, true);
	TEST_ONE_SHIFT(-100, 0, u32, 0, true);
	TEST_ONE_SHIFT(-10000, 0, s64, 0, true);
	TEST_ONE_SHIFT(-10000, 0, u64, 0, true);

	/* Nonsense: negative shift values. */
	TEST_ONE_SHIFT(0, -5, s8, 0, true);
	TEST_ONE_SHIFT(0, -5, u8, 0, true);
	TEST_ONE_SHIFT(0, -10, s16, 0, true);
	TEST_ONE_SHIFT(0, -10, u16, 0, true);
	TEST_ONE_SHIFT(0, -15, int, 0, true);
	TEST_ONE_SHIFT(0, -15, unsigned int, 0, true);
	TEST_ONE_SHIFT(0, -20, s32, 0, true);
	TEST_ONE_SHIFT(0, -20, u32, 0, true);
	TEST_ONE_SHIFT(0, -30, s64, 0, true);
	TEST_ONE_SHIFT(0, -30, u64, 0, true);

	/*
	 * Corner case: for unsigned types, we fail when we've shifted
	 * through the entire width of bits. For signed types, we might
	 * want to match this behavior, but that would mean noticing if
	 * we shift through all but the signed bit, and this is not
	 * currently detected (but we'll notice an overflow into the
	 * signed bit). So, for now, we will test this condition but
	 * mark it as not expected to overflow.
	 */
	TEST_ONE_SHIFT(0, 7, s8, 0, false);
	TEST_ONE_SHIFT(0, 15, s16, 0, false);
	TEST_ONE_SHIFT(0, 31, int, 0, false);
	TEST_ONE_SHIFT(0, 31, s32, 0, false);
	TEST_ONE_SHIFT(0, 63, s64, 0, false);

	kunit_info(test, "%d nonsense shift tests finished\n", count);
}
#undef TEST_ONE_SHIFT

/*
 * Deal with the various forms of allocator arguments. See comments above
 * the DEFINE_TEST_ALLOC() instances for mapping of the "bits".
 */
#define alloc_GFP		 (GFP_KERNEL | __GFP_NOWARN)
#define alloc010(alloc, arg, sz) alloc(sz, alloc_GFP)
#define alloc011(alloc, arg, sz) alloc(sz, alloc_GFP, NUMA_NO_NODE)
#define alloc000(alloc, arg, sz) alloc(sz)
#define alloc001(alloc, arg, sz) alloc(sz, NUMA_NO_NODE)
#define alloc110(alloc, arg, sz) alloc(arg, sz, alloc_GFP)
#define free0(free, arg, ptr)	 free(ptr)
#define free1(free, arg, ptr)	 free(arg, ptr)

/* Wrap around to 16K */
#define TEST_SIZE		 (5 * 4096)
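
/*
 * With TEST_SIZE being 5 * 4096 (20 KiB), the size_t product a * b in
 * DEFINE_TEST_ALLOC() below wraps around to 16 KiB, so the "wrapped"
 * allocation is expected to succeed, while array_size(a, b) saturates to
 * SIZE_MAX and the "saturated" allocation is expected to fail.
 */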
#define DEFINE_TEST_ALLOC(func, free_func, want_arg, want_gfp, want_node)\
static void test_ ## func (struct kunit *test, void *arg) \
{ \
	volatile size_t a = TEST_SIZE; \
	volatile size_t b = (SIZE_MAX / TEST_SIZE) + 1; \
	void *ptr; \
	\
	/* Tiny allocation test. */ \
	ptr = alloc ## want_arg ## want_gfp ## want_node (func, arg, 1);\
	KUNIT_ASSERT_NOT_ERR_OR_NULL_MSG(test, ptr, \
			    #func " failed regular allocation?!\n"); \
	free ## want_arg (free_func, arg, ptr); \
	\
	/* Wrapped allocation test. */ \
	ptr = alloc ## want_arg ## want_gfp ## want_node (func, arg, \
							  a * b); \
	KUNIT_ASSERT_NOT_ERR_OR_NULL_MSG(test, ptr, \
			    #func " unexpectedly failed bad wrapping?!\n"); \
	free ## want_arg (free_func, arg, ptr); \
	\
	/* Saturated allocation test. */ \
	ptr = alloc ## want_arg ## want_gfp ## want_node (func, arg, \
							  array_size(a, b)); \
	if (ptr) { \
		KUNIT_FAIL(test, #func " missed saturation!\n"); \
		free ## want_arg (free_func, arg, ptr); \
	} \
}

/*
 * Allocator uses a trailing node argument --------+  (e.g. kmalloc_node())
 * Allocator uses the gfp_t argument -----------+  |  (e.g. kmalloc())
 * Allocator uses a special leading argument +  |  |  (e.g. devm_kmalloc())
 *                                           |  |  |
 */
DEFINE_TEST_ALLOC(kmalloc,	 kfree,	     0, 1, 0);
DEFINE_TEST_ALLOC(kmalloc_node,	 kfree,	     0, 1, 1);
DEFINE_TEST_ALLOC(kzalloc,	 kfree,	     0, 1, 0);
DEFINE_TEST_ALLOC(kzalloc_node,	 kfree,	     0, 1, 1);
DEFINE_TEST_ALLOC(__vmalloc,	 vfree,	     0, 1, 0);
DEFINE_TEST_ALLOC(kvmalloc,	 kvfree,     0, 1, 0);
DEFINE_TEST_ALLOC(kvmalloc_node, kvfree,     0, 1, 1);
DEFINE_TEST_ALLOC(kvzalloc,	 kvfree,     0, 1, 0);
DEFINE_TEST_ALLOC(kvzalloc_node, kvfree,     0, 1, 1);
DEFINE_TEST_ALLOC(devm_kmalloc,	 devm_kfree, 1, 1, 0);
DEFINE_TEST_ALLOC(devm_kzalloc,	 devm_kfree, 1, 1, 0);

static void overflow_allocation_test(struct kunit *test)
{
	struct device *dev;
	int count = 0;

#define check_allocation_overflow(alloc)	do {	\
	count++;					\
	test_ ## alloc(test, dev);			\
} while (0)

	/* Create dummy device for devm_kmalloc()-family tests. */
	dev = kunit_device_register(test, "overflow-test");
	KUNIT_ASSERT_FALSE_MSG(test, IS_ERR(dev),
			       "Cannot register test device\n");

	check_allocation_overflow(kmalloc);
	check_allocation_overflow(kmalloc_node);
	check_allocation_overflow(kzalloc);
	check_allocation_overflow(kzalloc_node);
	check_allocation_overflow(__vmalloc);
	check_allocation_overflow(kvmalloc);
	check_allocation_overflow(kvmalloc_node);
	check_allocation_overflow(kvzalloc);
	check_allocation_overflow(kvzalloc_node);
	check_allocation_overflow(devm_kmalloc);
	check_allocation_overflow(devm_kzalloc);

	kunit_info(test, "%d allocation overflow tests finished\n", count);
#undef check_allocation_overflow
}

struct __test_flex_array {
	unsigned long flags;
	size_t count;
	unsigned long data[];
};

static void overflow_size_helpers_test(struct kunit *test)
{
	/* Make sure struct_size() can be used in a constant expression. */
	u8 ce_array[struct_size_t(struct __test_flex_array, data, 55)];
	struct __test_flex_array *obj;
	int count = 0;
	int var;
	volatile int unconst = 0;

	/* Verify constant expression against runtime version. */
	var = 55;
	OPTIMIZER_HIDE_VAR(var);
	KUNIT_EXPECT_EQ(test, sizeof(ce_array), struct_size(obj, data, var));

#define check_one_size_helper(expected, func, args...)	do { \
	size_t _r = func(args); \
	KUNIT_EXPECT_EQ_MSG(test, _r, expected, \
		"expected " #func "(" #args ") to return %zu but got %zu instead\n", \
		(size_t)(expected), _r); \
	count++; \
} while (0)
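
	/*
	 * The size_*() helpers are expected to saturate to SIZE_MAX on
	 * overflow (and to stay saturated once an input is SIZE_MAX)
	 * rather than wrapping around.
	 */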
	var = 4;
	check_one_size_helper(20, size_mul, var++, 5);
	check_one_size_helper(20, size_mul, 4, var++);
	check_one_size_helper(0, size_mul, 0, 3);
	check_one_size_helper(0, size_mul, 3, 0);
	check_one_size_helper(6, size_mul, 2, 3);
	check_one_size_helper(SIZE_MAX, size_mul, SIZE_MAX, 1);
	check_one_size_helper(SIZE_MAX, size_mul, SIZE_MAX, 3);
	check_one_size_helper(SIZE_MAX, size_mul, SIZE_MAX, -3);

	var = 4;
	check_one_size_helper(9, size_add, var++, 5);
	check_one_size_helper(9, size_add, 4, var++);
	check_one_size_helper(9, size_add, 9, 0);
	check_one_size_helper(9, size_add, 0, 9);
	check_one_size_helper(5, size_add, 2, 3);
	check_one_size_helper(SIZE_MAX, size_add, SIZE_MAX, 1);
	check_one_size_helper(SIZE_MAX, size_add, SIZE_MAX, 3);
	check_one_size_helper(SIZE_MAX, size_add, SIZE_MAX, -3);

	var = 4;
	check_one_size_helper(1, size_sub, var--, 3);
	check_one_size_helper(1, size_sub, 4, var--);
	check_one_size_helper(1, size_sub, 3, 2);
	check_one_size_helper(9, size_sub, 9, 0);
	check_one_size_helper(SIZE_MAX, size_sub, 9, -3);
	check_one_size_helper(SIZE_MAX, size_sub, 0, 9);
	check_one_size_helper(SIZE_MAX, size_sub, 2, 3);
	check_one_size_helper(SIZE_MAX, size_sub, SIZE_MAX, 0);
	check_one_size_helper(SIZE_MAX, size_sub, SIZE_MAX, 10);
	check_one_size_helper(SIZE_MAX, size_sub, 0, SIZE_MAX);
	check_one_size_helper(SIZE_MAX, size_sub, 14, SIZE_MAX);
	check_one_size_helper(SIZE_MAX - 2, size_sub, SIZE_MAX - 1, 1);
	check_one_size_helper(SIZE_MAX - 4, size_sub, SIZE_MAX - 1, 3);
	check_one_size_helper(1, size_sub, SIZE_MAX - 1, -3);

	var = 4;
	check_one_size_helper(4 * sizeof(*obj->data),
			      flex_array_size, obj, data, var++);
	check_one_size_helper(5 * sizeof(*obj->data),
			      flex_array_size, obj, data, var++);
	check_one_size_helper(0, flex_array_size, obj, data, 0 + unconst);
	check_one_size_helper(sizeof(*obj->data),
			      flex_array_size, obj, data, 1 + unconst);
	check_one_size_helper(7 * sizeof(*obj->data),
			      flex_array_size, obj, data, 7 + unconst);
	check_one_size_helper(SIZE_MAX,
			      flex_array_size, obj, data, -1 + unconst);
	check_one_size_helper(SIZE_MAX,
			      flex_array_size, obj, data, SIZE_MAX - 4 + unconst);

	var = 4;
	check_one_size_helper(sizeof(*obj) + (4 * sizeof(*obj->data)),
			      struct_size, obj, data, var++);
	check_one_size_helper(sizeof(*obj) + (5 * sizeof(*obj->data)),
			      struct_size, obj, data, var++);
	check_one_size_helper(sizeof(*obj), struct_size, obj, data, 0 + unconst);
	check_one_size_helper(sizeof(*obj) + sizeof(*obj->data),
			      struct_size, obj, data, 1 + unconst);
	check_one_size_helper(SIZE_MAX,
			      struct_size, obj, data, -3 + unconst);
	check_one_size_helper(SIZE_MAX,
			      struct_size, obj, data, SIZE_MAX - 3 + unconst);

	kunit_info(test, "%d overflow size helper tests finished\n", count);
#undef check_one_size_helper
}

static void overflows_type_test(struct kunit *test)
{
	int count = 0;
	unsigned int var;

#define __TEST_OVERFLOWS_TYPE(func, arg1, arg2, of) do { \
	bool __of = func(arg1, arg2); \
	KUNIT_EXPECT_EQ_MSG(test, __of, of, \
		"expected " #func "(" #arg1 ", " #arg2 " to%s overflow\n", \
		of ? "" : " not"); \
	count++; \
} while (0)

	/* Args are: first type, second type, value, overflow expected */
#define TEST_OVERFLOWS_TYPE(__t1, __t2, v, of) do { \
	__t1 t1 = (v); \
	__t2 t2; \
	__TEST_OVERFLOWS_TYPE(__overflows_type, t1, t2, of); \
	__TEST_OVERFLOWS_TYPE(__overflows_type, t1, __t2, of); \
	__TEST_OVERFLOWS_TYPE(__overflows_type_constexpr, t1, t2, of); \
	__TEST_OVERFLOWS_TYPE(__overflows_type_constexpr, t1, __t2, of); \
} while (0)
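
	/*
	 * Each TEST_OVERFLOWS_TYPE() case exercises both a variable and the
	 * bare destination type, through both __overflows_type() and
	 * __overflows_type_constexpr().
	 */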
	TEST_OVERFLOWS_TYPE(u8, u8, U8_MAX, false);
	TEST_OVERFLOWS_TYPE(u8, u16, U8_MAX, false);
	TEST_OVERFLOWS_TYPE(u8, s8, U8_MAX, true);
	TEST_OVERFLOWS_TYPE(u8, s8, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(u8, s8, (u8)S8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u8, s16, U8_MAX, false);
	TEST_OVERFLOWS_TYPE(s8, u8, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(s8, u8, -1, true);
	TEST_OVERFLOWS_TYPE(s8, u8, S8_MIN, true);
	TEST_OVERFLOWS_TYPE(s8, u16, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(s8, u16, -1, true);
	TEST_OVERFLOWS_TYPE(s8, u16, S8_MIN, true);
	TEST_OVERFLOWS_TYPE(s8, u32, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(s8, u32, -1, true);
	TEST_OVERFLOWS_TYPE(s8, u32, S8_MIN, true);
#if BITS_PER_LONG == 64
	TEST_OVERFLOWS_TYPE(s8, u64, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(s8, u64, -1, true);
	TEST_OVERFLOWS_TYPE(s8, u64, S8_MIN, true);
#endif
	TEST_OVERFLOWS_TYPE(s8, s8, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(s8, s8, S8_MIN, false);
	TEST_OVERFLOWS_TYPE(s8, s16, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(s8, s16, S8_MIN, false);
	TEST_OVERFLOWS_TYPE(u16, u8, U8_MAX, false);
	TEST_OVERFLOWS_TYPE(u16, u8, (u16)U8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u16, u8, U16_MAX, true);
	TEST_OVERFLOWS_TYPE(u16, s8, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(u16, s8, (u16)S8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u16, s8, U16_MAX, true);
	TEST_OVERFLOWS_TYPE(u16, s16, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(u16, s16, (u16)S16_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u16, s16, U16_MAX, true);
	TEST_OVERFLOWS_TYPE(u16, u32, U16_MAX, false);
	TEST_OVERFLOWS_TYPE(u16, s32, U16_MAX, false);
	TEST_OVERFLOWS_TYPE(s16, u8, U8_MAX, false);
	TEST_OVERFLOWS_TYPE(s16, u8, (s16)U8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s16, u8, -1, true);
	TEST_OVERFLOWS_TYPE(s16, u8, S16_MIN, true);
	TEST_OVERFLOWS_TYPE(s16, u16, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(s16, u16, -1, true);
	TEST_OVERFLOWS_TYPE(s16, u16, S16_MIN, true);
	TEST_OVERFLOWS_TYPE(s16, u32, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(s16, u32, -1, true);
	TEST_OVERFLOWS_TYPE(s16, u32, S16_MIN, true);
#if BITS_PER_LONG == 64
	TEST_OVERFLOWS_TYPE(s16, u64, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(s16, u64, -1, true);
	TEST_OVERFLOWS_TYPE(s16, u64, S16_MIN, true);
#endif
	TEST_OVERFLOWS_TYPE(s16, s8, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(s16, s8, S8_MIN, false);
	TEST_OVERFLOWS_TYPE(s16, s8, (s16)S8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s16, s8, (s16)S8_MIN - 1, true);
	TEST_OVERFLOWS_TYPE(s16, s8, S16_MAX, true);
	TEST_OVERFLOWS_TYPE(s16, s8, S16_MIN, true);
	TEST_OVERFLOWS_TYPE(s16, s16, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(s16, s16, S16_MIN, false);
	TEST_OVERFLOWS_TYPE(s16, s32, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(s16, s32, S16_MIN, false);
	TEST_OVERFLOWS_TYPE(u32, u8, U8_MAX, false);
	TEST_OVERFLOWS_TYPE(u32, u8, (u32)U8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u32, u8, U32_MAX, true);
	TEST_OVERFLOWS_TYPE(u32, s8, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(u32, s8, (u32)S8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u32, s8, U32_MAX, true);
	TEST_OVERFLOWS_TYPE(u32, u16, U16_MAX, false);
	TEST_OVERFLOWS_TYPE(u32, u16, U16_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u32, u16, U32_MAX, true);
	TEST_OVERFLOWS_TYPE(u32, s16, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(u32, s16, (u32)S16_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u32, s16, U32_MAX, true);
	TEST_OVERFLOWS_TYPE(u32, u32, U32_MAX, false);
	TEST_OVERFLOWS_TYPE(u32, s32, S32_MAX, false);
	TEST_OVERFLOWS_TYPE(u32, s32, U32_MAX, true);
	TEST_OVERFLOWS_TYPE(u32, s32, (u32)S32_MAX + 1, true);
#if BITS_PER_LONG == 64
	TEST_OVERFLOWS_TYPE(u32, u64, U32_MAX, false);
	TEST_OVERFLOWS_TYPE(u32, s64, U32_MAX, false);
#endif
	TEST_OVERFLOWS_TYPE(s32, u8, U8_MAX, false);
	TEST_OVERFLOWS_TYPE(s32, u8, (s32)U8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s32, u16, S32_MAX, true);
	TEST_OVERFLOWS_TYPE(s32, u8, -1, true);
	TEST_OVERFLOWS_TYPE(s32, u8, S32_MIN, true);
	TEST_OVERFLOWS_TYPE(s32, u16, U16_MAX, false);
	TEST_OVERFLOWS_TYPE(s32, u16, (s32)U16_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s32, u16, S32_MAX, true);
	TEST_OVERFLOWS_TYPE(s32, u16, -1, true);
	TEST_OVERFLOWS_TYPE(s32, u16, S32_MIN, true);
	TEST_OVERFLOWS_TYPE(s32, u32, S32_MAX, false);
	TEST_OVERFLOWS_TYPE(s32, u32, -1, true);
	TEST_OVERFLOWS_TYPE(s32, u32, S32_MIN, true);
#if BITS_PER_LONG == 64
	TEST_OVERFLOWS_TYPE(s32, u64, S32_MAX, false);
	TEST_OVERFLOWS_TYPE(s32, u64, -1, true);
	TEST_OVERFLOWS_TYPE(s32, u64, S32_MIN, true);
#endif
	TEST_OVERFLOWS_TYPE(s32, s8, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(s32, s8, S8_MIN, false);
	TEST_OVERFLOWS_TYPE(s32, s8, (s32)S8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s32, s8, (s32)S8_MIN - 1, true);
	TEST_OVERFLOWS_TYPE(s32, s8, S32_MAX, true);
	TEST_OVERFLOWS_TYPE(s32, s8, S32_MIN, true);
	TEST_OVERFLOWS_TYPE(s32, s16, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(s32, s16, S16_MIN, false);
	TEST_OVERFLOWS_TYPE(s32, s16, (s32)S16_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s32, s16, (s32)S16_MIN - 1, true);
	TEST_OVERFLOWS_TYPE(s32, s16, S32_MAX, true);
	TEST_OVERFLOWS_TYPE(s32, s16, S32_MIN, true);
	TEST_OVERFLOWS_TYPE(s32, s32, S32_MAX, false);
	TEST_OVERFLOWS_TYPE(s32, s32, S32_MIN, false);
#if BITS_PER_LONG == 64
	TEST_OVERFLOWS_TYPE(s32, s64, S32_MAX, false);
	TEST_OVERFLOWS_TYPE(s32, s64, S32_MIN, false);
	TEST_OVERFLOWS_TYPE(u64, u8, U64_MAX, true);
	TEST_OVERFLOWS_TYPE(u64, u8, U8_MAX, false);
	TEST_OVERFLOWS_TYPE(u64, u8, (u64)U8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u64, u16, U64_MAX, true);
	TEST_OVERFLOWS_TYPE(u64, u16, U16_MAX, false);
	TEST_OVERFLOWS_TYPE(u64, u16, (u64)U16_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u64, u32, U64_MAX, true);
	TEST_OVERFLOWS_TYPE(u64, u32, U32_MAX, false);
	TEST_OVERFLOWS_TYPE(u64, u32, (u64)U32_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u64, u64, U64_MAX, false);
	TEST_OVERFLOWS_TYPE(u64, s8, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(u64, s8, (u64)S8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u64, s8, U64_MAX, true);
	TEST_OVERFLOWS_TYPE(u64, s16, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(u64, s16, (u64)S16_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u64, s16, U64_MAX, true);
	TEST_OVERFLOWS_TYPE(u64, s32, S32_MAX, false);
	TEST_OVERFLOWS_TYPE(u64, s32, (u64)S32_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(u64, s32, U64_MAX, true);
	TEST_OVERFLOWS_TYPE(u64, s64, S64_MAX, false);
	TEST_OVERFLOWS_TYPE(u64, s64, U64_MAX, true);
	TEST_OVERFLOWS_TYPE(u64, s64, (u64)S64_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s64, u8, S64_MAX, true);
	TEST_OVERFLOWS_TYPE(s64, u8, S64_MIN, true);
	TEST_OVERFLOWS_TYPE(s64, u8, -1, true);
	TEST_OVERFLOWS_TYPE(s64, u8, U8_MAX, false);
	TEST_OVERFLOWS_TYPE(s64, u8, (s64)U8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s64, u16, S64_MAX, true);
	TEST_OVERFLOWS_TYPE(s64, u16, S64_MIN, true);
	TEST_OVERFLOWS_TYPE(s64, u16, -1, true);
	TEST_OVERFLOWS_TYPE(s64, u16, U16_MAX, false);
	TEST_OVERFLOWS_TYPE(s64, u16, (s64)U16_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s64, u32, S64_MAX, true);
	TEST_OVERFLOWS_TYPE(s64, u32, S64_MIN, true);
	TEST_OVERFLOWS_TYPE(s64, u32, -1, true);
	TEST_OVERFLOWS_TYPE(s64, u32, U32_MAX, false);
	TEST_OVERFLOWS_TYPE(s64, u32, (s64)U32_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s64, u64, S64_MAX, false);
	TEST_OVERFLOWS_TYPE(s64, u64, S64_MIN, true);
	TEST_OVERFLOWS_TYPE(s64, u64, -1, true);
	TEST_OVERFLOWS_TYPE(s64, s8, S8_MAX, false);
	TEST_OVERFLOWS_TYPE(s64, s8, S8_MIN, false);
	TEST_OVERFLOWS_TYPE(s64, s8, (s64)S8_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s64, s8, (s64)S8_MIN - 1, true);
	TEST_OVERFLOWS_TYPE(s64, s8, S64_MAX, true);
	TEST_OVERFLOWS_TYPE(s64, s16, S16_MAX, false);
	TEST_OVERFLOWS_TYPE(s64, s16, S16_MIN, false);
	TEST_OVERFLOWS_TYPE(s64, s16, (s64)S16_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s64, s16, (s64)S16_MIN - 1, true);
	TEST_OVERFLOWS_TYPE(s64, s16, S64_MAX, true);
	TEST_OVERFLOWS_TYPE(s64, s32, S32_MAX, false);
	TEST_OVERFLOWS_TYPE(s64, s32, S32_MIN, false);
	TEST_OVERFLOWS_TYPE(s64, s32, (s64)S32_MAX + 1, true);
	TEST_OVERFLOWS_TYPE(s64, s32, (s64)S32_MIN - 1, true);
	TEST_OVERFLOWS_TYPE(s64, s32, S64_MAX, true);
	TEST_OVERFLOWS_TYPE(s64, s64, S64_MAX, false);
	TEST_OVERFLOWS_TYPE(s64, s64, S64_MIN, false);
#endif

	/* Check for macro side-effects. */
	var = INT_MAX - 1;
	__TEST_OVERFLOWS_TYPE(__overflows_type, var++, int, false);
	__TEST_OVERFLOWS_TYPE(__overflows_type, var++, int, false);
	__TEST_OVERFLOWS_TYPE(__overflows_type, var++, int, true);
	var = INT_MAX - 1;
	__TEST_OVERFLOWS_TYPE(overflows_type, var++, int, false);
	__TEST_OVERFLOWS_TYPE(overflows_type, var++, int, false);
	__TEST_OVERFLOWS_TYPE(overflows_type, var++, int, true);

	kunit_info(test, "%d overflows_type() tests finished\n", count);
#undef TEST_OVERFLOWS_TYPE
#undef __TEST_OVERFLOWS_TYPE
}

static void same_type_test(struct kunit *test)
{
	int count = 0;
	int var;

#define TEST_SAME_TYPE(t1, t2, same)		do { \
	typeof(t1) __t1h = type_max(t1); \
	typeof(t1) __t1l = type_min(t1); \
	typeof(t2) __t2h = type_max(t2); \
	typeof(t2) __t2l = type_min(t2); \
	KUNIT_EXPECT_EQ(test, true, __same_type(t1, __t1h)); \
	KUNIT_EXPECT_EQ(test, true, __same_type(t1, __t1l)); \
	KUNIT_EXPECT_EQ(test, true, __same_type(__t1h, t1)); \
	KUNIT_EXPECT_EQ(test, true, __same_type(__t1l, t1)); \
	KUNIT_EXPECT_EQ(test, true, __same_type(t2, __t2h)); \
	KUNIT_EXPECT_EQ(test, true, __same_type(t2, __t2l)); \
	KUNIT_EXPECT_EQ(test, true, __same_type(__t2h, t2)); \
	KUNIT_EXPECT_EQ(test, true, __same_type(__t2l, t2)); \
	KUNIT_EXPECT_EQ(test, same, __same_type(t1, t2)); \
	KUNIT_EXPECT_EQ(test, same, __same_type(t2, __t1h)); \
	KUNIT_EXPECT_EQ(test, same, __same_type(t2, __t1l)); \
	KUNIT_EXPECT_EQ(test, same, __same_type(__t1h, t2)); \
	KUNIT_EXPECT_EQ(test, same, __same_type(__t1l, t2)); \
	KUNIT_EXPECT_EQ(test, same, __same_type(t1, __t2h)); \
	KUNIT_EXPECT_EQ(test, same, __same_type(t1, __t2l)); \
	KUNIT_EXPECT_EQ(test, same, __same_type(__t2h, t1)); \
	KUNIT_EXPECT_EQ(test, same, __same_type(__t2l, t1)); \
} while (0)
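
/*
 * TEST_SAME_TYPE() checks __same_type() against both the bare type names and
 * variables holding type_max()/type_min() of each type, in both argument
 * orders.
 */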
#if BITS_PER_LONG == 64
# define TEST_SAME_TYPE64(base, t, m)	TEST_SAME_TYPE(base, t, m)
#else
# define TEST_SAME_TYPE64(base, t, m)	do { } while (0)
#endif

#define TEST_TYPE_SETS(base, mu8, mu16, mu32, ms8, ms16, ms32, mu64, ms64) \
	do { \
		TEST_SAME_TYPE(base, u8, mu8); \
		TEST_SAME_TYPE(base, u16, mu16); \
		TEST_SAME_TYPE(base, u32, mu32); \
		TEST_SAME_TYPE(base, s8, ms8); \
		TEST_SAME_TYPE(base, s16, ms16); \
		TEST_SAME_TYPE(base, s32, ms32); \
		TEST_SAME_TYPE64(base, u64, mu64); \
		TEST_SAME_TYPE64(base, s64, ms64); \
	} while (0)

	TEST_TYPE_SETS(u8, true, false, false, false, false, false, false, false);
	TEST_TYPE_SETS(u16, false, true, false, false, false, false, false, false);
	TEST_TYPE_SETS(u32, false, false, true, false, false, false, false, false);
	TEST_TYPE_SETS(s8, false, false, false, true, false, false, false, false);
	TEST_TYPE_SETS(s16, false, false, false, false, true, false, false, false);
	TEST_TYPE_SETS(s32, false, false, false, false, false, true, false, false);
#if BITS_PER_LONG == 64
	TEST_TYPE_SETS(u64, false, false, false, false, false, false, true, false);
	TEST_TYPE_SETS(s64, false, false, false, false, false, false, false, true);
#endif

	/* Check for macro side-effects. */
	var = 4;
	KUNIT_EXPECT_EQ(test, var, 4);
	KUNIT_EXPECT_TRUE(test, __same_type(var++, int));
	KUNIT_EXPECT_EQ(test, var, 4);
	KUNIT_EXPECT_TRUE(test, __same_type(int, var++));
	KUNIT_EXPECT_EQ(test, var, 4);
	KUNIT_EXPECT_TRUE(test, __same_type(var++, var++));
	KUNIT_EXPECT_EQ(test, var, 4);

	kunit_info(test, "%d __same_type() tests finished\n", count);

#undef TEST_TYPE_SETS
#undef TEST_SAME_TYPE64
#undef TEST_SAME_TYPE
}

static void castable_to_type_test(struct kunit *test)
{
	int count = 0;

#define TEST_CASTABLE_TO_TYPE(arg1, arg2, pass)	do { \
	bool __pass = castable_to_type(arg1, arg2); \
	KUNIT_EXPECT_EQ_MSG(test, __pass, pass, \
		"expected castable_to_type(" #arg1 ", " #arg2 ") to%s pass\n", \
		pass ? "" : " not"); \
	count++; \
} while (0)
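
	/*
	 * castable_to_type() is expected to accept constant expressions that
	 * fit the destination type and to reject out-of-range constants and
	 * non-constant expressions of a mismatched type; the _VAR and _RANGE
	 * helpers below enumerate those combinations per width.
	 */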
	TEST_CASTABLE_TO_TYPE(16, u8, true);
	TEST_CASTABLE_TO_TYPE(16, u16, true);
	TEST_CASTABLE_TO_TYPE(16, u32, true);
	TEST_CASTABLE_TO_TYPE(16, s8, true);
	TEST_CASTABLE_TO_TYPE(16, s16, true);
	TEST_CASTABLE_TO_TYPE(16, s32, true);
	TEST_CASTABLE_TO_TYPE(-16, s8, true);
	TEST_CASTABLE_TO_TYPE(-16, s16, true);
	TEST_CASTABLE_TO_TYPE(-16, s32, true);
#if BITS_PER_LONG == 64
	TEST_CASTABLE_TO_TYPE(16, u64, true);
	TEST_CASTABLE_TO_TYPE(-16, s64, true);
#endif

#define TEST_CASTABLE_TO_TYPE_VAR(width) do { \
	u ## width u ## width ## var = 0; \
	s ## width s ## width ## var = 0; \
	\
	/* Constant expressions that fit types. */ \
	TEST_CASTABLE_TO_TYPE(type_max(u ## width), u ## width, true); \
	TEST_CASTABLE_TO_TYPE(type_min(u ## width), u ## width, true); \
	TEST_CASTABLE_TO_TYPE(type_max(u ## width), u ## width ## var, true); \
	TEST_CASTABLE_TO_TYPE(type_min(u ## width), u ## width ## var, true); \
	TEST_CASTABLE_TO_TYPE(type_max(s ## width), s ## width, true); \
	TEST_CASTABLE_TO_TYPE(type_min(s ## width), s ## width, true); \
	TEST_CASTABLE_TO_TYPE(type_max(s ## width), s ## width ## var, true); \
	TEST_CASTABLE_TO_TYPE(type_min(u ## width), s ## width ## var, true); \
	/* Constant expressions that do not fit types. */ \
	TEST_CASTABLE_TO_TYPE(type_max(u ## width), s ## width, false); \
	TEST_CASTABLE_TO_TYPE(type_max(u ## width), s ## width ## var, false); \
	TEST_CASTABLE_TO_TYPE(type_min(s ## width), u ## width, false); \
	TEST_CASTABLE_TO_TYPE(type_min(s ## width), u ## width ## var, false); \
	/* Non-constant expression with mismatched type. */ \
	TEST_CASTABLE_TO_TYPE(s ## width ## var, u ## width, false); \
	TEST_CASTABLE_TO_TYPE(u ## width ## var, s ## width, false); \
} while (0)

#define TEST_CASTABLE_TO_TYPE_RANGE(width) do { \
	unsigned long big = U ## width ## _MAX; \
	signed long small = S ## width ## _MIN; \
	u ## width u ## width ## var = 0; \
	s ## width s ## width ## var = 0; \
	\
	/* Constant expression in range. */ \
	TEST_CASTABLE_TO_TYPE(U ## width ## _MAX, u ## width, true); \
	TEST_CASTABLE_TO_TYPE(U ## width ## _MAX, u ## width ## var, true); \
	TEST_CASTABLE_TO_TYPE(S ## width ## _MIN, s ## width, true); \
	TEST_CASTABLE_TO_TYPE(S ## width ## _MIN, s ## width ## var, true); \
	/* Constant expression out of range. */ \
	TEST_CASTABLE_TO_TYPE((unsigned long)U ## width ## _MAX + 1, u ## width, false); \
	TEST_CASTABLE_TO_TYPE((unsigned long)U ## width ## _MAX + 1, u ## width ## var, false); \
	TEST_CASTABLE_TO_TYPE((signed long)S ## width ## _MIN - 1, s ## width, false); \
	TEST_CASTABLE_TO_TYPE((signed long)S ## width ## _MIN - 1, s ## width ## var, false); \
	/* Non-constant expression with mismatched type. */ \
	TEST_CASTABLE_TO_TYPE(big, u ## width, false); \
	TEST_CASTABLE_TO_TYPE(big, u ## width ## var, false); \
	TEST_CASTABLE_TO_TYPE(small, s ## width, false); \
	TEST_CASTABLE_TO_TYPE(small, s ## width ## var, false); \
} while (0)

	TEST_CASTABLE_TO_TYPE_VAR(8);
	TEST_CASTABLE_TO_TYPE_VAR(16);
	TEST_CASTABLE_TO_TYPE_VAR(32);
#if BITS_PER_LONG == 64
	TEST_CASTABLE_TO_TYPE_VAR(64);
#endif

	TEST_CASTABLE_TO_TYPE_RANGE(8);
	TEST_CASTABLE_TO_TYPE_RANGE(16);
#if BITS_PER_LONG == 64
	TEST_CASTABLE_TO_TYPE_RANGE(32);
#endif
	kunit_info(test, "%d castable_to_type() tests finished\n", count);

#undef TEST_CASTABLE_TO_TYPE_RANGE
#undef TEST_CASTABLE_TO_TYPE_VAR
#undef TEST_CASTABLE_TO_TYPE
}

struct foo {
	int a;
	u32 counter;
	s16 array[] __counted_by(counter);
};

struct bar {
	int a;
	u32 counter;
	s16 array[];
};
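
/*
 * struct foo's flexible array is annotated with __counted_by(counter) while
 * struct bar's is not; DEFINE_FLEX_test() below checks how that changes the
 * compile-time size reported by __struct_size().
 */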
static void DEFINE_FLEX_test(struct kunit *test)
{
	/* Using _RAW_ on a __counted_by struct will initialize "counter" to zero */
	DEFINE_RAW_FLEX(struct foo, two_but_zero, array, 2);
#ifdef CONFIG_CC_HAS_COUNTED_BY
	int expected_raw_size = sizeof(struct foo);
#else
	int expected_raw_size = sizeof(struct foo) + 2 * sizeof(s16);
#endif
	/* Without annotation, it will always be on-stack size. */
	DEFINE_RAW_FLEX(struct bar, two, array, 2);
	DEFINE_FLEX(struct foo, eight, array, counter, 8);
	DEFINE_FLEX(struct foo, empty, array, counter, 0);

	KUNIT_EXPECT_EQ(test, __struct_size(two_but_zero), expected_raw_size);
	KUNIT_EXPECT_EQ(test, __struct_size(two), sizeof(struct bar) + 2 * sizeof(s16));
	KUNIT_EXPECT_EQ(test, __struct_size(eight), 24);
	KUNIT_EXPECT_EQ(test, __struct_size(empty), sizeof(struct foo));
}

static struct kunit_case overflow_test_cases[] = {
	KUNIT_CASE(u8_u8__u8_overflow_test),
	KUNIT_CASE(s8_s8__s8_overflow_test),
	KUNIT_CASE(u16_u16__u16_overflow_test),
	KUNIT_CASE(s16_s16__s16_overflow_test),
	KUNIT_CASE(u32_u32__u32_overflow_test),
	KUNIT_CASE(s32_s32__s32_overflow_test),
	KUNIT_CASE(u64_u64__u64_overflow_test),
	KUNIT_CASE(s64_s64__s64_overflow_test),
	KUNIT_CASE(u32_u32__int_overflow_test),
	KUNIT_CASE(u32_u32__u8_overflow_test),
	KUNIT_CASE(u8_u8__int_overflow_test),
	KUNIT_CASE(int_int__u8_overflow_test),
	KUNIT_CASE(shift_sane_test),
	KUNIT_CASE(shift_overflow_test),
	KUNIT_CASE(shift_truncate_test),
	KUNIT_CASE(shift_nonsense_test),
	KUNIT_CASE(overflow_allocation_test),
	KUNIT_CASE(overflow_size_helpers_test),
	KUNIT_CASE(overflows_type_test),
	KUNIT_CASE(same_type_test),
	KUNIT_CASE(castable_to_type_test),
	KUNIT_CASE(DEFINE_FLEX_test),
	{}
};

static struct kunit_suite overflow_test_suite = {
	.name = "overflow",
	.test_cases = overflow_test_cases,
};

kunit_test_suite(overflow_test_suite);

MODULE_DESCRIPTION("Test cases for arithmetic overflow checks");
MODULE_LICENSE("Dual MIT/GPL");