1 ; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 2
2 ; RUN: opt -passes=hwasan -hwasan-use-stack-safety=0 -hwasan-use-after-scope -hwasan-inline-fast-path-checks=0 -S < %s | FileCheck %s
3 target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
4 target triple = "riscv64-unknown-linux"
; Global the test stores the (tagged) stack buffer pointer into.
6 @stackbuf = dso_local local_unnamed_addr global ptr null, align 8
; Buffer passed to @setjmp below.
7 @jbuf = dso_local global [32 x i64] zeroinitializer, align 8
; Opaque callee; per the comment in @_Z6targetv it may longjmp back to the setjmp.
9 declare void @may_jump()
; Checks hwasan instrumentation around a returns_twice call (setjmp): the stack
; buffer's shadow is tagged after the prologue and untagged again in the return
; block (the two @llvm.memset calls in the CHECK lines below), rather than at
; lifetime.end, because a longjmp from @may_jump can bypass the lifetime.end.
; CHECK lines are autogenerated (update_test_checks.py) — regenerate, don't hand-edit.
11 define dso_local noundef i1 @_Z6targetv() sanitize_hwaddress {
12 ; CHECK-LABEL: define dso_local noundef i1 @_Z6targetv
13 ; CHECK-SAME: () #[[ATTR0:[0-9]+]] personality ptr @__hwasan_personality_thunk {
15 ; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__hwasan_tls, align 8
16 ; CHECK-NEXT: [[TMP1:%.*]] = and i64 [[TMP0]], 72057594037927935
17 ; CHECK-NEXT: [[TMP2:%.*]] = ashr i64 [[TMP0]], 3
18 ; CHECK-NEXT: [[TMP3:%.*]] = call ptr @llvm.frameaddress.p0(i32 0)
19 ; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[TMP3]] to i64
20 ; CHECK-NEXT: [[TMP5:%.*]] = shl i64 [[TMP4]], 44
21 ; CHECK-NEXT: [[TMP6:%.*]] = or i64 ptrtoint (ptr @_Z6targetv to i64), [[TMP5]]
22 ; CHECK-NEXT: [[TMP7:%.*]] = inttoptr i64 [[TMP1]] to ptr
23 ; CHECK-NEXT: store i64 [[TMP6]], ptr [[TMP7]], align 8
24 ; CHECK-NEXT: [[TMP8:%.*]] = ashr i64 [[TMP0]], 56
25 ; CHECK-NEXT: [[TMP9:%.*]] = shl nuw nsw i64 [[TMP8]], 12
26 ; CHECK-NEXT: [[TMP10:%.*]] = xor i64 [[TMP9]], -1
27 ; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP0]], 8
28 ; CHECK-NEXT: [[TMP12:%.*]] = and i64 [[TMP11]], [[TMP10]]
29 ; CHECK-NEXT: store i64 [[TMP12]], ptr @__hwasan_tls, align 8
30 ; CHECK-NEXT: [[TMP13:%.*]] = or i64 [[TMP1]], 4294967295
31 ; CHECK-NEXT: [[HWASAN_SHADOW:%.*]] = add i64 [[TMP13]], 1
32 ; CHECK-NEXT: [[TMP14:%.*]] = inttoptr i64 [[HWASAN_SHADOW]] to ptr
33 ; CHECK-NEXT: [[HWASAN_UAR_TAG:%.*]] = lshr i64 [[TMP4]], 56
34 ; CHECK-NEXT: [[BUF:%.*]] = alloca [4096 x i8], align 16
35 ; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP2]], 0
36 ; CHECK-NEXT: [[TMP16:%.*]] = ptrtoint ptr [[BUF]] to i64
37 ; CHECK-NEXT: [[TMP17:%.*]] = and i64 [[TMP16]], 72057594037927935
38 ; CHECK-NEXT: [[TMP18:%.*]] = shl i64 [[TMP15]], 56
39 ; CHECK-NEXT: [[TMP19:%.*]] = or i64 [[TMP17]], [[TMP18]]
40 ; CHECK-NEXT: [[BUF_HWASAN:%.*]] = inttoptr i64 [[TMP19]] to ptr
41 ; CHECK-NEXT: [[TMP20:%.*]] = trunc i64 [[TMP15]] to i8
42 ; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[BUF]] to i64
43 ; CHECK-NEXT: [[TMP22:%.*]] = and i64 [[TMP21]], 72057594037927935
44 ; CHECK-NEXT: [[TMP23:%.*]] = lshr i64 [[TMP22]], 4
45 ; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP14]], i64 [[TMP23]]
46 ; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 1 [[TMP24]], i8 [[TMP20]], i64 256, i1 false)
47 ; CHECK-NEXT: [[CALL:%.*]] = call i32 @setjmp(ptr noundef @jbuf)
48 ; CHECK-NEXT: switch i32 [[CALL]], label [[WHILE_BODY:%.*]] [
49 ; CHECK-NEXT: i32 1, label [[RETURN:%.*]]
50 ; CHECK-NEXT: i32 2, label [[SW_BB1:%.*]]
53 ; CHECK-NEXT: br label [[RETURN]]
55 ; CHECK-NEXT: call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[TMP14]], ptr @stackbuf, i32 19)
56 ; CHECK-NEXT: store ptr [[BUF_HWASAN]], ptr @stackbuf, align 8
57 ; CHECK-NEXT: call void @may_jump()
58 ; CHECK-NEXT: br label [[RETURN]]
60 ; CHECK-NEXT: [[RETVAL_0:%.*]] = phi i1 [ true, [[WHILE_BODY]] ], [ true, [[SW_BB1]] ], [ false, [[ENTRY:%.*]] ]
61 ; CHECK-NEXT: [[TMP25:%.*]] = trunc i64 [[HWASAN_UAR_TAG]] to i8
62 ; CHECK-NEXT: [[TMP26:%.*]] = ptrtoint ptr [[BUF]] to i64
63 ; CHECK-NEXT: [[TMP27:%.*]] = and i64 [[TMP26]], 72057594037927935
64 ; CHECK-NEXT: [[TMP28:%.*]] = lshr i64 [[TMP27]], 4
65 ; CHECK-NEXT: [[TMP29:%.*]] = getelementptr i8, ptr [[TMP14]], i64 [[TMP28]]
66 ; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 1 [[TMP29]], i8 [[TMP25]], i64 256, i1 false)
67 ; CHECK-NEXT: ret i1 [[RETVAL_0]]
; Input IR below; the instrumented form it must produce is the CHECK block above.
70 %buf = alloca [4096 x i8], align 1
71 %call = call i32 @setjmp(ptr noundef @jbuf)
72 switch i32 %call, label %while.body [
77 sw.bb1: ; preds = %entry
80 while.body: ; preds = %entry
81 call void @llvm.lifetime.start.p0(i64 4096, ptr nonnull %buf) #10
82 store ptr %buf, ptr @stackbuf, align 8
83 ; may_jump may call longjmp, going back to the switch (and then the return),
84 ; bypassing the lifetime.end. This is why we need to untag on the return,
85 ; rather than the lifetime.end.
87 call void @llvm.lifetime.end.p0(i64 4096, ptr nonnull %buf) #10
90 return: ; preds = %entry, %while.body, %sw.bb1
91 %retval.0 = phi i1 [ true, %while.body ], [ true, %sw.bb1 ], [ false, %entry ]
; returns_twice is the property under test: control can re-enter after this
; call via longjmp, bypassing later lifetime.end markers (see @_Z6targetv).
95 declare i32 @setjmp(ptr noundef) returns_twice
97 declare void @llvm.lifetime.start.p0(i64 immarg, ptr nocapture)
98 declare void @llvm.lifetime.end.p0(i64 immarg, ptr nocapture)