1 ; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 2
2 ; RUN: opt -passes=hwasan -hwasan-use-stack-safety=0 -hwasan-use-after-scope -S < %s | FileCheck %s
; Test: HWASan instrumentation of a function whose stack buffer lives across
; a setjmp call. Stack-safety analysis is disabled (-hwasan-use-stack-safety=0)
; so the alloca is unconditionally instrumented, and use-after-scope is on so
; lifetime markers are considered.
3 target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
4 target triple = "aarch64-unknown-linux-android29"
; @stackbuf receives the address of the function's local buffer (stored in
; while.body below); @jbuf is the 32 x i64 jump buffer passed to @setjmp.
6 @stackbuf = dso_local local_unnamed_addr global ptr null, align 8
7 @jbuf = dso_local global [32 x i64] zeroinitializer, align 8
; External function called from while.body; per the inline comment in the
; test body it may longjmp back to the setjmp in @_Z6targetv.
9 declare void @may_jump()
; Core of the test: the alloca crosses a setjmp, and may_jump() may longjmp
; past the lifetime.end (see the comment inside while.body). The CHECK lines
; therefore verify that HWASan re-tags the buffer's shadow with the UAR tag
; on the `return` block (the second memset below), not at lifetime.end.
; NOTE(review): several original lines are elided in this view; the IR body
; below is incomplete as shown (missing switch cases, terminators and the
; closing brace).
11 define dso_local noundef i1 @_Z6targetv() sanitize_hwaddress {
12 ; CHECK-LABEL: define dso_local noundef i1 @_Z6targetv
13 ; CHECK-SAME: () #[[ATTR0:[0-9]+]] {
14 ; CHECK-NEXT: [[TMP0:%.*]] = call ptr @llvm.thread.pointer()
16 ; CHECK-NEXT: [[TMP1:%.*]] = getelementptr i8, ptr [[TMP0]], i32 48
17 ; CHECK-NEXT: [[TMP2:%.*]] = load i64, ptr [[TMP1]], align 8
18 ; CHECK-NEXT: [[TMP3:%.*]] = ashr i64 [[TMP2]], 3
19 ; CHECK-NEXT: [[TMP4:%.*]] = call i64 @llvm.read_register.i64(metadata [[META1:![0-9]+]])
20 ; CHECK-NEXT: [[TMP5:%.*]] = call ptr @llvm.frameaddress.p0(i32 0)
21 ; CHECK-NEXT: [[TMP6:%.*]] = ptrtoint ptr [[TMP5]] to i64
22 ; CHECK-NEXT: [[TMP7:%.*]] = shl i64 [[TMP6]], 44
23 ; CHECK-NEXT: [[TMP8:%.*]] = or i64 [[TMP4]], [[TMP7]]
24 ; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP2]] to ptr
25 ; CHECK-NEXT: store i64 [[TMP8]], ptr [[TMP9]], align 8
26 ; CHECK-NEXT: [[TMP10:%.*]] = ashr i64 [[TMP2]], 56
27 ; CHECK-NEXT: [[TMP11:%.*]] = shl nuw nsw i64 [[TMP10]], 12
28 ; CHECK-NEXT: [[TMP12:%.*]] = xor i64 [[TMP11]], -1
29 ; CHECK-NEXT: [[TMP13:%.*]] = add i64 [[TMP2]], 8
30 ; CHECK-NEXT: [[TMP14:%.*]] = and i64 [[TMP13]], [[TMP12]]
31 ; CHECK-NEXT: store i64 [[TMP14]], ptr [[TMP1]], align 8
32 ; CHECK-NEXT: [[TMP15:%.*]] = or i64 [[TMP2]], 4294967295
33 ; CHECK-NEXT: [[HWASAN_SHADOW:%.*]] = add i64 [[TMP15]], 1
34 ; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[HWASAN_SHADOW]] to ptr
35 ; CHECK-NEXT: [[HWASAN_UAR_TAG:%.*]] = lshr i64 [[TMP6]], 56
36 ; CHECK-NEXT: [[BUF:%.*]] = alloca [4096 x i8], align 16
37 ; CHECK-NEXT: [[TMP17:%.*]] = xor i64 [[TMP3]], 0
38 ; CHECK-NEXT: [[TMP18:%.*]] = ptrtoint ptr [[BUF]] to i64
39 ; CHECK-NEXT: [[TMP19:%.*]] = and i64 [[TMP18]], 72057594037927935
40 ; CHECK-NEXT: [[TMP20:%.*]] = shl i64 [[TMP17]], 56
41 ; CHECK-NEXT: [[TMP21:%.*]] = or i64 [[TMP19]], [[TMP20]]
42 ; CHECK-NEXT: [[BUF_HWASAN:%.*]] = inttoptr i64 [[TMP21]] to ptr
43 ; CHECK-NEXT: [[TMP22:%.*]] = trunc i64 [[TMP17]] to i8
44 ; CHECK-NEXT: [[TMP23:%.*]] = ptrtoint ptr [[BUF]] to i64
45 ; CHECK-NEXT: [[TMP24:%.*]] = and i64 [[TMP23]], 72057594037927935
46 ; CHECK-NEXT: [[TMP25:%.*]] = lshr i64 [[TMP24]], 4
47 ; CHECK-NEXT: [[TMP26:%.*]] = getelementptr i8, ptr [[TMP16]], i64 [[TMP25]]
; The memset above/below writes 256 shadow bytes for the 4096-byte buffer
; (one shadow byte per 16-byte granule). This first one tags the allocation;
; the second one, in the return block, re-tags it with HWASAN_UAR_TAG.
48 ; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 1 [[TMP26]], i8 [[TMP22]], i64 256, i1 false)
49 ; CHECK-NEXT: [[CALL:%.*]] = call i32 @setjmp(ptr noundef @jbuf)
50 ; CHECK-NEXT: switch i32 [[CALL]], label [[WHILE_BODY:%.*]] [
51 ; CHECK-NEXT: i32 1, label [[RETURN:%.*]]
52 ; CHECK-NEXT: i32 2, label [[SW_BB1:%.*]]
55 ; CHECK-NEXT: br label [[RETURN]]
57 ; CHECK-NEXT: call void @llvm.hwasan.check.memaccess(ptr [[TMP16]], ptr @stackbuf, i32 19)
58 ; CHECK-NEXT: store ptr [[BUF_HWASAN]], ptr @stackbuf, align 8
59 ; CHECK-NEXT: call void @may_jump()
60 ; CHECK-NEXT: br label [[RETURN]]
62 ; CHECK-NEXT: [[RETVAL_0:%.*]] = phi i1 [ true, [[WHILE_BODY]] ], [ true, [[SW_BB1]] ], [ false, [[ENTRY:%.*]] ]
63 ; CHECK-NEXT: [[TMP27:%.*]] = trunc i64 [[HWASAN_UAR_TAG]] to i8
64 ; CHECK-NEXT: [[TMP28:%.*]] = ptrtoint ptr [[BUF]] to i64
65 ; CHECK-NEXT: [[TMP29:%.*]] = and i64 [[TMP28]], 72057594037927935
66 ; CHECK-NEXT: [[TMP30:%.*]] = lshr i64 [[TMP29]], 4
67 ; CHECK-NEXT: [[TMP31:%.*]] = getelementptr i8, ptr [[TMP16]], i64 [[TMP30]]
68 ; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 1 [[TMP31]], i8 [[TMP27]], i64 256, i1 false)
69 ; CHECK-NEXT: ret i1 [[RETVAL_0]]
; 4096-byte local buffer whose lifetime is bracketed by lifetime.start/end
; below but which escapes via the store to @stackbuf.
72 %buf = alloca [4096 x i8], align 1
73 %call = call i32 @setjmp(ptr noundef @jbuf)
74 switch i32 %call, label %while.body [
79 sw.bb1: ; preds = %entry
82 while.body: ; preds = %entry
83 call void @llvm.lifetime.start.p0(i64 4096, ptr nonnull %buf) #10
84 store ptr %buf, ptr @stackbuf, align 8
85 ; may_jump may call longjmp, going back to the switch (and then the return),
86 ; bypassing the lifetime.end. This is why we need to untag on the return,
87 ; rather than the lifetime.end.
89 call void @llvm.lifetime.end.p0(i64 4096, ptr nonnull %buf) #10
; Single exit block reached from all three paths; per the CHECK lines above,
; HWASan places the untagging memset here, just before the ret.
92 return: ; preds = %entry, %while.body, %sw.bb1
93 %retval.0 = phi i1 [ true, %while.body ], [ true, %sw.bb1 ], [ false, %entry ]
; setjmp is marked returns_twice — NOTE(review): presumably this attribute is
; what makes the pass treat the crossing allocation conservatively; confirm
; against the HWASan pass sources.
97 declare i32 @setjmp(ptr noundef) returns_twice
; Lifetime markers bracketing %buf in while.body above.
99 declare void @llvm.lifetime.start.p0(i64 immarg, ptr nocapture)
100 declare void @llvm.lifetime.end.p0(i64 immarg, ptr nocapture)