// RUN: %clang_cc1 -verify -fopenmp -fopenmp-version=50 -triple powerpc64le-unknown-unknown -fopenmp-targets=powerpc64le-ibm-linux-gnu -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp -fopenmp-version=50 -x c++ -triple powerpc64le-unknown-unknown -fopenmp-targets=powerpc64le-ibm-linux-gnu -verify -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -fopenmp-version=50 -x c++ -triple powerpc64le-unknown-unknown -fopenmp-targets=powerpc64le-ibm-linux-gnu -include-pch %t %s -emit-llvm -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp -fopenmp-version=50 -triple powerpc64le-unknown-unknown -fopenmp-targets=powerpc64le-ibm-linux-gnu -verify -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -fopenmp-version=50 -triple powerpc64le-unknown-unknown -fopenmp-targets=powerpc64le-ibm-linux-gnu -include-pch %t %s -emit-llvm -o - | FileCheck %s

typedef enum omp_allocator_handle_t {
  omp_null_allocator = 0,
  omp_default_mem_alloc = 1,
  omp_large_cap_mem_alloc = 2,
  omp_const_mem_alloc = 3,
  omp_high_bw_mem_alloc = 4,
  omp_low_lat_mem_alloc = 5,
  omp_cgroup_mem_alloc = 6,
  omp_pteam_mem_alloc = 7,
  omp_thread_mem_alloc = 8,
  KMP_ALLOCATOR_MAX_HANDLE = __UINTPTR_MAX__
} omp_allocator_handle_t;

typedef enum omp_alloctrait_key_t {
  omp_atk_sync_hint = 1,
  omp_atk_alignment = 2,
  omp_atk_pool_size = 4
} omp_alloctrait_key_t;

typedef struct omp_alloctrait_t {
  omp_alloctrait_key_t key;
  __UINTPTR_TYPE__ value;
} omp_alloctrait_t;

// CHECK: define {{.*}}[[FIE:@.+]]()
void fie(void) {
  int x;
  omp_allocator_handle_t my_allocator;
  omp_alloctrait_t traits[10];
#pragma omp target uses_allocators(omp_null_allocator) allocate(omp_null_allocator: x) firstprivate(x)
  {}
#pragma omp target uses_allocators(omp_default_mem_alloc) allocate(omp_default_mem_alloc: x) firstprivate(x)
  {}
#pragma omp target uses_allocators(omp_large_cap_mem_alloc) allocate(omp_large_cap_mem_alloc: x) firstprivate(x)
  {}
#pragma omp target uses_allocators(omp_const_mem_alloc) allocate(omp_const_mem_alloc: x) firstprivate(x)
  {}
#pragma omp target uses_allocators(omp_high_bw_mem_alloc) allocate(omp_high_bw_mem_alloc: x) firstprivate(x)
  {}
#pragma omp target uses_allocators(omp_low_lat_mem_alloc) allocate(omp_low_lat_mem_alloc: x) firstprivate(x)
  {}
#pragma omp target uses_allocators(omp_cgroup_mem_alloc) allocate(omp_cgroup_mem_alloc: x) firstprivate(x)
  {}
#pragma omp target uses_allocators(omp_pteam_mem_alloc) allocate(omp_pteam_mem_alloc: x) firstprivate(x)
  {}
#pragma omp target uses_allocators(omp_thread_mem_alloc) allocate(omp_thread_mem_alloc: x) firstprivate(x) // expected-warning {{allocator with the 'thread' trait access has unspecified behavior on 'target' directive}}
  {}
#pragma omp target uses_allocators(omp_null_allocator, omp_thread_mem_alloc, my_allocator(traits))
  {}
}

typedef enum omp_memspace_handle_t {
  omp_default_mem_space = 0,
  omp_large_cap_mem_space = 1,
  omp_const_mem_space = 2,
  omp_high_bw_mem_space = 3,
  omp_low_lat_mem_space = 4,
  llvm_omp_target_host_mem_space = 100,
  llvm_omp_target_shared_mem_space = 101,
  llvm_omp_target_device_mem_space = 102,
  KMP_MEMSPACE_MAX_HANDLE = __UINTPTR_MAX__
} omp_memspace_handle_t;

extern omp_allocator_handle_t
omp_init_allocator(omp_memspace_handle_t memspace, int ntraits,
                   const omp_alloctrait_t traits[]);

void *omp_aligned_alloc(unsigned long alignment, unsigned long size,
                        omp_allocator_handle_t allocator);
extern void *omp_alloc(int size, omp_allocator_handle_t a);

void foo() {
  int errors = 0;
  omp_memspace_handle_t memspace = omp_default_mem_space;
  omp_alloctrait_t traits[1] = {{omp_atk_alignment, 64}};
  omp_allocator_handle_t alloc = omp_init_allocator(memspace, 1, traits);
#pragma omp target map(tofrom: errors) uses_allocators(alloc(traits))
  {}
}
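
// For the predefined allocators used above, the firstprivate 'x' inside each outlined
// target region is obtained with __kmpc_alloc and released with __kmpc_free; the
// allocator handle is the enumerator value cast to a pointer (ptr null for
// omp_null_allocator, inttoptr 1 for omp_default_mem_alloc, and so on up to inttoptr 8
// for omp_thread_mem_alloc), as verified below.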
// CHECK: %[[#R0:]] = call i32 @__kmpc_global_thread_num(ptr @1)
// CHECK-NEXT: store i64 %x, ptr %x.addr, align 8
// CHECK-NEXT: %.x..void.addr = call ptr @__kmpc_alloc(i32 %[[#R0]], i64 4, ptr null)
// CHECK-NEXT: %[[#R1:]] = load i32, ptr %x.addr, align 4
// CHECK-NEXT: store i32 %[[#R1]], ptr %.x..void.addr, align 4
// CHECK-NEXT: call void @__kmpc_free(i32 %[[#R0]], ptr %.x..void.addr, ptr null)

// CHECK: %[[#R0:]] = call i32 @__kmpc_global_thread_num(ptr @1)
// CHECK-NEXT: store i64 %x, ptr %x.addr, align 8
// CHECK-NEXT: %.x..void.addr = call ptr @__kmpc_alloc(i32 %[[#R0]], i64 4, ptr inttoptr (i64 1 to ptr))
// CHECK-NEXT: %[[#R1:]] = load i32, ptr %x.addr, align 4
// CHECK-NEXT: store i32 %[[#R1]], ptr %.x..void.addr, align 4
// CHECK-NEXT: call void @__kmpc_free(i32 %[[#R0]], ptr %.x..void.addr, ptr inttoptr (i64 1 to ptr))

// CHECK: %[[#R0:]] = call i32 @__kmpc_global_thread_num(ptr @1)
// CHECK-NEXT: store i64 %x, ptr %x.addr, align 8
// CHECK-NEXT: %.x..void.addr = call ptr @__kmpc_alloc(i32 %[[#R0]], i64 4, ptr inttoptr (i64 2 to ptr))
// CHECK-NEXT: %[[#R1:]] = load i32, ptr %x.addr, align 4
// CHECK-NEXT: store i32 %[[#R1]], ptr %.x..void.addr, align 4
// CHECK-NEXT: call void @__kmpc_free(i32 %[[#R0]], ptr %.x..void.addr, ptr inttoptr (i64 2 to ptr))

// CHECK: %[[#R0:]] = call i32 @__kmpc_global_thread_num(ptr @1)
// CHECK-NEXT: store i64 %x, ptr %x.addr, align 8
// CHECK-NEXT: %.x..void.addr = call ptr @__kmpc_alloc(i32 %[[#R0]], i64 4, ptr inttoptr (i64 3 to ptr))
// CHECK-NEXT: %[[#R1:]] = load i32, ptr %x.addr, align 4
// CHECK-NEXT: store i32 %[[#R1]], ptr %.x..void.addr, align 4
// CHECK-NEXT: call void @__kmpc_free(i32 %[[#R0]], ptr %.x..void.addr, ptr inttoptr (i64 3 to ptr))

// CHECK: %[[#R0:]] = call i32 @__kmpc_global_thread_num(ptr @1)
// CHECK-NEXT: store i64 %x, ptr %x.addr, align 8
// CHECK-NEXT: %.x..void.addr = call ptr @__kmpc_alloc(i32 %[[#R0]], i64 4, ptr inttoptr (i64 4 to ptr))
// CHECK-NEXT: %[[#R1:]] = load i32, ptr %x.addr, align 4
// CHECK-NEXT: store i32 %[[#R1]], ptr %.x..void.addr, align 4
// CHECK-NEXT: call void @__kmpc_free(i32 %[[#R0]], ptr %.x..void.addr, ptr inttoptr (i64 4 to ptr))

// CHECK: %[[#R0:]] = call i32 @__kmpc_global_thread_num(ptr @1)
// CHECK-NEXT: store i64 %x, ptr %x.addr, align 8
// CHECK-NEXT: %.x..void.addr = call ptr @__kmpc_alloc(i32 %[[#R0]], i64 4, ptr inttoptr (i64 5 to ptr))
// CHECK-NEXT: %[[#R1:]] = load i32, ptr %x.addr, align 4
// CHECK-NEXT: store i32 %[[#R1]], ptr %.x..void.addr, align 4
// CHECK-NEXT: call void @__kmpc_free(i32 %[[#R0]], ptr %.x..void.addr, ptr inttoptr (i64 5 to ptr))

// CHECK: %[[#R0:]] = call i32 @__kmpc_global_thread_num(ptr @1)
// CHECK-NEXT: store i64 %x, ptr %x.addr, align 8
// CHECK-NEXT: %.x..void.addr = call ptr @__kmpc_alloc(i32 %[[#R0]], i64 4, ptr inttoptr (i64 6 to ptr))
// CHECK-NEXT: %[[#R1:]] = load i32, ptr %x.addr, align 4
// CHECK-NEXT: store i32 %[[#R1]], ptr %.x..void.addr, align 4
// CHECK-NEXT: call void @__kmpc_free(i32 %[[#R0]], ptr %.x..void.addr, ptr inttoptr (i64 6 to ptr))

// CHECK: %[[#R0:]] = call i32 @__kmpc_global_thread_num(ptr @1)
// CHECK-NEXT: store i64 %x, ptr %x.addr, align 8
// CHECK-NEXT: %.x..void.addr = call ptr @__kmpc_alloc(i32 %[[#R0]], i64 4, ptr inttoptr (i64 7 to ptr))
// CHECK-NEXT: %[[#R1:]] = load i32, ptr %x.addr, align 4
// CHECK-NEXT: store i32 %[[#R1]], ptr %.x..void.addr, align 4
// CHECK-NEXT: call void @__kmpc_free(i32 %[[#R0]], ptr %.x..void.addr, ptr inttoptr (i64 7 to ptr))

// CHECK: %[[#R0:]] = call i32 @__kmpc_global_thread_num(ptr @1)
// CHECK-NEXT: store i64 %x, ptr %x.addr, align 8
// CHECK-NEXT: %.x..void.addr = call ptr @__kmpc_alloc(i32 %[[#R0]], i64 4, ptr inttoptr (i64 8 to ptr))
// CHECK-NEXT: %[[#R1:]] = load i32, ptr %x.addr, align 4
// CHECK-NEXT: store i32 %[[#R1]], ptr %.x..void.addr, align 4
// CHECK-NEXT: call void @__kmpc_free(i32 %[[#R0]], ptr %.x..void.addr, ptr inttoptr (i64 8 to ptr))
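
// For allocators that are not predefined (my_allocator(traits) in fie and alloc(traits)
// in foo), the target region creates the handle with __kmpc_init_allocator on entry and
// destroys it with __kmpc_destroy_allocator on exit; the handle is kept as an i64 and
// converted back to a pointer where needed.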
// CHECK: [[TRAITS_ADDR_REF:%.+]] = alloca ptr,
// CHECK: [[MY_ALLOCATOR_ADDR:%.+]] = alloca i64,
// CHECK: [[TRAITS_ADDR:%.+]] = load ptr, ptr [[TRAITS_ADDR_REF]],
// CHECK: [[ALLOCATOR:%.+]] = call ptr @__kmpc_init_allocator(i32 %{{.+}}, ptr null, i32 10, ptr [[TRAITS_ADDR]])
// CHECK: [[CONV:%.+]] = ptrtoint ptr [[ALLOCATOR]] to i64
// CHECK: store i64 [[CONV]], ptr [[MY_ALLOCATOR_ADDR]],

// Destroy allocator upon exit from the region.
// CHECK: [[ALLOCATOR:%.+]] = load i64, ptr [[MY_ALLOCATOR_ADDR]],
// CHECK: [[CONV:%.+]] = inttoptr i64 [[ALLOCATOR]] to ptr
// CHECK: call void @__kmpc_destroy_allocator(i32 %{{.+}}, ptr [[CONV]])
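
// Same pattern for 'alloc(traits)': the handle lives in the %alloc variable and the
// allocator is initialized from a single trait (i32 1).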
// CHECK: [[TRAITS_ADDR_REF:%.+]] = alloca ptr,
// CHECK: [[MY_ALLOCATOR_ADDR:%alloc]] = alloca i64,
// CHECK: [[TRAITS_ADDR:%.+]] = load ptr, ptr [[TRAITS_ADDR_REF]],
// CHECK: [[ALLOCATOR:%.+]] = call ptr @__kmpc_init_allocator(i32 %{{.+}}, ptr null, i32 1, ptr [[TRAITS_ADDR]])
// CHECK: [[CONV:%.+]] = ptrtoint ptr [[ALLOCATOR]] to i64
// CHECK: store i64 [[CONV]], ptr [[MY_ALLOCATOR_ADDR]],

// Destroy allocator upon exit from the region.
// CHECK: [[ALLOCATOR:%.+]] = load i64, ptr [[MY_ALLOCATOR_ADDR]],
// CHECK: [[CONV1:%.+]] = inttoptr i64 [[ALLOCATOR]] to ptr
// CHECK: call void @__kmpc_destroy_allocator(i32 %{{.+}}, ptr [[CONV1]])