// RUN: %clang_cc1 -verify -fopenmp -fnoopenmp-use-tls -x c++ -std=c++11 -triple x86_64-unknown-unknown -emit-llvm %s -fexceptions -fcxx-exceptions -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp -fnoopenmp-use-tls -x c++ -std=c++11 -triple x86_64-unknown-unknown -fexceptions -fcxx-exceptions -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -fnoopenmp-use-tls -x c++ -triple x86_64-unknown-unknown -fexceptions -fcxx-exceptions -std=c++11 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -std=c++11 -fopenmp -fnoopenmp-use-tls -fexceptions -fcxx-exceptions -debug-info-kind=line-tables-only -x c++ -emit-llvm %s -o - | FileCheck %s --check-prefix=TERM_DEBUG
// RUN: %clang_cc1 -verify -fopenmp -fnoopenmp-use-tls -x c++ -std=c++11 -DARRAY -triple x86_64-apple-darwin10 -emit-llvm %s -o - | FileCheck -check-prefix=ARRAY %s

// RUN: %clang_cc1 -verify -fopenmp-simd -fnoopenmp-use-tls -x c++ -std=c++11 -triple x86_64-unknown-unknown -emit-llvm %s -fexceptions -fcxx-exceptions -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// RUN: %clang_cc1 -fopenmp-simd -fnoopenmp-use-tls -x c++ -std=c++11 -triple x86_64-unknown-unknown -fexceptions -fcxx-exceptions -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp-simd -fnoopenmp-use-tls -x c++ -triple x86_64-unknown-unknown -fexceptions -fcxx-exceptions -std=c++11 -include-pch %t -verify %s -emit-llvm -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -std=c++11 -fopenmp-simd -fnoopenmp-use-tls -fexceptions -fcxx-exceptions -debug-info-kind=line-tables-only -x c++ -emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// RUN: %clang_cc1 -verify -fopenmp-simd -fnoopenmp-use-tls -x c++ -std=c++11 -DARRAY -triple x86_64-apple-darwin10 -emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// SIMD-ONLY0-NOT: {{__kmpc|__tgt}}
// expected-no-diagnostics
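//
// This file checks LLVM IR generation for '#pragma omp single': the 'nowait'
// form, the implicit barrier after a plain 'single', and 'copyprivate' lists
// covering scalars, class objects, arrays, and threadprivate variables, plus
// debug-location/termination handling (TERM_DEBUG) and pointer-typed
// parameters (ARRAY).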
#ifndef ARRAY
#ifndef HEADER
#define HEADER

class TestClass {
public:
  int a;
  TestClass() : a(0) {}
  TestClass(const TestClass &C) : a(C.a) {}
  TestClass &operator=(const TestClass &) { return *this; }
  ~TestClass() {}
};

// CHECK-DAG: [[TEST_CLASS_TY:%.+]] = type { i{{[0-9]+}} }
// CHECK-DAG: [[SST_TY:%.+]] = type { double }
// CHECK-DAG: [[SS_TY:%.+]] = type { i32, i8, i32* }
// CHECK-DAG: [[IDENT_T_TY:%.+]] = type { i32, i32, i32, i32, i8* }
// CHECK: [[IMPLICIT_BARRIER_SINGLE_LOC:@.+]] = private unnamed_addr global %{{.+}} { i32 0, i32 322, i32 0, i32 0, i8*

// CHECK: define void [[FOO:@.+]]()

TestClass tc;
TestClass tc2[2];
#pragma omp threadprivate(tc, tc2)

void foo() {}

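// SS exercises 'single copyprivate' on a plain member, a bit-field, and a
// reference member, all named through 'this' inside a constructor and a
// nested lambda.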
struct SS {
  int a;
  int b : 4;
  int &c;
  SS(int &d) : a(0), b(0), c(d) {
#pragma omp parallel firstprivate(a, b, c)
#pragma omp single copyprivate(a, this->b, (this)->c)
    [&]() {
      ++this->a, --b, (this)->c /= 1;
#pragma omp parallel firstprivate(a, b, c)
#pragma omp single copyprivate(a, this->b, (this)->c)
      ++(this)->a, --b, this->c /= 1;
    }();
  }
};

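// SST is the templated counterpart: 'copyprivate' on a dependent member
// 'this->a', reached through two levels of nested lambdas.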
template<typename T>
struct SST {
  T a;
  SST() : a(T()) {
#pragma omp parallel firstprivate(a)
#pragma omp single copyprivate(this->a)
    [&]() {
      [&]() {
        ++this->a;
#pragma omp parallel firstprivate(a)
#pragma omp single copyprivate((this)->a)
        ++(this)->a;
      }();
    }();
  }
};

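// main() covers the 'nowait' form (no barrier expected), the plain form
// (implicit barrier through [[IMPLICIT_BARRIER_SINGLE_LOC]]), and a five-item
// 'copyprivate' list mixing a scalar, a class object, an array, and two
// threadprivate variables.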
// CHECK-LABEL: @main
// TERM_DEBUG-LABEL: @main
int main() {
  // CHECK: alloca i32
  // CHECK-DAG: [[A_ADDR:%.+]] = alloca i8
  // CHECK-DAG: [[A2_ADDR:%.+]] = alloca [2 x i8]
  // CHECK-DAG: [[C_ADDR:%.+]] = alloca [[TEST_CLASS_TY]]
  // CHECK-DAG: [[DID_IT:%.+]] = alloca i32,
  // CHECK-DAG: [[COPY_LIST:%.+]] = alloca [5 x i8*],
  char a;
  char a2[2];
  TestClass &c = tc;
  SST<double> sst;
  SS ss(c.a);

  // CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num([[IDENT_T_TY]]* [[DEFAULT_LOC:@.+]])
  // CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: [[IS_SINGLE:%.+]] = icmp ne i32 [[RES]], 0
  // CHECK-NEXT: br i1 [[IS_SINGLE]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
  // CHECK: [[THEN]]
  // CHECK-NEXT: store i8 2, i8* [[A_ADDR]]
  // CHECK-NEXT: call void @__kmpc_end_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: br label {{%?}}[[EXIT]]
  // CHECK: [[EXIT]]
  // CHECK-NOT: call {{.+}} @__kmpc_cancel_barrier
#pragma omp single nowait
  a = 2;
  // CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: [[IS_SINGLE:%.+]] = icmp ne i32 [[RES]], 0
  // CHECK-NEXT: br i1 [[IS_SINGLE]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
  // CHECK: [[THEN]]
  // CHECK-NEXT: store i8 2, i8* [[A_ADDR]]
  // CHECK-NEXT: call void @__kmpc_end_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: br label {{%?}}[[EXIT]]
  // CHECK: [[EXIT]]
  // CHECK: call{{.*}} @__kmpc_barrier([[IDENT_T_TY]]* [[IMPLICIT_BARRIER_SINGLE_LOC]], i32 [[GTID]])
#pragma omp single
  a = 2;
  // CHECK: store i32 0, i32* [[DID_IT]]
  // CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: [[IS_SINGLE:%.+]] = icmp ne i32 [[RES]], 0
  // CHECK-NEXT: br i1 [[IS_SINGLE]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
  // CHECK: [[THEN]]
  // CHECK-NEXT: invoke void [[FOO]]()
  // CHECK: to label {{%?}}[[CONT:.+]] unwind
  // CHECK: [[CONT]]
  // CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK: store i32 1, i32* [[DID_IT]]
  // CHECK-NEXT: br label {{%?}}[[EXIT]]
  // CHECK: [[EXIT]]
  // CHECK: [[A_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 0
  // CHECK: store i8* [[A_ADDR]], i8** [[A_PTR_REF]],
  // CHECK: [[C_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 1
  // CHECK: store i8* {{.+}}, i8** [[C_PTR_REF]],
  // CHECK: [[TC_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 2
  // CHECK: [[TC_THREADPRIVATE_ADDR_VOID_PTR:%.+]] = call{{.*}} i8* @__kmpc_threadprivate_cached
  // CHECK: [[TC_THREADPRIVATE_ADDR:%.+]] = bitcast i8* [[TC_THREADPRIVATE_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
  // CHECK: [[TC_PTR_REF_VOID_PTR:%.+]] = bitcast [[TEST_CLASS_TY]]* [[TC_THREADPRIVATE_ADDR]] to i8*
  // CHECK: store i8* [[TC_PTR_REF_VOID_PTR]], i8** [[TC_PTR_REF]],
  // CHECK: [[A2_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 3
  // CHECK: [[BITCAST:%.+]] = bitcast [2 x i8]* [[A2_ADDR]] to i8*
  // CHECK: store i8* [[BITCAST]], i8** [[A2_PTR_REF]],
  // CHECK: [[TC2_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 4
  // CHECK: [[TC2_THREADPRIVATE_ADDR_VOID_PTR:%.+]] = call{{.*}} i8* @__kmpc_threadprivate_cached
  // CHECK: [[TC2_THREADPRIVATE_ADDR:%.+]] = bitcast i8* [[TC2_THREADPRIVATE_ADDR_VOID_PTR]] to [2 x [[TEST_CLASS_TY]]]*
  // CHECK: [[TC2_PTR_REF_VOID_PTR:%.+]] = bitcast [2 x [[TEST_CLASS_TY]]]* [[TC2_THREADPRIVATE_ADDR]] to i8*
  // CHECK: store i8* [[TC2_PTR_REF_VOID_PTR]], i8** [[TC2_PTR_REF]],
  // CHECK: [[COPY_LIST_VOID_PTR:%.+]] = bitcast [5 x i8*]* [[COPY_LIST]] to i8*
  // CHECK: [[DID_IT_VAL:%.+]] = load i32, i32* [[DID_IT]],
  // CHECK: call void @__kmpc_copyprivate([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]], i64 40, i8* [[COPY_LIST_VOID_PTR]], void (i8*, i8*)* [[COPY_FUNC:@.+]], i32 [[DID_IT_VAL]])
  // CHECK-NOT: call {{.+}} @__kmpc_cancel_barrier
#pragma omp single copyprivate(a, c, tc, a2, tc2)
  foo();
  // CHECK-NOT: call i32 @__kmpc_single
  // CHECK-NOT: call void @__kmpc_end_single
  return a;
}

// CHECK: void [[COPY_FUNC]](i8*, i8*)
// CHECK: store i8* %0, i8** [[DST_ADDR_REF:%.+]],
// CHECK: store i8* %1, i8** [[SRC_ADDR_REF:%.+]],
// CHECK: [[DST_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_ADDR_REF]],
// CHECK: [[DST_ADDR:%.+]] = bitcast i8* [[DST_ADDR_VOID_PTR]] to [5 x i8*]*
// CHECK: [[SRC_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_ADDR_REF]],
// CHECK: [[SRC_ADDR:%.+]] = bitcast i8* [[SRC_ADDR_VOID_PTR]] to [5 x i8*]*
// CHECK: [[DST_A_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 0
// CHECK: [[DST_A_ADDR:%.+]] = load i8*, i8** [[DST_A_ADDR_REF]],
// CHECK: [[SRC_A_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 0
// CHECK: [[SRC_A_ADDR:%.+]] = load i8*, i8** [[SRC_A_ADDR_REF]],
// CHECK: [[SRC_A_VAL:%.+]] = load i8, i8* [[SRC_A_ADDR]],
// CHECK: store i8 [[SRC_A_VAL]], i8* [[DST_A_ADDR]],
// CHECK: [[DST_C_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 1
// CHECK: [[DST_C_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_C_ADDR_REF]],
// CHECK: [[DST_C_ADDR:%.+]] = bitcast i8* [[DST_C_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: [[SRC_C_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 1
// CHECK: [[SRC_C_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_C_ADDR_REF]],
// CHECK: [[SRC_C_ADDR:%.+]] = bitcast i8* [[SRC_C_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: call{{.*}} [[TEST_CLASS_TY_ASSIGN:@.+]]([[TEST_CLASS_TY]]* [[DST_C_ADDR]], [[TEST_CLASS_TY]]* {{.*}}[[SRC_C_ADDR]])
// CHECK: [[DST_TC_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 2
// CHECK: [[DST_TC_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_TC_ADDR_REF]],
// CHECK: [[DST_TC_ADDR:%.+]] = bitcast i8* [[DST_TC_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: [[SRC_TC_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 2
// CHECK: [[SRC_TC_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_TC_ADDR_REF]],
// CHECK: [[SRC_TC_ADDR:%.+]] = bitcast i8* [[SRC_TC_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: call{{.*}} [[TEST_CLASS_TY_ASSIGN]]([[TEST_CLASS_TY]]* [[DST_TC_ADDR]], [[TEST_CLASS_TY]]* {{.*}}[[SRC_TC_ADDR]])
// CHECK: [[DST_A2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 3
// CHECK: [[DST_A2_ADDR:%.+]] = load i8*, i8** [[DST_A2_ADDR_REF]],
// CHECK: [[SRC_A2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 3
// CHECK: [[SRC_A2_ADDR:%.+]] = load i8*, i8** [[SRC_A2_ADDR_REF]],
// CHECK: call void @llvm.memcpy.{{.+}}(i8* align 1 [[DST_A2_ADDR]], i8* align 1 [[SRC_A2_ADDR]], i{{[0-9]+}} 2, i1 false)
// CHECK: [[DST_TC2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 4
// CHECK: [[DST_TC2_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_TC2_ADDR_REF]],
// CHECK: [[DST_TC2_ADDR:%.+]] = bitcast i8* [[DST_TC2_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: [[SRC_TC2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 4
// CHECK: [[SRC_TC2_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_TC2_ADDR_REF]],
// CHECK: [[SRC_TC2_ADDR:%.+]] = bitcast i8* [[SRC_TC2_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: br i1
// CHECK: call{{.*}} [[TEST_CLASS_TY_ASSIGN]]([[TEST_CLASS_TY]]* %{{.+}}, [[TEST_CLASS_TY]]* {{.*}})
// CHECK: br i1
// CHECK: ret void

// CHECK-LABEL: parallel_single
// TERM_DEBUG-LABEL: parallel_single
void parallel_single() {
#pragma omp parallel
#pragma omp single
  // TERM_DEBUG-NOT: __kmpc_global_thread_num
  // TERM_DEBUG: call i32 @__kmpc_single({{.+}}), !dbg [[DBG_LOC_START:![0-9]+]]
  // TERM_DEBUG: invoke void {{.*}}foo{{.*}}()
  // TERM_DEBUG: unwind label %[[TERM_LPAD:.+]],
  // TERM_DEBUG-NOT: __kmpc_global_thread_num
  // TERM_DEBUG: call void @__kmpc_end_single({{.+}}), !dbg [[DBG_LOC_END:![0-9]+]]
  // TERM_DEBUG: [[TERM_LPAD]]
  // TERM_DEBUG: call void @__clang_call_terminate
  // TERM_DEBUG: unreachable
  foo();
}
// TERM_DEBUG-DAG: [[DBG_LOC_START]] = !DILocation(line: [[@LINE-12]],
// TERM_DEBUG-DAG: [[DBG_LOC_END]] = !DILocation(line: [[@LINE-3]],
#endif
#else
// ARRAY-LABEL: array_func
struct St {
  int a, b;
  St() : a(0), b(0) {}
  St &operator=(const St &) { return *this; }
  ~St() {}
};

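// array_func checks 'copyprivate' on pointer-typed items: the 'int a[n]' and
// 'St s[2]' parameters both decay to pointers, so the copy list holds two
// pointers (16 bytes).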
void array_func(int n, int a[n], St s[2]) {
  // ARRAY: call void @__kmpc_copyprivate(%struct.ident_t* @{{.+}}, i32 %{{.+}}, i64 16, i8* %{{.+}}, void (i8*, i8*)* [[CPY:@.+]], i32 %{{.+}})
#pragma omp single copyprivate(a, s)
  ;
}
// ARRAY: define internal void [[CPY]]
// ARRAY: store i32* %{{.+}}, i32** %{{.+}},
// ARRAY: store %struct.St* %{{.+}}, %struct.St** %{{.+}},
#endif

// CHECK-LABEL: @_ZN2SSC2ERi(
// CHECK: call void ([[IDENT_T_TY]]*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call([[IDENT_T_TY]]* @{{.+}}, i32 4, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, [[SS_TY]]*, i64, i64, i64)* [[SS_MICROTASK:@.+]] to void
// CHECK-NEXT: ret void

// CHECK: define internal void [[SS_MICROTASK]](i32* {{[^,]+}}, i32* {{[^,]+}}, [[SS_TY]]* {{.+}}, i64 {{.+}}, i64 {{.+}}, i64 {{.+}})
// Private a
// CHECK: alloca i64,
// Private b
// CHECK: alloca i64,
// Private c
// CHECK: alloca i64,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: [[DID_IT:%.+]] = alloca i32,
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: store i32 0, i32* [[DID_IT]],
// CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1

// CHECK: getelementptr inbounds [[CAP_TY:%.+]], [[CAP_TY]]* [[CAP:%.+]], i32 0, i32 0
// CHECK: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 1
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: store i32* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 2
// CHECK-NEXT: store i32* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 3
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: store i32* %
// CHECK-LABEL: invoke void @_ZZN2SSC1ERiENKUlvE_clEv(
// CHECK-SAME: [[CAP_TY]]* [[CAP]])

// CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK: store i32 1, i32* [[DID_IT]],
// CHECK: br label

// CHECK: call void @__kmpc_end_single(%{{.+}}* @{{.+}}, i32 %{{.+}})
// CHECK: br label

// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST:%.+]], i64 0, i64 0
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 1
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 2
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK-NEXT: bitcast [3 x i8*]* [[LIST]] to i8*
// CHECK-NEXT: load i32, i32* [[DID_IT]],
// CHECK-NEXT: call void @__kmpc_copyprivate([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}}, i64 24, i8* %{{.+}}, void (i8*, i8*)* [[COPY_FUNC:@[^,]+]], i32 %{{.+}})
// CHECK-NEXT: ret void

// CHECK-LABEL: @_ZZN2SSC1ERiENKUlvE_clEv(
// CHECK: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP:%.+]], i32 0, i32 1
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 2
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, -1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 3
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: sdiv i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 1
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: bitcast i64* %
// CHECK-NEXT: store i32 %{{.+}}, i32* %
// CHECK-NEXT: load i64, i64* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 2
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: bitcast i64* %
// CHECK-NEXT: store i32 %{{.+}}, i32* %
// CHECK-NEXT: load i64, i64* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 3
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: bitcast i64* %
// CHECK-NEXT: store i32 %{{.+}}, i32* %
// CHECK-NEXT: load i64, i64* %
// CHECK-NEXT: call void ([[IDENT_T_TY]]*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call([[IDENT_T_TY]]* @{{.+}}, i32 4, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, [[SS_TY]]*, i64, i64, i64)* [[SS_MICROTASK1:@.+]] to void
// CHECK-NEXT: ret void

// CHECK: define internal void [[COPY_FUNC]](i8*, i8*)
// CHECK: ret void

// CHECK: define internal void [[SS_MICROTASK1]](i32* {{[^,]+}}, i32* {{[^,]+}}, [[SS_TY]]* {{.+}}, i64 {{.+}}, i64 {{.+}}, i64 {{.+}})
// Private a
// CHECK: alloca i64,
// Private b
// CHECK: alloca i64,
// Private c
// CHECK: alloca i64,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: [[DID_IT:%.+]] = alloca i32,
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1

// CHECK-NOT: getelementptr inbounds
// CHECK: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NOT: getelementptr inbounds
// CHECK: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, -1
// CHECK-NEXT: store i32 %
// CHECK-NOT: getelementptr inbounds
// CHECK: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: sdiv i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: store i32 1, i32* [[DID_IT]],
// CHECK-NEXT: br label

// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST:%.+]], i64 0, i64 0
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 1
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 2
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK-NEXT: bitcast [3 x i8*]* [[LIST]] to i8*
// CHECK-NEXT: load i32, i32* [[DID_IT]],
// CHECK-NEXT: call void @__kmpc_copyprivate([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}}, i64 24, i8* %{{.+}}, void (i8*, i8*)* [[COPY_FUNC:@[^,]+]], i32 %{{.+}})
// CHECK-NEXT: ret void

// CHECK: define internal void [[COPY_FUNC]](i8*, i8*)
// CHECK: ret void

// CHECK-LABEL: @_ZN3SSTIdEC2Ev
// CHECK: getelementptr inbounds [[SST_TY]], [[SST_TY]]* %{{.+}}, i32 0, i32 0
// CHECK-NEXT: store double 0.000000e+00, double* %
// CHECK-NEXT: getelementptr inbounds [[SST_TY]], [[SST_TY]]* %{{.+}}, i32 0, i32 0
// CHECK-NEXT: store double* %{{.+}}, double** %
// CHECK-NEXT: load double*, double** %
// CHECK-NEXT: load double, double* %
// CHECK-NEXT: bitcast i64* %{{.+}} to double*
// CHECK-NEXT: store double %{{.+}}, double* %
// CHECK-NEXT: load i64, i64* %
// CHECK-NEXT: call void ([[IDENT_T_TY]]*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call([[IDENT_T_TY]]* @{{.+}}, i32 2, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, [[SST_TY]]*, i64)* [[SST_MICROTASK:@.+]] to void
// CHECK-NEXT: ret void

// CHECK: define internal void [[SST_MICROTASK]](i32* {{[^,]+}}, i32* {{[^,]+}}, [[SST_TY]]* {{.+}}, i64 {{.+}})
// CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1

// CHECK: getelementptr inbounds %{{.+}}, %{{.+}}* %{{.+}}, i32 0, i32 1
// CHECK-NEXT: load double*, double** %
// CHECK-NEXT: store double* %
// CHECK-LABEL: invoke void @_ZZN3SSTIdEC1EvENKUlvE_clEv(

// CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: store i32 1, i32* [[DID_IT]],
// CHECK-NEXT: br label

// CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: br label

// CHECK: getelementptr inbounds [1 x i8*], [1 x i8*]* [[LIST:%.+]], i64 0, i64 0
// CHECK: load double*, double** %
// CHECK-NEXT: bitcast double* %
// CHECK-NEXT: store i8* %
// CHECK-NEXT: bitcast [1 x i8*]* [[LIST]] to i8*
// CHECK-NEXT: load i32, i32* [[DID_IT]],
// CHECK-NEXT: call void @__kmpc_copyprivate([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}}, i64 8, i8* %{{.+}}, void (i8*, i8*)* [[COPY_FUNC:@[^,]+]], i32 %{{.+}})
// CHECK-NEXT: ret void

// CHECK-LABEL: @_ZZN3SSTIdEC1EvENKUlvE_clEv(
// CHECK: getelementptr inbounds %{{.+}}, %{{.+}}* %{{.+}}, i32 0, i32 1
// CHECK-NEXT: getelementptr inbounds %{{.+}}, %{{.+}}* %{{.+}}, i32 0, i32 1
// CHECK-NEXT: load double*, double** %
// CHECK-NEXT: store double* %
// CHECK-LABEL: call void @_ZZZN3SSTIdEC1EvENKUlvE_clEvENKUlvE_clEv(
// CHECK-NEXT: ret void

// CHECK: define internal void [[COPY_FUNC]](i8*, i8*)
// CHECK: ret void

// CHECK-LABEL: @_ZZZN3SSTIdEC1EvENKUlvE_clEvENKUlvE_clEv(