// RUN: %clang_cc1 -triple aarch64 -target-feature +v8a -verify -S %s -o -
// REQUIRES: aarch64-registered-target
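
// Check that crypto-related strings in the target attribute enable the NEON
// AES and SHA-2 intrinsics, and that calling the intrinsics without those
// features is diagnosed.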

#include <arm_neon.h>

__attribute__((target("crypto")))
void test_crypto(uint8x16_t data, uint8x16_t key)
{
  vaeseq_u8(data, key);
  vsha1su1q_u32(data, key);
}

__attribute__((target("+crypto")))
void test_pluscrypto(uint8x16_t data, uint8x16_t key)
{
  vaeseq_u8(data, key);
  vsha1su1q_u32(data, key);
}

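// The same checks apply when +crypto is appended to an arch= string.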
__attribute__((target("arch=armv8.2-a+crypto")))
void test_archcrypto(uint8x16_t data, uint8x16_t key)
{
  vaeseq_u8(data, key);
  vsha1su1q_u32(data, key);
}

// FIXME: This shouldn't need +crypto to be consistent with -mcpu options.
__attribute__((target("cpu=cortex-a55+crypto")))
void test_a55crypto(uint8x16_t data, uint8x16_t key)
{
  vaeseq_u8(data, key);
  vsha1su1q_u32(data, key);
}

__attribute__((target("cpu=cortex-a510+crypto")))
void test_a510crypto(uint8x16_t data, uint8x16_t key)
{
  vaeseq_u8(data, key);
  vsha1su1q_u32(data, key);
}

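// Requesting the sha2 and aes features directly should also enable the
// intrinsics.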
__attribute__((target("+sha2+aes")))
void test_sha2aes(uint8x16_t data, uint8x16_t key)
{
  vaeseq_u8(data, key);
  vsha1su1q_u32(data, key);
}

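// Without any crypto-related feature, both intrinsics must be rejected.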
void test_errors(uint8x16_t data, uint8x16_t key)
{
  vaeseq_u8(data, key); // expected-error {{always_inline function 'vaeseq_u8' requires target feature 'aes'}}
  vsha1su1q_u32(data, key); // expected-error {{always_inline function 'vsha1su1q_u32' requires target feature 'sha2'}}
}