// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for x86_64/AVX2/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 */

#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

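/* Blocks (16 bytes each) processed per call by the 16-way (AVX) and 32-way (AVX2) routines. */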
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
#define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32

/* 32-way AVX2/AES-NI parallel cipher functions */
asmlinkage void camellia_ecb_enc_32way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void camellia_ecb_dec_32way(const void *ctx, u8 *dst, const u8 *src);

asmlinkage void camellia_cbc_dec_32way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void camellia_ctr_32way(const void *ctx, u8 *dst, const u8 *src,
				   le128 *iv);

asmlinkage void camellia_xts_enc_32way(const void *ctx, u8 *dst, const u8 *src,
				       le128 *iv);
asmlinkage void camellia_xts_dec_32way(const void *ctx, u8 *dst, const u8 *src,
				       le128 *iv);

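/*
 * Glue context tables: each lists the available code paths in decreasing
 * order of width (32-way AVX2, 16-way AVX, then the 2-way/1-way fallbacks).
 * The glue helper picks the widest path that still fits the remaining data
 * and only takes the FPU for chunks of at least fpu_blocks_limit blocks.
 */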
static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = camellia_ecb_enc_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = camellia_ecb_enc_16way }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = camellia_enc_blk_2way }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = camellia_enc_blk }
	} }
};

static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ctr = camellia_ctr_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = camellia_ctr_16way }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = camellia_crypt_ctr_2way }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = camellia_crypt_ctr }
	} }
};

static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = camellia_xts_enc_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = camellia_xts_enc_16way }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = camellia_xts_enc }
	} }
};

static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = camellia_ecb_dec_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = camellia_ecb_dec_16way }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = camellia_dec_blk_2way }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = camellia_dec_blk }
	} }
};

static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .cbc = camellia_cbc_dec_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = camellia_cbc_dec_16way }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = camellia_decrypt_cbc_2way }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = camellia_dec_blk }
	} }
};

static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = camellia_xts_dec_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = camellia_xts_dec_16way }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = camellia_xts_dec }
	} }
};

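/* skcipher entry points: thin wrappers around the generic 128-bit glue helpers. */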
static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
			   unsigned int keylen)
{
	return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen);
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&camellia_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&camellia_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(camellia_enc_blk, req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&camellia_ctr, req);
}

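/*
 * XTS uses two Camellia keys: tweak_ctx encrypts the IV into the initial
 * tweak while crypt_ctx processes the data blocks. Decryption still
 * encrypts the tweak, hence camellia_enc_blk is passed in both paths.
 */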
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&camellia_enc_xts, req, camellia_enc_blk,
				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&camellia_dec_xts, req, camellia_enc_blk,
				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
}

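/*
 * The "__"-prefixed CRYPTO_ALG_INTERNAL algorithms may only be invoked in a
 * context where the FPU is usable; simd_register_skciphers_compat() wraps
 * them in SIMD helper transforms (falling back to cryptd when SIMD is not
 * available), and those wrappers are what "camellia" users actually get.
 */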
static struct skcipher_alg camellia_algs[] = {
	{
		.base.cra_name		= "__ecb(camellia)",
		.base.cra_driver_name	= "__ecb-camellia-aesni-avx2",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.setkey			= camellia_setkey,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(camellia)",
		.base.cra_driver_name	= "__cbc-camellia-aesni-avx2",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.ivsize			= CAMELLIA_BLOCK_SIZE,
		.setkey			= camellia_setkey,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "__ctr(camellia)",
		.base.cra_driver_name	= "__ctr-camellia-aesni-avx2",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct camellia_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.ivsize			= CAMELLIA_BLOCK_SIZE,
		.chunksize		= CAMELLIA_BLOCK_SIZE,
		.setkey			= camellia_setkey,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}, {
		.base.cra_name		= "__xts(camellia)",
		.base.cra_driver_name	= "__xts-camellia-aesni-avx2",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_xts_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= 2 * CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= 2 * CAMELLIA_MAX_KEY_SIZE,
		.ivsize			= CAMELLIA_BLOCK_SIZE,
		.setkey			= xts_camellia_setkey,
		.encrypt		= xts_encrypt,
		.decrypt		= xts_decrypt,
	},
};

static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];

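/* Register only when AVX2, AES-NI and OS-enabled YMM state are all present. */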
static int __init camellia_aesni_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AVX2) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX2 or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(camellia_algs,
					      ARRAY_SIZE(camellia_algs),
					      camellia_simd_algs);
}

static void __exit camellia_aesni_fini(void)
{
	simd_unregister_skciphers(camellia_algs, ARRAY_SIZE(camellia_algs),
				  camellia_simd_algs);
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX2 optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");