arch/x86/crypto/aegis128-aesni-glue.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *   Glue for AES-NI + SSE4.1 implementation
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <asm/fpu/api.h>
#include <asm/cpu_device_id.h>

#define AEGIS128_BLOCK_ALIGN 16
#define AEGIS128_BLOCK_SIZE 16
#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16

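/*
 * Every AEGIS-128 value (key, nonce, tag, state word) is a 16-byte block;
 * the cipher state consists of five such blocks, which the assembly
 * implementation processes with SSE/AES-NI instructions.
 */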
struct aegis_block {
        u8 bytes[AEGIS128_BLOCK_SIZE] __aligned(AEGIS128_BLOCK_ALIGN);
};

struct aegis_state {
        struct aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
        struct aegis_block key;
};

asmlinkage void aegis128_aesni_init(struct aegis_state *state,
                                    const struct aegis_block *key,
                                    const u8 iv[AEGIS128_NONCE_SIZE]);

asmlinkage void aegis128_aesni_ad(struct aegis_state *state, const u8 *data,
                                  unsigned int len);

asmlinkage void aegis128_aesni_enc(struct aegis_state *state, const u8 *src,
                                   u8 *dst, unsigned int len);

asmlinkage void aegis128_aesni_dec(struct aegis_state *state, const u8 *src,
                                   u8 *dst, unsigned int len);

asmlinkage void aegis128_aesni_enc_tail(struct aegis_state *state,
                                        const u8 *src, u8 *dst,
                                        unsigned int len);

asmlinkage void aegis128_aesni_dec_tail(struct aegis_state *state,
                                        const u8 *src, u8 *dst,
                                        unsigned int len);

asmlinkage void aegis128_aesni_final(struct aegis_state *state,
                                     struct aegis_block *tag_xor,
                                     unsigned int assoclen,
                                     unsigned int cryptlen);

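/*
 * Absorb the associated data. aegis128_aesni_ad() only handles whole
 * 16-byte blocks, so partial data from the scatterlist is accumulated in
 * 'buf' until a full block is available, and a final partial block is
 * zero-padded. Must be called inside kernel_fpu_begin().
 */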
static void crypto_aegis128_aesni_process_ad(
                struct aegis_state *state, struct scatterlist *sg_src,
                unsigned int assoclen)
{
        struct scatter_walk walk;
        struct aegis_block buf;
        unsigned int pos = 0;

        scatterwalk_start(&walk, sg_src);
        while (assoclen != 0) {
                unsigned int size = scatterwalk_clamp(&walk, assoclen);
                unsigned int left = size;
                void *mapped = scatterwalk_map(&walk);
                const u8 *src = (const u8 *)mapped;

                if (pos + size >= AEGIS128_BLOCK_SIZE) {
                        if (pos > 0) {
                                unsigned int fill = AEGIS128_BLOCK_SIZE - pos;
                                memcpy(buf.bytes + pos, src, fill);
                                aegis128_aesni_ad(state, buf.bytes,
                                                  AEGIS128_BLOCK_SIZE);
                                pos = 0;
                                left -= fill;
                                src += fill;
                        }

                        aegis128_aesni_ad(state, src,
                                          left & ~(AEGIS128_BLOCK_SIZE - 1));
                        src += left & ~(AEGIS128_BLOCK_SIZE - 1);
                        left &= AEGIS128_BLOCK_SIZE - 1;
                }

                memcpy(buf.bytes + pos, src, left);
                pos += left;
                assoclen -= size;

                scatterwalk_unmap(mapped);
                scatterwalk_advance(&walk, size);
                scatterwalk_done(&walk, 0, assoclen);
        }

        if (pos > 0) {
                memset(buf.bytes + pos, 0, AEGIS128_BLOCK_SIZE - pos);
                aegis128_aesni_ad(state, buf.bytes, AEGIS128_BLOCK_SIZE);
        }
}

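/*
 * Encrypt or decrypt the message body. Full 16-byte blocks go through the
 * bulk asm routines; any remaining tail shorter than one block is handled
 * by the *_tail variants. Also runs under kernel_fpu_begin().
 */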
static __always_inline void
crypto_aegis128_aesni_process_crypt(struct aegis_state *state,
                                    struct skcipher_walk *walk, bool enc)
{
        while (walk->nbytes >= AEGIS128_BLOCK_SIZE) {
                if (enc)
                        aegis128_aesni_enc(state, walk->src.virt.addr,
                                           walk->dst.virt.addr,
                                           round_down(walk->nbytes,
                                                      AEGIS128_BLOCK_SIZE));
                else
                        aegis128_aesni_dec(state, walk->src.virt.addr,
                                           walk->dst.virt.addr,
                                           round_down(walk->nbytes,
                                                      AEGIS128_BLOCK_SIZE));
                skcipher_walk_done(walk, walk->nbytes % AEGIS128_BLOCK_SIZE);
        }

        if (walk->nbytes) {
                if (enc)
                        aegis128_aesni_enc_tail(state, walk->src.virt.addr,
                                                walk->dst.virt.addr,
                                                walk->nbytes);
                else
                        aegis128_aesni_dec_tail(state, walk->src.virt.addr,
                                                walk->dst.virt.addr,
                                                walk->nbytes);
                skcipher_walk_done(walk, 0);
        }
}

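/*
 * crypto_aead_ctx() only guarantees the crypto API's minimum alignment
 * (cra_alignmask is 0 here), so align the pointer up to the 16 bytes that
 * struct aegis_ctx requires; cra_ctxsize reserves the extra slack.
 */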
static struct aegis_ctx *crypto_aegis128_aesni_ctx(struct crypto_aead *aead)
{
        u8 *ctx = crypto_aead_ctx(aead);
        ctx = PTR_ALIGN(ctx, __alignof__(struct aegis_ctx));
        return (void *)ctx;
}

static int crypto_aegis128_aesni_setkey(struct crypto_aead *aead, const u8 *key,
                                        unsigned int keylen)
{
        struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(aead);

        if (keylen != AEGIS128_KEY_SIZE)
                return -EINVAL;

        memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);

        return 0;
}

static int crypto_aegis128_aesni_setauthsize(struct crypto_aead *tfm,
                                             unsigned int authsize)
{
        if (authsize > AEGIS128_MAX_AUTH_SIZE)
                return -EINVAL;
        if (authsize < AEGIS128_MIN_AUTH_SIZE)
                return -EINVAL;
        return 0;
}

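/*
 * Common driver for encryption and decryption. The skcipher walk is set
 * up first; the init/AD/crypt/final sequence then runs inside a single
 * kernel_fpu_begin()/kernel_fpu_end() section, since all of the asm
 * routines use SSE and AES-NI instructions.
 */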
static __always_inline void
crypto_aegis128_aesni_crypt(struct aead_request *req,
                            struct aegis_block *tag_xor,
                            unsigned int cryptlen, bool enc)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(tfm);
        struct skcipher_walk walk;
        struct aegis_state state;

        if (enc)
                skcipher_walk_aead_encrypt(&walk, req, true);
        else
                skcipher_walk_aead_decrypt(&walk, req, true);

        kernel_fpu_begin();

        aegis128_aesni_init(&state, &ctx->key, req->iv);
        crypto_aegis128_aesni_process_ad(&state, req->src, req->assoclen);
        crypto_aegis128_aesni_process_crypt(&state, &walk, enc);
        aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);

        kernel_fpu_end();
}

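/* Encrypt, then append the computed tag directly after the ciphertext. */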
static int crypto_aegis128_aesni_encrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aegis_block tag = {};
        unsigned int authsize = crypto_aead_authsize(tfm);
        unsigned int cryptlen = req->cryptlen;

        crypto_aegis128_aesni_crypt(req, &tag, cryptlen, true);

        scatterwalk_map_and_copy(tag.bytes, req->dst,
                                 req->assoclen + cryptlen, authsize, 1);
        return 0;
}

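/*
 * Decrypt and verify. aegis128_aesni_final() XORs the computed tag into
 * tag_xor, so seeding it with the expected tag read from the source
 * leaves all-zero bytes iff the tags match; crypto_memneq() then does a
 * constant-time comparison against zeros.
 */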
static int crypto_aegis128_aesni_decrypt(struct aead_request *req)
{
        static const struct aegis_block zeros = {};

        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aegis_block tag;
        unsigned int authsize = crypto_aead_authsize(tfm);
        unsigned int cryptlen = req->cryptlen - authsize;

        scatterwalk_map_and_copy(tag.bytes, req->src,
                                 req->assoclen + cryptlen, authsize, 0);

        crypto_aegis128_aesni_crypt(req, &tag, cryptlen, false);

        return crypto_memneq(tag.bytes, zeros.bytes, authsize) ? -EBADMSG : 0;
}

static struct aead_alg crypto_aegis128_aesni_alg = {
        .setkey = crypto_aegis128_aesni_setkey,
        .setauthsize = crypto_aegis128_aesni_setauthsize,
        .encrypt = crypto_aegis128_aesni_encrypt,
        .decrypt = crypto_aegis128_aesni_decrypt,

        .ivsize = AEGIS128_NONCE_SIZE,
        .maxauthsize = AEGIS128_MAX_AUTH_SIZE,
        .chunksize = AEGIS128_BLOCK_SIZE,

        .base = {
                .cra_flags = CRYPTO_ALG_INTERNAL,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct aegis_ctx) +
                               __alignof__(struct aegis_ctx),
                .cra_alignmask = 0,
                .cra_priority = 400,

                .cra_name = "__aegis128",
                .cra_driver_name = "__aegis128-aesni",

                .cra_module = THIS_MODULE,
        }
};

static struct simd_aead_alg *simd_alg;

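/*
 * Require AES-NI, SSE4.1 and OS-managed SSE state. Registration goes
 * through the crypto SIMD helper, which wraps the internal "__aegis128"
 * algorithm in an "aegis128-aesni" instance that falls back via cryptd
 * whenever SIMD is not usable in the calling context.
 */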
static int __init crypto_aegis128_aesni_module_init(void)
{
        if (!boot_cpu_has(X86_FEATURE_XMM4_1) ||
            !boot_cpu_has(X86_FEATURE_AES) ||
            !cpu_has_xfeatures(XFEATURE_MASK_SSE, NULL))
                return -ENODEV;

        return simd_register_aeads_compat(&crypto_aegis128_aesni_alg, 1,
                                          &simd_alg);
}

static void __exit crypto_aegis128_aesni_module_exit(void)
{
        simd_unregister_aeads(&crypto_aegis128_aesni_alg, 1, &simd_alg);
}

module_init(crypto_aegis128_aesni_module_init);
module_exit(crypto_aegis128_aesni_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm -- AESNI+SSE4.1 implementation");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-aesni");