/*
 * The MORUS-1280 Authenticated-Encryption Algorithm
 * Common x86 SIMD glue skeleton
 *
 * Copyright (c) 2016-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
14 #include <crypto/cryptd.h>
15 #include <crypto/internal/aead.h>
16 #include <crypto/internal/skcipher.h>
17 #include <crypto/morus1280_glue.h>
18 #include <crypto/scatterwalk.h>
19 #include <linux/err.h>
20 #include <linux/init.h>
21 #include <linux/kernel.h>
22 #include <linux/module.h>
23 #include <linux/scatterlist.h>
24 #include <asm/fpu/api.h>
26 struct morus1280_state
{
27 struct morus1280_block s
[MORUS_STATE_BLOCKS
];
30 struct morus1280_ops
{
31 int (*skcipher_walk_init
)(struct skcipher_walk
*walk
,
32 struct aead_request
*req
, bool atomic
);
34 void (*crypt_blocks
)(void *state
, const void *src
, void *dst
,
36 void (*crypt_tail
)(void *state
, const void *src
, void *dst
,
40 static void crypto_morus1280_glue_process_ad(
41 struct morus1280_state
*state
,
42 const struct morus1280_glue_ops
*ops
,
43 struct scatterlist
*sg_src
, unsigned int assoclen
)
45 struct scatter_walk walk
;
46 struct morus1280_block buf
;
49 scatterwalk_start(&walk
, sg_src
);
50 while (assoclen
!= 0) {
51 unsigned int size
= scatterwalk_clamp(&walk
, assoclen
);
52 unsigned int left
= size
;
53 void *mapped
= scatterwalk_map(&walk
);
54 const u8
*src
= (const u8
*)mapped
;
56 if (pos
+ size
>= MORUS1280_BLOCK_SIZE
) {
58 unsigned int fill
= MORUS1280_BLOCK_SIZE
- pos
;
59 memcpy(buf
.bytes
+ pos
, src
, fill
);
60 ops
->ad(state
, buf
.bytes
, MORUS1280_BLOCK_SIZE
);
66 ops
->ad(state
, src
, left
);
67 src
+= left
& ~(MORUS1280_BLOCK_SIZE
- 1);
68 left
&= MORUS1280_BLOCK_SIZE
- 1;
71 memcpy(buf
.bytes
+ pos
, src
, left
);
75 scatterwalk_unmap(mapped
);
76 scatterwalk_advance(&walk
, size
);
77 scatterwalk_done(&walk
, 0, assoclen
);
81 memset(buf
.bytes
+ pos
, 0, MORUS1280_BLOCK_SIZE
- pos
);
82 ops
->ad(state
, buf
.bytes
, MORUS1280_BLOCK_SIZE
);
86 static void crypto_morus1280_glue_process_crypt(struct morus1280_state
*state
,
87 struct morus1280_ops ops
,
88 struct skcipher_walk
*walk
)
90 while (walk
->nbytes
>= MORUS1280_BLOCK_SIZE
) {
91 ops
.crypt_blocks(state
, walk
->src
.virt
.addr
,
93 round_down(walk
->nbytes
,
94 MORUS1280_BLOCK_SIZE
));
95 skcipher_walk_done(walk
, walk
->nbytes
% MORUS1280_BLOCK_SIZE
);
99 ops
.crypt_tail(state
, walk
->src
.virt
.addr
, walk
->dst
.virt
.addr
,
101 skcipher_walk_done(walk
, 0);
105 int crypto_morus1280_glue_setkey(struct crypto_aead
*aead
, const u8
*key
,
108 struct morus1280_ctx
*ctx
= crypto_aead_ctx(aead
);
110 if (keylen
== MORUS1280_BLOCK_SIZE
) {
111 memcpy(ctx
->key
.bytes
, key
, MORUS1280_BLOCK_SIZE
);
112 } else if (keylen
== MORUS1280_BLOCK_SIZE
/ 2) {
113 memcpy(ctx
->key
.bytes
, key
, keylen
);
114 memcpy(ctx
->key
.bytes
+ keylen
, key
, keylen
);
116 crypto_aead_set_flags(aead
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
122 EXPORT_SYMBOL_GPL(crypto_morus1280_glue_setkey
);
124 int crypto_morus1280_glue_setauthsize(struct crypto_aead
*tfm
,
125 unsigned int authsize
)
127 return (authsize
<= MORUS_MAX_AUTH_SIZE
) ? 0 : -EINVAL
;
129 EXPORT_SYMBOL_GPL(crypto_morus1280_glue_setauthsize
);
131 static void crypto_morus1280_glue_crypt(struct aead_request
*req
,
132 struct morus1280_ops ops
,
133 unsigned int cryptlen
,
134 struct morus1280_block
*tag_xor
)
136 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
137 struct morus1280_ctx
*ctx
= crypto_aead_ctx(tfm
);
138 struct morus1280_state state
;
139 struct skcipher_walk walk
;
141 ops
.skcipher_walk_init(&walk
, req
, true);
145 ctx
->ops
->init(&state
, &ctx
->key
, req
->iv
);
146 crypto_morus1280_glue_process_ad(&state
, ctx
->ops
, req
->src
, req
->assoclen
);
147 crypto_morus1280_glue_process_crypt(&state
, ops
, &walk
);
148 ctx
->ops
->final(&state
, tag_xor
, req
->assoclen
, cryptlen
);
153 int crypto_morus1280_glue_encrypt(struct aead_request
*req
)
155 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
156 struct morus1280_ctx
*ctx
= crypto_aead_ctx(tfm
);
157 struct morus1280_ops OPS
= {
158 .skcipher_walk_init
= skcipher_walk_aead_encrypt
,
159 .crypt_blocks
= ctx
->ops
->enc
,
160 .crypt_tail
= ctx
->ops
->enc_tail
,
163 struct morus1280_block tag
= {};
164 unsigned int authsize
= crypto_aead_authsize(tfm
);
165 unsigned int cryptlen
= req
->cryptlen
;
167 crypto_morus1280_glue_crypt(req
, OPS
, cryptlen
, &tag
);
169 scatterwalk_map_and_copy(tag
.bytes
, req
->dst
,
170 req
->assoclen
+ cryptlen
, authsize
, 1);
173 EXPORT_SYMBOL_GPL(crypto_morus1280_glue_encrypt
);
175 int crypto_morus1280_glue_decrypt(struct aead_request
*req
)
177 static const u8 zeros
[MORUS1280_BLOCK_SIZE
] = {};
179 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
180 struct morus1280_ctx
*ctx
= crypto_aead_ctx(tfm
);
181 struct morus1280_ops OPS
= {
182 .skcipher_walk_init
= skcipher_walk_aead_decrypt
,
183 .crypt_blocks
= ctx
->ops
->dec
,
184 .crypt_tail
= ctx
->ops
->dec_tail
,
187 struct morus1280_block tag
;
188 unsigned int authsize
= crypto_aead_authsize(tfm
);
189 unsigned int cryptlen
= req
->cryptlen
- authsize
;
191 scatterwalk_map_and_copy(tag
.bytes
, req
->src
,
192 req
->assoclen
+ cryptlen
, authsize
, 0);
194 crypto_morus1280_glue_crypt(req
, OPS
, cryptlen
, &tag
);
196 return crypto_memneq(tag
.bytes
, zeros
, authsize
) ? -EBADMSG
: 0;
198 EXPORT_SYMBOL_GPL(crypto_morus1280_glue_decrypt
);
200 void crypto_morus1280_glue_init_ops(struct crypto_aead
*aead
,
201 const struct morus1280_glue_ops
*ops
)
203 struct morus1280_ctx
*ctx
= crypto_aead_ctx(aead
);
206 EXPORT_SYMBOL_GPL(crypto_morus1280_glue_init_ops
);
208 int cryptd_morus1280_glue_setkey(struct crypto_aead
*aead
, const u8
*key
,
211 struct cryptd_aead
**ctx
= crypto_aead_ctx(aead
);
212 struct cryptd_aead
*cryptd_tfm
= *ctx
;
214 return crypto_aead_setkey(&cryptd_tfm
->base
, key
, keylen
);
216 EXPORT_SYMBOL_GPL(cryptd_morus1280_glue_setkey
);
218 int cryptd_morus1280_glue_setauthsize(struct crypto_aead
*aead
,
219 unsigned int authsize
)
221 struct cryptd_aead
**ctx
= crypto_aead_ctx(aead
);
222 struct cryptd_aead
*cryptd_tfm
= *ctx
;
224 return crypto_aead_setauthsize(&cryptd_tfm
->base
, authsize
);
226 EXPORT_SYMBOL_GPL(cryptd_morus1280_glue_setauthsize
);
228 int cryptd_morus1280_glue_encrypt(struct aead_request
*req
)
230 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
231 struct cryptd_aead
**ctx
= crypto_aead_ctx(aead
);
232 struct cryptd_aead
*cryptd_tfm
= *ctx
;
234 aead
= &cryptd_tfm
->base
;
235 if (irq_fpu_usable() && (!in_atomic() ||
236 !cryptd_aead_queued(cryptd_tfm
)))
237 aead
= cryptd_aead_child(cryptd_tfm
);
239 aead_request_set_tfm(req
, aead
);
241 return crypto_aead_encrypt(req
);
243 EXPORT_SYMBOL_GPL(cryptd_morus1280_glue_encrypt
);
245 int cryptd_morus1280_glue_decrypt(struct aead_request
*req
)
247 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
248 struct cryptd_aead
**ctx
= crypto_aead_ctx(aead
);
249 struct cryptd_aead
*cryptd_tfm
= *ctx
;
251 aead
= &cryptd_tfm
->base
;
252 if (irq_fpu_usable() && (!in_atomic() ||
253 !cryptd_aead_queued(cryptd_tfm
)))
254 aead
= cryptd_aead_child(cryptd_tfm
);
256 aead_request_set_tfm(req
, aead
);
258 return crypto_aead_decrypt(req
);
260 EXPORT_SYMBOL_GPL(cryptd_morus1280_glue_decrypt
);
262 int cryptd_morus1280_glue_init_tfm(struct crypto_aead
*aead
)
264 struct cryptd_aead
*cryptd_tfm
;
265 struct cryptd_aead
**ctx
= crypto_aead_ctx(aead
);
266 const char *name
= crypto_aead_alg(aead
)->base
.cra_driver_name
;
267 char internal_name
[CRYPTO_MAX_ALG_NAME
];
269 if (snprintf(internal_name
, CRYPTO_MAX_ALG_NAME
, "__%s", name
)
270 >= CRYPTO_MAX_ALG_NAME
)
271 return -ENAMETOOLONG
;
273 cryptd_tfm
= cryptd_alloc_aead(internal_name
, CRYPTO_ALG_INTERNAL
,
274 CRYPTO_ALG_INTERNAL
);
275 if (IS_ERR(cryptd_tfm
))
276 return PTR_ERR(cryptd_tfm
);
279 crypto_aead_set_reqsize(aead
, crypto_aead_reqsize(&cryptd_tfm
->base
));
282 EXPORT_SYMBOL_GPL(cryptd_morus1280_glue_init_tfm
);
284 void cryptd_morus1280_glue_exit_tfm(struct crypto_aead
*aead
)
286 struct cryptd_aead
**ctx
= crypto_aead_ctx(aead
);
288 cryptd_free_aead(*ctx
);
290 EXPORT_SYMBOL_GPL(cryptd_morus1280_glue_exit_tfm
);
292 MODULE_LICENSE("GPL");
293 MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
294 MODULE_DESCRIPTION("MORUS-1280 AEAD mode -- glue for x86 optimizations");