// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C)2006 USAGI/WIDE Project
 *
 * Author:
 *	Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
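/*
 * The three constants from RFC 3566: each 16-byte run below is the
 * all-0x01, all-0x02 and all-0x03 pattern, encrypted under the user
 * key in setkey() to derive the subkeys K1, K2 and K3.
 */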
static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
			   0x02020202, 0x02020202, 0x02020202, 0x02020202,
			   0x03030303, 0x03030303, 0x03030303, 0x03030303};
/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | xcbc_tfm_ctx
 * +------------------------
 * | consts (block size * 2)
 * +------------------------
 */
struct xcbc_tfm_ctx {
	struct crypto_cipher *child;
	u8 ctx[];
};
/*
 * +------------------------
 * | <shash desc>
 * +------------------------
 * | xcbc_desc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 */
struct xcbc_desc_ctx {
	unsigned int len;
	u8 ctx[];
};
#define XCBC_BLOCKSIZE	16
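/*
 * Derive the XCBC subkeys: K2 and K3 are cached in the tfm context as
 * "consts" (used only by final()), while K1 replaces the user key in
 * the underlying cipher, so every message block is encrypted under K1.
 */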
static int crypto_xcbc_digest_setkey(struct crypto_shash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent);
	u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
	int err = 0;
	u8 key1[XCBC_BLOCKSIZE];
	int bs = sizeof(key1);

	if ((err = crypto_cipher_setkey(ctx->child, inkey, keylen)))
		return err;

	crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs);
	crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2);
	crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks);

	return crypto_cipher_setkey(ctx->child, key1, bs);
}
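/*
 * Per-request state lives after xcbc_desc_ctx: "odds" buffers the
 * trailing (possibly partial) block, "prev" holds the CBC-MAC
 * chaining value.
 */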
static int crypto_xcbc_digest_init(struct shash_desc *pdesc)
{
	unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm);
	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_blocksize(pdesc->tfm);
	u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs;

	ctx->len = 0;
	memset(prev, 0, bs);

	return 0;
}
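/*
 * Note that update() never consumes the last block of the message: it
 * stays buffered in "odds" because final() must know whether that
 * block was complete (XOR with K2) or needs padding (XOR with K3)
 * before the final encryption.
 */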
static int crypto_xcbc_digest_update(struct shash_desc *pdesc, const u8 *p,
				     unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
	u8 *prev = odds + bs;

	/* checking the data can fill the block */
	if ((ctx->len + len) <= bs) {
		memcpy(odds + ctx->len, p, len);
		ctx->len += len;
		return 0;
	}

	/* filling odds with new data and encrypting it */
	memcpy(odds + ctx->len, p, bs - ctx->len);
	len -= bs - ctx->len;
	p += bs - ctx->len;

	crypto_xor(prev, odds, bs);
	crypto_cipher_encrypt_one(tfm, prev, prev);

	/* clearing the length */
	ctx->len = 0;

	/* encrypting the rest of data */
	while (len > bs) {
		crypto_xor(prev, p, bs);
		crypto_cipher_encrypt_one(tfm, prev, prev);
		p += bs;
		len -= bs;
	}

	/* keeping the surplus of blocksize */
	if (len) {
		memcpy(odds, p, len);
		ctx->len = len;
	}

	return 0;
}
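/*
 * Finalize per RFC 3566: a complete last block is XORed with K2
 * (consts + 0); an incomplete one is padded with a single 0x80 byte
 * followed by zeros and XORed with K3 (consts + bs). One last
 * encryption of the chaining value yields the MAC.
 */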
static int crypto_xcbc_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1);
	u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
	u8 *prev = odds + bs;
	unsigned int offset = 0;

	if (ctx->len != bs) {
		unsigned int rlen;
		u8 *p = odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		offset += bs;
	}

	crypto_xor(prev, odds, bs);
	crypto_xor(prev, consts + offset, bs);

	crypto_cipher_encrypt_one(tfm, out, prev);

	return 0;
}
static int xcbc_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}
static void xcbc_exit_tfm(struct crypto_tfm *tfm)
{
	struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}
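/*
 * Template instantiation: "xcbc(cipher)" wraps a cipher with a 16-byte
 * block size. descsize and cra_ctxsize are padded so the runtime
 * PTR_ALIGN() in the handlers always finds room for two aligned blocks
 * of state after the base context.
 */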
static int xcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_alg *alg;
	unsigned long alignmask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
	if (err)
		return err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	err = -EINVAL;
	switch (alg->cra_blocksize) {
	case XCBC_BLOCKSIZE:
		break;
	default:
		goto out_put_alg;
	}

	inst = shash_alloc_instance("xcbc", alg);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	err = crypto_init_spawn(shash_instance_ctx(inst), alg,
				shash_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	alignmask = alg->cra_alignmask | 3;
	inst->alg.base.cra_alignmask = alignmask;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = ALIGN(sizeof(struct xcbc_desc_ctx),
				   crypto_tfm_ctx_alignment()) +
			     (alignmask &
			      ~(crypto_tfm_ctx_alignment() - 1)) +
			     alg->cra_blocksize * 2;

	inst->alg.base.cra_ctxsize = ALIGN(sizeof(struct xcbc_tfm_ctx),
					   alignmask + 1) +
				     alg->cra_blocksize * 2;
	inst->alg.base.cra_init = xcbc_init_tfm;
	inst->alg.base.cra_exit = xcbc_exit_tfm;

	inst->alg.init = crypto_xcbc_digest_init;
	inst->alg.update = crypto_xcbc_digest_update;
	inst->alg.final = crypto_xcbc_digest_final;
	inst->alg.setkey = crypto_xcbc_digest_setkey;

	err = shash_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		shash_free_instance(shash_crypto_instance(inst));
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}
static struct crypto_template crypto_xcbc_tmpl = {
	.name = "xcbc",
	.create = xcbc_create,
	.free = shash_free_instance,
	.module = THIS_MODULE,
};
static int __init crypto_xcbc_module_init(void)
{
	return crypto_register_template(&crypto_xcbc_tmpl);
}

static void __exit crypto_xcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_xcbc_tmpl);
}
subsys_initcall(crypto_xcbc_module_init);
module_exit(crypto_xcbc_module_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCBC keyed hash algorithm");
MODULE_ALIAS_CRYPTO("xcbc");
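/*
 * Usage sketch (illustrative only, not part of this file): a caller
 * would typically instantiate the template over AES and drive it
 * through the shash API, roughly as follows. Error handling is
 * omitted; "key", "data" and "data_len" are assumed to be supplied
 * by the caller.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("xcbc(aes)", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 mac[16];
 *
 *	desc->tfm = tfm;
 *	crypto_shash_setkey(tfm, key, 16);
 *	crypto_shash_digest(desc, data, data_len, mac);
 *	crypto_free_shash(tfm);
 */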