/*
 * Copyright (C)2006 USAGI/WIDE Project
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 *
 * Author:
 *	Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */
#include <crypto/scatterwalk.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/hardirq.h>
#include <linux/kernel.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/scatterlist.h>
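
/*
 * Key-derivation constants from the XCBC-MAC construction (RFC 3566):
 * the block cipher is keyed with the user-supplied key and each of these
 * three block-sized constants is encrypted to obtain K1 (consts),
 * K2 (consts + bs) and K3 (consts + bs * 2).
 */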
static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
			   0x02020202, 0x02020202, 0x02020202, 0x02020202,
			   0x03030303, 0x03030303, 0x03030303, 0x03030303};
/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | crypto_xcbc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 * | key (block size)
 * +------------------------
 * | consts (block size * 3)
 * +------------------------
 */
struct crypto_xcbc_ctx {
	struct crypto_cipher *child;
	u8 *odds;
	u8 *prev;
	u8 *key;
	u8 *consts;
	void (*xor)(u8 *a, const u8 *b, unsigned int bs);
	unsigned int keylen;
	unsigned int len;
};
static void xor_128(u8 *a, const u8 *b, unsigned int bs)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
	((u32 *)a)[2] ^= ((u32 *)b)[2];
	((u32 *)a)[3] ^= ((u32 *)b)[3];
}
static int _crypto_xcbc_digest_setkey(struct crypto_hash *parent,
				      struct crypto_xcbc_ctx *ctx)
{
	int bs = crypto_hash_blocksize(parent);
	int err = 0;
	u8 key1[bs];

	if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen)))
		return err;

	crypto_cipher_encrypt_one(ctx->child, key1, ctx->consts);

	return crypto_cipher_setkey(ctx->child, key1, bs);
}
static int crypto_xcbc_digest_setkey(struct crypto_hash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);

	if (keylen != crypto_cipher_blocksize(ctx->child))
		return -EINVAL;

	ctx->keylen = keylen;
	memcpy(ctx->key, inkey, keylen);
	ctx->consts = (u8 *)ks;

	return _crypto_xcbc_digest_setkey(parent, ctx);
}
static int crypto_xcbc_digest_init(struct hash_desc *pdesc)
{
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(pdesc->tfm);
	int bs = crypto_hash_blocksize(pdesc->tfm);

	ctx->len = 0;
	memset(ctx->odds, 0, bs);
	memset(ctx->prev, 0, bs);

	return 0;
}
static int crypto_xcbc_digest_update2(struct hash_desc *pdesc,
				      struct scatterlist *sg,
				      unsigned int nbytes)
{
	struct crypto_hash *parent = pdesc->tfm;
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
	struct crypto_cipher *tfm = ctx->child;
	int bs = crypto_hash_blocksize(parent);
	unsigned int i = 0;

	do {
		struct page *pg = sg_page(&sg[i]);
		unsigned int offset = sg[i].offset;
		unsigned int slen = sg[i].length;

		if (unlikely(slen > nbytes))
			slen = nbytes;

		nbytes -= slen;

		while (slen > 0) {
			unsigned int len = min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
			char *p = crypto_kmap(pg, 0) + offset;

			/* checking the data can fill the block */
			if ((ctx->len + len) <= bs) {
				memcpy(ctx->odds + ctx->len, p, len);
				ctx->len += len;
				slen -= len;

				/* checking the rest of the page */
				if (len + offset >= PAGE_SIZE) {
					offset = 0;
					pg++;
				} else
					offset += len;

				crypto_kunmap(p, 0);
				crypto_yield(pdesc->flags);
				continue;
			}

			/* filling odds with new data and encrypting it */
			memcpy(ctx->odds + ctx->len, p, bs - ctx->len);
			len -= bs - ctx->len;
			p += bs - ctx->len;

			ctx->xor(ctx->prev, ctx->odds, bs);
			crypto_cipher_encrypt_one(tfm, ctx->prev, ctx->prev);

			/* clearing the length */
			ctx->len = 0;

			/* encrypting the rest of data */
			while (len > bs) {
				ctx->xor(ctx->prev, p, bs);
				crypto_cipher_encrypt_one(tfm, ctx->prev,
							  ctx->prev);
				p += bs;
				len -= bs;
			}

			/* keeping the surplus of blocksize */
			if (len) {
				memcpy(ctx->odds, p, len);
				ctx->len = len;
			}
			crypto_kunmap(p, 0);
			crypto_yield(pdesc->flags);
			slen -= min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
			offset = 0;
			pg++;
		}

		i++;
	} while (nbytes > 0);

	return 0;
}
static int crypto_xcbc_digest_update(struct hash_desc *pdesc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	if (WARN_ON_ONCE(in_irq()))
		return -EDEADLK;

	return crypto_xcbc_digest_update2(pdesc, sg, nbytes);
}
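
/*
 * Finalization per RFC 3566: a complete last block is XORed with K2
 * before the final encryption; a partial last block is padded with a
 * single 0x80 byte followed by zeroes and XORed with K3 instead.
 */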
static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out)
{
	struct crypto_hash *parent = pdesc->tfm;
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
	struct crypto_cipher *tfm = ctx->child;
	int bs = crypto_hash_blocksize(parent);
	int err = 0;

	if (ctx->len == bs) {
		u8 key2[bs];

		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
			return err;

		crypto_cipher_encrypt_one(tfm, key2,
					  (u8 *)(ctx->consts + bs));

		ctx->xor(ctx->prev, ctx->odds, bs);
		ctx->xor(ctx->prev, key2, bs);
		_crypto_xcbc_digest_setkey(parent, ctx);

		crypto_cipher_encrypt_one(tfm, out, ctx->prev);
	} else {
		u8 key3[bs];
		unsigned int rlen;
		u8 *p = ctx->odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
			return err;

		crypto_cipher_encrypt_one(tfm, key3,
					  (u8 *)(ctx->consts + bs * 2));

		ctx->xor(ctx->prev, ctx->odds, bs);
		ctx->xor(ctx->prev, key3, bs);

		_crypto_xcbc_digest_setkey(parent, ctx);

		crypto_cipher_encrypt_one(tfm, out, ctx->prev);
	}

	return 0;
}
static int crypto_xcbc_digest(struct hash_desc *pdesc,
			      struct scatterlist *sg, unsigned int nbytes, u8 *out)
{
	if (WARN_ON_ONCE(in_irq()))
		return -EDEADLK;

	crypto_xcbc_digest_init(pdesc);
	crypto_xcbc_digest_update2(pdesc, sg, nbytes);
	return crypto_xcbc_digest_final(pdesc, out);
}
static int xcbc_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
	int bs = crypto_hash_blocksize(__crypto_hash_cast(tfm));

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	switch (bs) {
	case 16:
		ctx->xor = xor_128;
		break;
	default:
		return -EINVAL;
	}

	ctx->child = cipher;
	ctx->odds = (u8 *)(ctx + 1);
	ctx->prev = ctx->odds + bs;
	ctx->key = ctx->prev + bs;

	return 0;
}
static void xcbc_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
	crypto_free_cipher(ctx->child);
}
static struct crypto_instance *xcbc_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_HASH);
	if (err)
		return ERR_PTR(err);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	switch (alg->cra_blocksize) {
	case 16:
		break;
	default:
		inst = ERR_PTR(-EINVAL);
		goto out_put_alg;
	}

	inst = crypto_alloc_instance("xcbc", alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_hash_type;

	inst->alg.cra_hash.digestsize = alg->cra_blocksize;
	inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) +
				ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *));
	inst->alg.cra_init = xcbc_init_tfm;
	inst->alg.cra_exit = xcbc_exit_tfm;

	inst->alg.cra_hash.init = crypto_xcbc_digest_init;
	inst->alg.cra_hash.update = crypto_xcbc_digest_update;
	inst->alg.cra_hash.final = crypto_xcbc_digest_final;
	inst->alg.cra_hash.digest = crypto_xcbc_digest;
	inst->alg.cra_hash.setkey = crypto_xcbc_digest_setkey;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}
static void xcbc_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}
static struct crypto_template crypto_xcbc_tmpl = {
	.name = "xcbc",
	.alloc = xcbc_alloc,
	.free = xcbc_free,
	.module = THIS_MODULE,
};
static int __init crypto_xcbc_module_init(void)
{
	return crypto_register_template(&crypto_xcbc_tmpl);
}

static void __exit crypto_xcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_xcbc_tmpl);
}

module_init(crypto_xcbc_module_init);
module_exit(crypto_xcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCBC keyed hash algorithm");
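
/*
 * Usage sketch (not compiled into the module): how a kernel-side caller
 * might compute an XCBC-MAC through the hash API this template registers.
 * Assumes an AES cipher is available so that "xcbc(aes)" can be
 * instantiated; the key and data values below are placeholders and error
 * handling is abbreviated.
 */
#if 0
static int xcbc_example(void)
{
	struct crypto_hash *tfm;
	struct hash_desc desc;
	struct scatterlist sg;
	u8 key[16] = { 0 };	/* 128-bit key, all zeroes for the example */
	u8 data[32] = { 0 };	/* message to authenticate */
	u8 mac[16];		/* digest size equals the cipher block size */
	int err;

	tfm = crypto_alloc_hash("xcbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_hash_setkey(tfm, key, sizeof(key));
	if (err)
		goto out;

	desc.tfm = tfm;
	desc.flags = 0;
	sg_init_one(&sg, data, sizeof(data));

	err = crypto_hash_digest(&desc, &sg, sizeof(data), mac);
out:
	crypto_free_hash(tfm);
	return err;
}
#endif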