/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

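/*
 * Handle the final, possibly partial block: encrypt the counter block into
 * an aligned keystream buffer on the stack and XOR only the remaining
 * nbytes of it into the destination.
 */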
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

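/*
 * Encrypt full blocks when source and destination are separate buffers:
 * generate the keystream directly into dst, then XOR the plaintext in.
 */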
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

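/*
 * Encrypt full blocks in place: the keystream is generated into an aligned
 * stack buffer and XORed over the data at src.
 */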
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

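/*
 * Top-level CTR handler, shared by encryption and decryption: walk the
 * request, process whole blocks, then handle any trailing partial block.
 */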
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

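/* Instantiate the "ctr" template around an underlying block cipher. */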
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto out_free_inst;
	goto out_put_alg;

out_free_inst:
	inst->free(inst);
out_put_alg:
	crypto_mod_put(alg);
	return err;
}

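/*
 * RFC 3686 keys carry the 4-byte nonce after the raw cipher key: split the
 * nonce off and pass the remaining bytes down to the child CTR transform.
 */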
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}

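/*
 * Build the RFC 3686 counter block (nonce || per-request IV || counter of 1)
 * and run the request through the child CTR transform.  The same routine
 * serves both encryption and decryption.
 */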
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

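/*
 * Allocate the child CTR transform and size the request context so that the
 * sub-request (plus any alignment padding) fits behind our own data.
 */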
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

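/* Instantiate the "rfc3686" template on top of an existing CTR-mode skcipher. */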
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	mask = crypto_requires_sync(algt->type, algt->mask) |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	spawn = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_drop_spawn;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_drop_spawn;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_templates(crypto_ctr_tmpls,
					 ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_templates(crypto_ctr_tmpls,
				    ARRAY_SIZE(crypto_ctr_tmpls));
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");