/*
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <linux/kernel.h>
//#include <linux/crypto.h>
#include "rtl_crypto.h"
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"
typedef void (cryptfn_t)(void *, u8 *, const u8 *);
typedef void (procfn_t)(struct crypto_tfm *, u8 *,
                        u8 *, cryptfn_t, int enc, void *, int);
/* Block XOR helpers installed as cit_xor_block for CBC chaining. */
static inline void xor_64(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
}
static inline void xor_128(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
        ((u32 *)a)[2] ^= ((u32 *)b)[2];
        ((u32 *)a)[3] ^= ((u32 *)b)[3];
}
/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per block.
 * (An illustrative usage sketch appears at the end of this file.)
 */
static int crypt(struct crypto_tfm *tfm,
                 struct scatterlist *dst,
                 struct scatterlist *src,
                 unsigned int nbytes, cryptfn_t crfn,
                 procfn_t prfn, int enc, void *info)
{
        struct scatter_walk walk_in, walk_out;
        const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
        u8 tmp_src[bsize];
        u8 tmp_dst[bsize];

        if (!nbytes)
                return 0;

        if (nbytes % bsize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                return -EINVAL;
        }

        scatterwalk_start(&walk_in, src);
        scatterwalk_start(&walk_out, dst);

        for (;;) {
                u8 *src_p, *dst_p;
                int in_place;

                scatterwalk_map(&walk_in, 0);
                scatterwalk_map(&walk_out, 1);
                src_p = scatterwalk_whichbuf(&walk_in, bsize, tmp_src);
                dst_p = scatterwalk_whichbuf(&walk_out, bsize, tmp_dst);
                in_place = scatterwalk_samebuf(&walk_in, &walk_out,
                                               src_p, dst_p);

                nbytes -= bsize;

                scatterwalk_copychunks(src_p, &walk_in, bsize, 0);
                prfn(tfm, dst_p, src_p, crfn, enc, info, in_place);
                scatterwalk_done(&walk_in, 0, nbytes);

                scatterwalk_copychunks(dst_p, &walk_out, bsize, 1);
                scatterwalk_done(&walk_out, 1, nbytes);

                if (!nbytes)
                        return 0;

                crypto_yield(tfm);
        }
}
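
/*
 * CBC chaining note: on encryption the IV is XOR'ed into the plaintext
 * before the block cipher runs and the resulting ciphertext becomes the
 * next IV; on decryption the block is deciphered first, then XOR'ed with
 * the IV, and the saved ciphertext (src) becomes the next IV.  The
 * in_place case bounces the output through a stack buffer so that src is
 * still intact when the IV is updated.
 */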
static void cbc_process(struct crypto_tfm *tfm, u8 *dst, u8 *src,
                        cryptfn_t fn, int enc, void *info, int in_place)
{
        u8 *iv = info;

        /* Null encryption */
        if (!iv)
                return;

        if (enc) {
                tfm->crt_u.cipher.cit_xor_block(iv, src);
                fn(crypto_tfm_ctx(tfm), dst, iv);
                memcpy(iv, dst, crypto_tfm_alg_blocksize(tfm));
        } else {
                u8 stack[in_place ? crypto_tfm_alg_blocksize(tfm) : 0];
                u8 *buf = in_place ? stack : dst;

                fn(crypto_tfm_ctx(tfm), buf, src);
                tfm->crt_u.cipher.cit_xor_block(buf, iv);
                memcpy(iv, src, crypto_tfm_alg_blocksize(tfm));
                if (buf != dst)
                        memcpy(dst, buf, crypto_tfm_alg_blocksize(tfm));
        }
}
static void ecb_process(struct crypto_tfm *tfm, u8 *dst, u8 *src,
                        cryptfn_t fn, int enc, void *info, int in_place)
{
        fn(crypto_tfm_ctx(tfm), dst, src);
}
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
        struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

        if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        } else
                return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
                                       &tfm->crt_flags);
}
static int ecb_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return crypt(tfm, dst, src, nbytes,
                     tfm->__crt_alg->cra_cipher.cia_encrypt,
                     ecb_process, 1, NULL);
}
static int ecb_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        return crypt(tfm, dst, src, nbytes,
                     tfm->__crt_alg->cra_cipher.cia_decrypt,
                     ecb_process, 1, NULL);
}
static int cbc_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        return crypt(tfm, dst, src, nbytes,
                     tfm->__crt_alg->cra_cipher.cia_encrypt,
                     cbc_process, 1, tfm->crt_cipher.cit_iv);
}
static int cbc_encrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        return crypt(tfm, dst, src, nbytes,
                     tfm->__crt_alg->cra_cipher.cia_encrypt,
                     cbc_process, 1, iv);
}
static int cbc_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        return crypt(tfm, dst, src, nbytes,
                     tfm->__crt_alg->cra_cipher.cia_decrypt,
                     cbc_process, 0, tfm->crt_cipher.cit_iv);
}
static int cbc_decrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        return crypt(tfm, dst, src, nbytes,
                     tfm->__crt_alg->cra_cipher.cia_decrypt,
                     cbc_process, 0, iv);
}
/* Placeholders for modes (CFB, CTR) that are not implemented here. */
static int nocrypt(struct crypto_tfm *tfm,
                   struct scatterlist *dst,
                   struct scatterlist *src,
                   unsigned int nbytes)
{
        return -ENOSYS;
}
static int nocrypt_iv(struct crypto_tfm *tfm,
                      struct scatterlist *dst,
                      struct scatterlist *src,
                      unsigned int nbytes, u8 *iv)
{
        return -ENOSYS;
}
int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
        u32 mode = flags & CRYPTO_TFM_MODE_MASK;

        tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
        if (flags & CRYPTO_TFM_REQ_WEAK_KEY)
                tfm->crt_flags = CRYPTO_TFM_REQ_WEAK_KEY;

        return 0;
}
int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
        int ret = 0;
        struct cipher_tfm *ops = &tfm->crt_cipher;

        ops->cit_setkey = setkey;

        switch (tfm->crt_cipher.cit_mode) {
        case CRYPTO_TFM_MODE_ECB:
                ops->cit_encrypt = ecb_encrypt;
                ops->cit_decrypt = ecb_decrypt;
                break;

        case CRYPTO_TFM_MODE_CBC:
                ops->cit_encrypt = cbc_encrypt;
                ops->cit_decrypt = cbc_decrypt;
                ops->cit_encrypt_iv = cbc_encrypt_iv;
                ops->cit_decrypt_iv = cbc_decrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CFB:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CTR:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        default:
                BUG();
        }

        if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
                switch (crypto_tfm_alg_blocksize(tfm)) {
                case 8:
                        ops->cit_xor_block = xor_64;
                        break;

                case 16:
                        ops->cit_xor_block = xor_128;
                        break;

                default:
                        printk(KERN_WARNING "%s: block size %u not supported\n",
                               crypto_tfm_alg_name(tfm),
                               crypto_tfm_alg_blocksize(tfm));
                        ret = -EINVAL;
                        goto out;
                }

                ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
                ops->cit_iv = kmalloc(ops->cit_ivsize, GFP_KERNEL);
                if (ops->cit_iv == NULL)
                        ret = -ENOMEM;
        }

out:
        return ret;
}
void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
        kfree(tfm->crt_cipher.cit_iv);
}
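
/*
 * Illustrative usage sketch (not part of the original file): a minimal
 * example of how a caller might drive the cit_* ops installed above via
 * the legacy crypto_alloc_tfm()/crypto_cipher_*() helpers.  It assumes
 * rtl_crypto.h mirrors the old kernel crypto API and the old scatterlist
 * layout (page/offset/length fields); the "aes" name, key handling and
 * buffer setup are placeholders only.  Kept under #if 0 so it is never
 * compiled.
 */
#if 0
static int example_cbc_encrypt(u8 *buf, unsigned int len,
                               const u8 *key, unsigned int keylen, u8 *iv)
{
        struct crypto_tfm *tfm;
        struct scatterlist sg;
        int ret;

        /* len must be a multiple of the cipher block size (see crypt()). */
        tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
        if (!tfm)
                return -ENOMEM;

        ret = crypto_cipher_setkey(tfm, key, keylen);   /* ends up in setkey() */
        if (ret)
                goto out;

        /* Old-style scatterlist fields (assumed layout). */
        sg.page = virt_to_page(buf);
        sg.offset = offset_in_page(buf);
        sg.length = len;

        crypto_cipher_set_iv(tfm, iv, crypto_tfm_alg_ivsize(tfm));
        ret = crypto_cipher_encrypt(tfm, &sg, &sg, len);        /* -> cbc_encrypt() */
out:
        crypto_free_tfm(tfm);
        return ret;
}
#endif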