/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/list.h>
#include <linux/string.h>
/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x000000ff
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_DIGEST		0x00000002
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000004
/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_MODE_MASK		0x000000ff
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_MODE_ECB		0x00000001
#define CRYPTO_TFM_MODE_CBC		0x00000002
#define CRYPTO_TFM_MODE_CFB		0x00000004
#define CRYPTO_TFM_MODE_CTR		0x00000008

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000
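
/*
 * Example: how a cipher setkey callback might use these flags.  This is an
 * illustrative sketch only, not part of this header; example_setkey() and
 * key_is_weak() are hypothetical.  The REQ bits are set by the transform
 * user, the RES bits are set by the algorithm to report what went wrong.
 *
 *	static int example_setkey(void *ctx, const u8 *key,
 *	                          unsigned int keylen, u32 *flags)
 *	{
 *		if (keylen != 16) {
 *			*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 *			return -EINVAL;
 *		}
 *		if (key_is_weak(key) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
 *			*flags |= CRYPTO_TFM_RES_WEAK_KEY;
 *			return -EINVAL;
 *		}
 *		return 0;
 *	}
 */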
/*
 * Miscellaneous stuff.
 */
#define CRYPTO_UNSPEC			0
#define CRYPTO_MAX_ALG_NAME		64
struct scatterlist;

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(void *ctx, const u8 *key,
	                  unsigned int keylen, u32 *flags);
	void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
	void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);
};

struct digest_alg {
	unsigned int dia_digestsize;
	void (*dia_init)(void *ctx);
	void (*dia_update)(void *ctx, const u8 *data, unsigned int len);
	void (*dia_final)(void *ctx, u8 *out);
	int (*dia_setkey)(void *ctx, const u8 *key,
	                  unsigned int keylen, u32 *flags);
};

struct compress_alg {
	int (*coa_init)(void *ctx);
	void (*coa_exit)(void *ctx);
	int (*coa_compress)(void *ctx, const u8 *src, unsigned int slen,
	                    u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(void *ctx, const u8 *src, unsigned int slen,
	                      u8 *dst, unsigned int *dlen);
};
#define cra_cipher	cra_u.cipher
#define cra_digest	cra_u.digest
#define cra_compress	cra_u.compress

struct crypto_alg {
	struct list_head cra_list;
	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	const char cra_name[CRYPTO_MAX_ALG_NAME];

	union {
		struct cipher_alg cipher;
		struct digest_alg digest;
		struct compress_alg compress;
	} cra_u;

	struct module *cra_module;
};
/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
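
/*
 * Example: a module providing a cipher and registering it.  This is an
 * illustrative sketch only; the "xor16" algorithm, its context structure
 * and its callbacks xor16_setkey()/xor16_crypt() are hypothetical.
 *
 *	struct xor16_ctx { u8 key[16]; };
 *
 *	static struct crypto_alg xor16_alg = {
 *		.cra_name	= "xor16",
 *		.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	= 16,
 *		.cra_ctxsize	= sizeof(struct xor16_ctx),
 *		.cra_module	= THIS_MODULE,
 *		.cra_list	= LIST_HEAD_INIT(xor16_alg.cra_list),
 *		.cra_u		= { .cipher = {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 16,
 *			.cia_setkey		= xor16_setkey,
 *			.cia_encrypt		= xor16_crypt,
 *			.cia_decrypt		= xor16_crypt } }
 *	};
 *
 *	static int __init xor16_init(void)
 *	{
 *		return crypto_register_alg(&xor16_alg);
 *	}
 *
 *	static void __exit xor16_exit(void)
 *	{
 *		crypto_unregister_alg(&xor16_alg);
 *	}
 *
 *	module_init(xor16_init);
 *	module_exit(xor16_exit);
 */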
/*
 * Algorithm query interface.
 */
int crypto_alg_available(const char *name, u32 flags);
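
/*
 * Example: checking whether an algorithm is present before allocating a
 * transform for it (illustrative sketch; "sha1" follows the usual in-kernel
 * algorithm naming and is assumed to be built or loadable).
 *
 *	const char *digest_name = "sha1";
 *
 *	if (!crypto_alg_available(digest_name, 0)) {
 *		printk(KERN_ERR "%s: algorithm unavailable\n", digest_name);
 *		return -ENOENT;
 *	}
 */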
/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_tfm() and
 * crypto_free_tfm(), as well as the various helpers below.
 */
struct crypto_tfm;

struct cipher_tfm {
	void *cit_iv;
	unsigned int cit_ivsize;
	u32 cit_mode;
	int (*cit_setkey)(struct crypto_tfm *tfm,
	                  const u8 *key, unsigned int keylen);
	int (*cit_encrypt)(struct crypto_tfm *tfm,
	                   struct scatterlist *dst,
	                   struct scatterlist *src,
	                   unsigned int nbytes);
	int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
	                      struct scatterlist *dst,
	                      struct scatterlist *src,
	                      unsigned int nbytes, u8 *iv);
	int (*cit_decrypt)(struct crypto_tfm *tfm,
	                   struct scatterlist *dst,
	                   struct scatterlist *src,
	                   unsigned int nbytes);
	int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
	                      struct scatterlist *dst,
	                      struct scatterlist *src,
	                      unsigned int nbytes, u8 *iv);
	void (*cit_xor_block)(u8 *dst, const u8 *src);
};
struct digest_tfm {
	void (*dit_init)(struct crypto_tfm *tfm);
	void (*dit_update)(struct crypto_tfm *tfm,
	                   struct scatterlist *sg, unsigned int nsg);
	void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
	void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
	                   unsigned int nsg, u8 *out);
	int (*dit_setkey)(struct crypto_tfm *tfm,
	                  const u8 *key, unsigned int keylen);
#ifdef CONFIG_CRYPTO_HMAC
	void *dit_hmac_block;
#endif
};
struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
	                    const u8 *src, unsigned int slen,
	                    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
	                      const u8 *src, unsigned int slen,
	                      u8 *dst, unsigned int *dlen);
};
#define crt_cipher	crt_u.cipher
#define crt_digest	crt_u.digest
#define crt_compress	crt_u.compress

struct crypto_tfm {

	u32 crt_flags;

	union {
		struct cipher_tfm cipher;
		struct digest_tfm digest;
		struct compress_tfm compress;
	} crt_u;

	struct crypto_alg *__crt_alg;
};
/*
 * Transform user interface.
 */

/*
 * crypto_alloc_tfm() will first attempt to locate an already loaded algorithm.
 * If that fails and the kernel supports dynamically loadable modules, it
 * will then attempt to load a module of the same name or alias.  A refcount
 * is grabbed on the algorithm which is then associated with the new transform.
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
void crypto_free_tfm(struct crypto_tfm *tfm);
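
/*
 * Example: allocating a CBC-mode cipher transform and releasing it again
 * (illustrative sketch; "aes" assumes that algorithm is built or loadable,
 * and error handling is minimal).
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *
 *	... use the cipher helpers below on tfm, then ...
 *
 *	crypto_free_tfm(tfm);
 */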
/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
	return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->__crt_alg->cra_cipher.cia_min_keysize;
}

static inline unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->__crt_alg->cra_cipher.cia_max_keysize;
}

static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_ivsize;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	return tfm->__crt_alg->cra_digest.dia_digestsize;
}
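
/*
 * Example: querying transform properties, e.g. to size buffers before
 * processing data (illustrative sketch; assumes tfm is a cipher transform).
 *
 *	unsigned int bs  = crypto_tfm_alg_blocksize(tfm);
 *	unsigned int ivs = crypto_tfm_alg_ivsize(tfm);
 *
 *	printk(KERN_DEBUG "%s: block size %u, iv size %u\n",
 *	       crypto_tfm_alg_name(tfm), bs, ivs);
 */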
static inline void crypto_digest_init(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_init(tfm);
}

static inline void crypto_digest_update(struct crypto_tfm *tfm,
                                        struct scatterlist *sg,
                                        unsigned int nsg)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_update(tfm, sg, nsg);
}

static inline void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_final(tfm, out);
}

static inline void crypto_digest_digest(struct crypto_tfm *tfm,
                                        struct scatterlist *sg,
                                        unsigned int nsg, u8 *out)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
}
static inline int crypto_digest_setkey(struct crypto_tfm *tfm,
                                       const u8 *key, unsigned int keylen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	if (tfm->crt_digest.dit_setkey == NULL)
		return -ENOSYS;
	return tfm->crt_digest.dit_setkey(tfm, key, keylen);
}
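
/*
 * Example: hashing one linear buffer with the digest helpers (illustrative
 * sketch; "sha1" and the buffer data/len are assumptions, and the
 * scatterlist is filled in by hand assuming the page/offset/length layout
 * of this API's era).
 *
 *	struct scatterlist sg[1];
 *	u8 hash[20];	(20 == sha1 digest size; use
 *			 crypto_tfm_alg_digestsize() in real code)
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("sha1", 0);
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *
 *	sg[0].page   = virt_to_page(data);
 *	sg[0].offset = offset_in_page(data);
 *	sg[0].length = len;
 *
 *	crypto_digest_init(tfm);
 *	crypto_digest_update(tfm, sg, 1);
 *	crypto_digest_final(tfm, hash);
 *	crypto_free_tfm(tfm);
 *
 * The one-shot crypto_digest_digest(tfm, sg, 1, hash) collapses the three
 * calls above into one.
 */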
static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
                                       const u8 *key, unsigned int keylen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
}

static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes, u8 *iv)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
	return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes, u8 *iv)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
	return tfm->crt_cipher.cit_decrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
                                        const u8 *src, unsigned int len)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	memcpy(tfm->crt_cipher.cit_iv, src, len);
}

static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
                                        u8 *dst, unsigned int len)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	memcpy(dst, tfm->crt_cipher.cit_iv, len);
}
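
/*
 * Example: CBC-encrypting one page-resident buffer in place (illustrative
 * sketch; "aes", the 16-byte key, the iv, buf and nbytes are assumptions,
 * nbytes must be a multiple of the block size, and the scatterlist is
 * filled in by hand assuming the page/offset/length layout of this era).
 *
 *	struct scatterlist sg[1];
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *	int err;
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	if (crypto_cipher_setkey(tfm, key, 16))
 *		goto out_free;
 *
 *	sg[0].page   = virt_to_page(buf);
 *	sg[0].offset = offset_in_page(buf);
 *	sg[0].length = nbytes;
 *
 *	crypto_cipher_set_iv(tfm, iv, crypto_tfm_alg_ivsize(tfm));
 *	err = crypto_cipher_encrypt(tfm, sg, sg, nbytes);	(dst == src)
 *
 *  out_free:
 *	crypto_free_tfm(tfm);
 */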
static inline int crypto_comp_compress(struct crypto_tfm *tfm,
                                       const u8 *src, unsigned int slen,
                                       u8 *dst, unsigned int *dlen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
	return tfm->crt_compress.cot_compress(tfm, src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_tfm *tfm,
                                         const u8 *src, unsigned int slen,
                                         u8 *dst, unsigned int *dlen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
	return tfm->crt_compress.cot_decompress(tfm, src, slen, dst, dlen);
}
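
/*
 * Example: round-tripping a buffer through a compression transform
 * (illustrative sketch; "deflate", src/slen and the cbuf/outbuf buffers are
 * assumptions, and *dlen carries the destination capacity in and the
 * produced length out).
 *
 *	unsigned int clen = sizeof(cbuf);
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("deflate", 0);
 *	int err;
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *
 *	err = crypto_comp_compress(tfm, src, slen, cbuf, &clen);
 *	if (err == 0) {
 *		unsigned int outlen = sizeof(outbuf);
 *		err = crypto_comp_decompress(tfm, cbuf, clen, outbuf, &outlen);
 *	}
 *	crypto_free_tfm(tfm);
 */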
/*
 * HMAC support.
 */
#ifdef CONFIG_CRYPTO_HMAC
void crypto_hmac_init(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen);
void crypto_hmac_update(struct crypto_tfm *tfm,
                        struct scatterlist *sg, unsigned int nsg);
void crypto_hmac_final(struct crypto_tfm *tfm, u8 *key,
                       unsigned int *keylen, u8 *out);
void crypto_hmac(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen,
                 struct scatterlist *sg, unsigned int nsg, u8 *out);
#endif	/* CONFIG_CRYPTO_HMAC */
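
/*
 * Example: one-shot HMAC over a single buffer (illustrative sketch; only
 * valid with CONFIG_CRYPTO_HMAC enabled, and "md5", the 16-byte key and the
 * hand-built scatterlist are assumptions).
 *
 *	struct scatterlist sg[1];
 *	u8 mac[16];
 *	unsigned int keylen = 16;
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("md5", 0);
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *
 *	sg[0].page   = virt_to_page(data);
 *	sg[0].offset = offset_in_page(data);
 *	sg[0].length = len;
 *
 *	crypto_hmac(tfm, key, &keylen, sg, 1, mac);
 *	crypto_free_tfm(tfm);
 */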
#endif	/* _LINUX_CRYPTO_H */