/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};
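
/*
 * Usage sketch (illustrative, not part of the original header; all
 * example_* names are hypothetical): a template supplies alloc/free
 * hooks that build instances around an underlying algorithm, in the
 * style of the in-tree mode templates.
 *
 *	static struct crypto_instance *example_alloc(struct rtattr **tb)
 *	{
 *		struct crypto_instance *inst;
 *		struct crypto_alg *alg;
 *
 *		alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *					  CRYPTO_ALG_TYPE_MASK);
 *		if (IS_ERR(alg))
 *			return ERR_CAST(alg);
 *
 *		inst = crypto_alloc_instance("example", alg);
 *		crypto_mod_put(alg);
 *		return inst;
 *	}
 *
 *	static void example_free(struct crypto_instance *inst)
 *	{
 *		crypto_drop_spawn(crypto_instance_ctx(inst));
 *		kfree(inst);
 *	}
 *
 *	static struct crypto_template example_tmpl = {
 *		.name = "example",
 *		.alloc = example_alloc,
 *		.free = example_free,
 *		.module = THIS_MODULE,
 *	};
 */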

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;
	unsigned int ivsize;

	int flags;
	unsigned int walk_blocksize;
	unsigned int cipher_blocksize;
	unsigned int alignmask;
};

struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;
	u8 *iv_buffer;
	u8 *iv;
	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);
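
/*
 * Registration sketch (illustrative): a template such as the hypothetical
 * example_tmpl above is registered once at module load:
 *
 *	static int __init example_module_init(void)
 *	{
 *		return crypto_register_template(&example_tmpl);
 *	}
 *
 *	static void __exit example_module_exit(void)
 *	{
 *		crypto_unregister_template(&example_tmpl);
 *	}
 *
 *	module_init(example_module_init);
 *	module_exit(example_module_exit);
 */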

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}
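
/*
 * Spawn lifecycle sketch (illustrative; ctx and the exact hook placement
 * are hypothetical): an instance pins the algorithm it was built from
 * with a spawn, materialises a transform from it in its cra_init hook,
 * and releases it when the instance is freed.
 *
 *	In the template's alloc hook:
 *		crypto_init_spawn(&ctx->spawn, alg, inst, CRYPTO_ALG_TYPE_MASK);
 *
 *	In cra_init:
 *		tfm = crypto_spawn_tfm(&ctx->spawn, CRYPTO_ALG_TYPE_CIPHER,
 *				       CRYPTO_ALG_TYPE_MASK);
 *
 *	In the template's free hook:
 *		crypto_drop_spawn(&ctx->spawn);
 */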

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
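
/*
 * Queue usage sketch (illustrative; MAX_QLEN and the surrounding driver
 * are hypothetical): crypto_enqueue_request() normally returns
 * -EINPROGRESS; when the queue is full it returns -EBUSY, and the request
 * is retained on the backlog only if CRYPTO_TFM_REQ_MAY_BACKLOG is set.
 *
 *	static struct crypto_queue queue;
 *
 *	crypto_init_queue(&queue, MAX_QLEN);
 *
 *	Submission:
 *		err = crypto_enqueue_request(&queue, &req->base);
 *
 *	Processing:
 *		struct crypto_async_request *async_req;
 *
 *		async_req = crypto_dequeue_request(&queue);
 *		if (async_req)
 *			(process it, then call async_req->complete())
 */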

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
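
/*
 * Sketch (illustrative; ctrblk and keystream are hypothetical u32-aligned
 * 16-byte buffers): typical users are counter-like modes, which bump a
 * big-endian counter block and XOR a keystream into the data.
 *
 *	crypto_inc(ctrblk, 16);
 *	crypto_xor(dst, keystream, 16);
 */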

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk,
				   struct crypto_aead *tfm,
				   unsigned int blocksize);
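
/*
 * Walk usage sketch (illustrative, modelled on the in-tree mode
 * implementations; example_encrypt, bsize and the per-chunk processing
 * are hypothetical):
 *
 *	static int example_encrypt(struct blkcipher_desc *desc,
 *				   struct scatterlist *dst,
 *				   struct scatterlist *src,
 *				   unsigned int nbytes)
 *	{
 *		struct blkcipher_walk walk;
 *		int err;
 *
 *		blkcipher_walk_init(&walk, dst, src, nbytes);
 *		err = blkcipher_walk_virt(desc, &walk);
 *
 *		while ((nbytes = walk.nbytes)) {
 *			(process full blocks from walk.src.virt.addr into
 *			 walk.dst.virt.addr, leaving any partial tail)
 *			err = blkcipher_walk_done(desc, &walk, nbytes % bsize);
 *		}
 *
 *		return err;
 *	}
 *
 * The third argument to blkcipher_walk_done() is the number of bytes left
 * unprocessed in the current chunk.
 */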

int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}
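
/*
 * Dequeue sketch (illustrative): a worker draining a queue must complete
 * any backlogged request with -EINPROGRESS before processing the next
 * one, so the submitter learns its request has left the backlog; note
 * crypto_get_backlog() is read before crypto_dequeue_request().
 *
 *	struct crypto_async_request *async_req, *backlog;
 *
 *	backlog = crypto_get_backlog(&queue);
 *	async_req = crypto_dequeue_request(&queue);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (async_req)
 *		(process ablkcipher_request_cast(async_req), then complete it)
 */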

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
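
/*
 * Sketch (illustrative): a template propagates a caller's demand for a
 * synchronous algorithm into the mask used to look up its inner
 * algorithm when parsing instance attributes:
 *
 *	struct crypto_attr_type *algt = crypto_get_attr_type(tb);
 *	u32 mask = CRYPTO_ALG_TYPE_MASK |
 *		   crypto_requires_sync(algt->type, algt->mask);
 *
 *	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, mask);
 */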

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
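
/*
 * Sketch (illustrative; computed_tag, received_tag and authsize are
 * hypothetical): crypto_memneq() replaces memcmp() wherever the result
 * of a comparison depends on secret data, e.g. authentication tag
 * verification, because its running time does not reveal the position
 * of the first differing byte.
 *
 *	if (crypto_memneq(computed_tag, received_tag, authsize))
 *		return -EBADMSG;
 */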

static inline void crypto_yield(u32 flags)
{
	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		cond_resched();
}

#endif /* _CRYPTO_ALGAPI_H */