/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE		160
#define MAX_ALGAPI_ALIGNMASK		63
#define MAX_CIPHER_BLOCKSIZE		16
#define MAX_CIPHER_ALIGNMASK		15
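
/*
 * Illustrative sketch, not part of this header: callers can use the
 * MAX_CIPHER_* limits to size an on-stack buffer that is big enough for
 * any single-block cipher at any alignment, then align it with PTR_ALIGN.
 * sample_encrypt_one() is a hypothetical helper.
 *
 *      static void sample_encrypt_one(struct crypto_cipher *cipher,
 *                                     u8 *dst, const u8 *src)
 *      {
 *              u8 buf[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
 *              u8 *tmp = PTR_ALIGN(&buf[0], MAX_CIPHER_ALIGNMASK + 1);
 *
 *              crypto_cipher_encrypt_one(cipher, tmp, src);
 *              memcpy(dst, tmp, crypto_cipher_blocksize(cipher));
 *      }
 */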

struct crypto_aead;
struct crypto_instance;
struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
        unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
        unsigned int (*extsize)(struct crypto_alg *alg);
        int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
        int (*init_tfm)(struct crypto_tfm *tfm);
        void (*show)(struct seq_file *m, struct crypto_alg *alg);
        int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
        void (*free)(struct crypto_instance *inst);

        unsigned int type;
        unsigned int maskclear;
        unsigned int maskset;
        unsigned int tfmsize;
};

struct crypto_instance {
        struct crypto_alg alg;

        struct crypto_template *tmpl;
        struct hlist_node list;

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
        struct list_head list;
        struct hlist_head instances;
        struct module *module;

        struct crypto_instance *(*alloc)(struct rtattr **tb);
        void (*free)(struct crypto_instance *inst);
        int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

        char name[CRYPTO_MAX_ALG_NAME];
};
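
/*
 * Illustrative sketch, not part of this header: a template's ->alloc
 * callback typically validates the rtattr arguments, looks up the
 * underlying algorithm and wraps it in a new instance.  All sample_*
 * names are hypothetical.
 *
 *      static struct crypto_instance *sample_alloc(struct rtattr **tb)
 *      {
 *              struct crypto_instance *inst;
 *              struct crypto_alg *alg;
 *              int err;
 *
 *              err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
 *              if (err)
 *                      return ERR_PTR(err);
 *
 *              alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *                                        CRYPTO_ALG_TYPE_MASK);
 *              if (IS_ERR(alg))
 *                      return ERR_CAST(alg);
 *
 *              inst = crypto_alloc_instance("sample", alg);
 *              crypto_mod_put(alg);
 *              return inst;
 *      }
 */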

struct crypto_spawn {
        struct list_head list;
        struct crypto_alg *alg;
        struct crypto_instance *inst;
        const struct crypto_type *frontend;
        u32 mask;
};

struct crypto_queue {
        struct list_head list;
        struct list_head *backlog;

        unsigned int qlen;
        unsigned int max_qlen;
};

struct scatter_walk {
        struct scatterlist *sg;
        unsigned int offset;
};

struct blkcipher_walk {
        union {
                struct {
                        struct page *page;
                        unsigned long offset;
                } phys;

                struct {
                        u8 *page;
                        u8 *addr;
                } virt;
        } src, dst;

        struct scatter_walk in;
        unsigned int nbytes;

        struct scatter_walk out;
        unsigned int total;

        void *page;
        u8 *buffer;
        u8 *iv;
        unsigned int ivsize;

        int flags;
        unsigned int walk_blocksize;
        unsigned int cipher_blocksize;
        unsigned int alignmask;
};

struct ablkcipher_walk {
        struct {
                struct page *page;
                unsigned int offset;
        } src, dst;

        struct scatter_walk in;
        unsigned int nbytes;
        struct scatter_walk out;
        unsigned int total;
        struct list_head buffers;
        u8 *iv_buffer;
        u8 *iv;
        int flags;
        unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
                             struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);
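
/*
 * Illustrative sketch, not part of this header: a module registers its
 * template once at init time and unregisters it on exit.  sample_tmpl,
 * sample_alloc and sample_free are hypothetical.
 *
 *      static struct crypto_template sample_tmpl = {
 *              .name = "sample",
 *              .alloc = sample_alloc,
 *              .free = sample_free,
 *              .module = THIS_MODULE,
 *      };
 *
 *      static int __init sample_module_init(void)
 *      {
 *              return crypto_register_template(&sample_tmpl);
 *      }
 *
 *      static void __exit sample_module_exit(void)
 *      {
 *              crypto_unregister_template(&sample_tmpl);
 *      }
 */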

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
                      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
                       struct crypto_instance *inst,
                       const struct crypto_type *frontend);
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
                      u32 type, u32 mask);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
                                    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
                                    struct crypto_instance *inst)
{
        spawn->inst = inst;
}
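
/*
 * Illustrative sketch, not part of this header: an instance embeds a
 * crypto_spawn in its context, grabs a reference to the underlying
 * algorithm for the lifetime of the instance and drops it when the
 * instance is freed.  sample_* names are hypothetical.
 *
 *      struct sample_instance_ctx {
 *              struct crypto_spawn spawn;
 *      };
 *
 *      static int sample_setup(struct crypto_instance *inst,
 *                              struct crypto_alg *alg)
 *      {
 *              struct sample_instance_ctx *ctx = crypto_instance_ctx(inst);
 *
 *              return crypto_init_spawn(&ctx->spawn, alg, inst,
 *                                       CRYPTO_ALG_TYPE_MASK);
 *      }
 *
 *      static void sample_free_inst(struct crypto_instance *inst)
 *      {
 *              struct sample_instance_ctx *ctx = crypto_instance_ctx(inst);
 *
 *              crypto_drop_spawn(&ctx->spawn);
 *              kfree(inst);
 *      }
 */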

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
                                    const struct crypto_type *frontend,
                                    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
                                                 u32 type, u32 mask)
{
        return crypto_attr_alg2(rta, NULL, type, mask);
}
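
/*
 * Illustrative sketch, not part of this header: a template callback uses
 * the attribute helpers to validate its rtattr arguments, e.g. to fetch
 * the type constraints and the name of the algorithm to wrap.  The local
 * variable names are hypothetical.
 *
 *      struct crypto_attr_type *algt;
 *      const char *cipher_name;
 *
 *      algt = crypto_get_attr_type(tb);
 *      if (IS_ERR(algt))
 *              return PTR_ERR(algt);
 *
 *      cipher_name = crypto_attr_alg_name(tb[1]);
 *      if (IS_ERR(cipher_name))
 *              return PTR_ERR(cipher_name);
 */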

int crypto_attr_u32(struct rtattr *rta, u32 *num);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
                        struct crypto_alg *alg);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
                             unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
                                              struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
                           struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
        return queue->qlen;
}
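
/*
 * Illustrative sketch, not part of this header: a driver typically owns
 * one crypto_queue, initialized with crypto_init_queue() at probe time
 * and protected by a lock.  sample_* names are hypothetical.
 *
 *      static struct crypto_queue sample_queue;
 *      static DEFINE_SPINLOCK(sample_lock);
 *
 *      static int sample_enqueue(struct crypto_async_request *req)
 *      {
 *              int err;
 *
 *              spin_lock_bh(&sample_lock);
 *              err = crypto_enqueue_request(&sample_queue, req);
 *              spin_unlock_bh(&sample_lock);
 *
 *              return err;     // -EINPROGRESS once the request is queued
 *      }
 */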

void crypto_inc(u8 *a, unsigned int size);
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);

static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
        if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
            __builtin_constant_p(size) &&
            (size % sizeof(unsigned long)) == 0) {
                unsigned long *d = (unsigned long *)dst;
                unsigned long *s = (unsigned long *)src;

                while (size > 0) {
                        *d++ ^= *s++;
                        size -= sizeof(unsigned long);
                }
        } else {
                __crypto_xor(dst, dst, src, size);
        }
}

static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
                                  unsigned int size)
{
        if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
            __builtin_constant_p(size) &&
            (size % sizeof(unsigned long)) == 0) {
                unsigned long *d = (unsigned long *)dst;
                unsigned long *s1 = (unsigned long *)src1;
                unsigned long *s2 = (unsigned long *)src2;

                while (size > 0) {
                        *d++ = *s1++ ^ *s2++;
                        size -= sizeof(unsigned long);
                }
        } else {
                __crypto_xor(dst, src1, src2, size);
        }
}
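
/*
 * Illustrative sketch, not part of this header: crypto_xor() folds a
 * keystream block into the data in place, crypto_xor_cpy() writes the
 * result to a separate destination, and crypto_inc() steps a counter
 * block, as in a CTR-style loop.  Only the buffer names are hypothetical;
 * AES_BLOCK_SIZE comes from <crypto/aes.h>.
 *
 *      crypto_xor(data, keystream, AES_BLOCK_SIZE);
 *      crypto_xor_cpy(dst, src, keystream, AES_BLOCK_SIZE);
 *      crypto_inc(ctrblk, AES_BLOCK_SIZE);
 */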

int blkcipher_walk_done(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
                              struct blkcipher_walk *walk,
                              unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
                                   struct blkcipher_walk *walk,
                                   struct crypto_aead *tfm,
                                   unsigned int blocksize);

int ablkcipher_walk_done(struct ablkcipher_request *req,
                         struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
                         struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
        return PTR_ALIGN(crypto_tfm_ctx(tfm),
                         crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
        struct crypto_tfm *tfm)
{
        return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
        return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
        struct crypto_ablkcipher *tfm)
{
        return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
        struct crypto_spawn *spawn)
{
        u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
        u32 mask = CRYPTO_ALG_TYPE_MASK;

        return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
        struct crypto_spawn *spawn)
{
        u32 type = CRYPTO_ALG_TYPE_CIPHER;
        u32 mask = CRYPTO_ALG_TYPE_MASK;

        return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}
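
/*
 * Illustrative sketch, not part of this header: a ->cra_init callback can
 * reach the instance that created the tfm via crypto_tfm_alg_instance(),
 * pull the spawn out of the instance context and allocate the underlying
 * cipher from it.  sample_* names are hypothetical.
 *
 *      static int sample_init_tfm(struct crypto_tfm *tfm)
 *      {
 *              struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
 *              struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *              struct sample_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
 *              struct crypto_cipher *cipher;
 *
 *              cipher = crypto_spawn_cipher(spawn);
 *              if (IS_ERR(cipher))
 *                      return PTR_ERR(cipher);
 *
 *              ctx->child = cipher;
 *              return 0;
 *      }
 */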

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
        return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
                                       struct scatterlist *dst,
                                       struct scatterlist *src,
                                       unsigned int nbytes)
{
        walk->in.sg = src;
        walk->out.sg = dst;
        walk->total = nbytes;
}
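
/*
 * Illustrative sketch, not part of this header: the canonical walk loop
 * of a blkcipher ->encrypt handler.  Everything except the walk helpers
 * is hypothetical; bsize would be the cipher block size.
 *
 *      static int sample_encrypt(struct blkcipher_desc *desc,
 *                                struct scatterlist *dst,
 *                                struct scatterlist *src,
 *                                unsigned int nbytes)
 *      {
 *              struct blkcipher_walk walk;
 *              int err;
 *
 *              blkcipher_walk_init(&walk, dst, src, nbytes);
 *              err = blkcipher_walk_virt(desc, &walk);
 *
 *              while (walk.nbytes) {
 *                      // encrypt the whole-block portion of walk.nbytes
 *                      // from walk.src.virt.addr into walk.dst.virt.addr
 *                      err = blkcipher_walk_done(desc, &walk,
 *                                                walk.nbytes % bsize);
 *              }
 *
 *              return err;
 *      }
 */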

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
        walk->in.sg = src;
        walk->out.sg = dst;
        walk->total = nbytes;
        INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
        if (unlikely(!list_empty(&walk->buffers)))
                __ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
        struct crypto_queue *queue)
{
        return queue->backlog == &queue->list ? NULL :
               container_of(queue->backlog, struct crypto_async_request, list);
}
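
/*
 * Illustrative sketch, not part of this header: the usual dequeue pattern
 * in a driver's worker.  A backlogged request must be notified with
 * -EINPROGRESS before the next request is processed.  sample_* names are
 * hypothetical.
 *
 *      struct crypto_async_request *req, *backlog;
 *
 *      spin_lock_bh(&sample_lock);
 *      backlog = crypto_get_backlog(&sample_queue);
 *      req = crypto_dequeue_request(&sample_queue);
 *      spin_unlock_bh(&sample_lock);
 *
 *      if (backlog)
 *              backlog->complete(backlog, -EINPROGRESS);
 *      if (req)
 *              sample_process(req);
 */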

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
                                             struct ablkcipher_request *request)
{
        return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
        struct crypto_queue *queue)
{
        return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
        return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
                                          struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
                                                     u32 type, u32 mask)
{
        return crypto_attr_alg(tb[1], type, mask);
}

static inline int crypto_requires_off(u32 type, u32 mask, u32 off)
{
        return (type ^ off) & mask & off;
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
        return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
}
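
/*
 * Illustrative sketch, not part of this header: when instantiating a
 * template, the extra mask bit returned here is OR-ed into the lookup
 * mask so that a synchronous implementation is chosen whenever the user
 * requested one.  algt is assumed to come from crypto_get_attr_type().
 *
 *      mask = CRYPTO_ALG_TYPE_MASK | crypto_requires_sync(algt->type,
 *                                                         algt->mask);
 *      alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER, mask);
 */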

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *                 timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
        return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
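
/*
 * Illustrative sketch, not part of this header: authentication tags must
 * be compared with crypto_memneq() rather than memcmp(), whose
 * data-dependent early exit leaks how many leading bytes matched.  The
 * buffer names are hypothetical.
 *
 *      if (crypto_memneq(computed_tag, received_tag, authsize))
 *              return -EBADMSG;
 */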

static inline void crypto_yield(u32 flags)
{
#if !defined(CONFIG_PREEMPT) || defined(CONFIG_PREEMPT_VOLUNTARY)
        if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
                cond_resched();
#endif
}

int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
        CRYPTO_MSG_ALG_REQUEST,
        CRYPTO_MSG_ALG_REGISTER,
        CRYPTO_MSG_ALG_LOADED,
};

#endif	/* _CRYPTO_ALGAPI_H */