/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>
/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * ciphers and architectures.
 */
#define MAX_CIPHER_BLOCKSIZE	16
#define MAX_CIPHER_ALIGNMASK	15
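
/*
 * Example (illustrative, not from this header): these bounds let a caller
 * reserve a single worst-case on-stack buffer that can be aligned for any
 * simple cipher; "tfm" below stands for the transform whose alignmask
 * applies.
 *
 *	u8 buf[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
 *	u8 *block = PTR_ALIGN(buf, crypto_tfm_alg_alignmask(tfm) + 1);
 */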
struct crypto_aead;
struct crypto_instance;
struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};
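
/*
 * Example (hypothetical sketch): a template driver fills in ->create and
 * registers itself at module init. The names example_create() and
 * example_tmpl are illustrative only.
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		// parse tb, allocate a crypto_instance and register it
 *		// with crypto_register_instance(tmpl, inst)
 *		return -ENOSYS;
 *	}
 *
 *	static struct crypto_template example_tmpl = {
 *		.name = "example",
 *		.create = example_create,
 *		.module = THIS_MODULE,
 *	};
 *
 *	static int __init example_module_init(void)
 *	{
 *		return crypto_register_template(&example_tmpl);
 *	}
 */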
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};
struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};
struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;
		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;
	unsigned int ivsize;

	int flags;
	unsigned int walk_blocksize;
	unsigned int cipher_blocksize;
	unsigned int alignmask;
};
struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;
	u8 *iv_buffer;
	u8 *iv;
	int flags;
	unsigned int blocksize;
};
extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_blkcipher_type;
void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);
int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}
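
/*
 * Example (hypothetical sketch): inside a template's ->create callback the
 * underlying algorithm is typically bound through a spawn stored in the
 * instance context; "cipher_name" stands for a name parsed from tb.
 *
 *	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *	int err;
 *
 *	crypto_set_spawn(spawn, inst);
 *	err = crypto_grab_spawn(spawn, cipher_name,
 *				CRYPTO_ALG_TYPE_CIPHER, CRYPTO_ALG_TYPE_MASK);
 *
 * crypto_drop_spawn() undoes the grab on the error path and in the
 * template's ->free callback.
 */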
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}
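
/*
 * Example (hypothetical sketch): a ->create callback usually starts by
 * validating the requested type and fetching the inner algorithm name
 * from the first attribute:
 *
 *	struct crypto_attr_type *algt;
 *	const char *cipher_name;
 *
 *	algt = crypto_get_attr_type(tb);
 *	if (IS_ERR(algt))
 *		return PTR_ERR(algt);
 *
 *	cipher_name = crypto_attr_alg_name(tb[1]);
 *	if (IS_ERR(cipher_name))
 *		return PTR_ERR(cipher_name);
 */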
int crypto_attr_u32(struct rtattr *rta, u32 *num);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}
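
/*
 * Example (illustrative sketch): a driver that serializes requests through
 * a crypto_queue typically enqueues from its request entry point and
 * dequeues from a worker:
 *
 *	crypto_init_queue(&queue, 100);		// cap the queue at 100 entries
 *	...
 *	err = crypto_enqueue_request(&queue, &req->base);
 *	...
 *	backlog = crypto_get_backlog(&queue);
 *	async_req = crypto_dequeue_request(&queue);
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *
 * Backlogged requests are notified with -EINPROGRESS before the dequeued
 * request is processed; crypto_get_backlog() is defined further down.
 */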
void crypto_inc(u8 *a, unsigned int size);
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);

static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s = (unsigned long *)src;

		while (size > 0) {
			*d++ ^= *s++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, dst, src, size);
	}
}
static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
				  unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s1 = (unsigned long *)src1;
		unsigned long *s2 = (unsigned long *)src2;

		while (size > 0) {
			*d++ = *s1++ ^ *s2++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, src1, src2, size);
	}
}
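
/*
 * Example (illustrative): CBC-style chaining and CTR-style counters are
 * the typical users of these helpers; "block", "prev" and "ctrblk" are
 * illustrative names.
 *
 *	crypto_xor(block, prev, AES_BLOCK_SIZE);	// CBC chaining step
 *	crypto_inc(ctrblk, AES_BLOCK_SIZE);		// bump big-endian counter
 *
 * With a constant size that is a multiple of sizeof(unsigned long), the
 * inline fast path compiles down to a few word-sized XORs; all other
 * cases go through the out-of-line __crypto_xor().
 */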
int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk,
				   struct crypto_aead *tfm,
				   unsigned int blocksize);
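
/*
 * Example (illustrative sketch): the classic encrypt loop walks the
 * scatterlists in block-sized chunks; "bsize" stands for the cipher
 * block size, and desc/dst/src/nbytes come from the caller.
 *
 *	struct blkcipher_walk walk;
 *	int err;
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *	while (walk.nbytes) {
 *		// transform walk.src.virt.addr into walk.dst.virt.addr
 *		// here, a multiple of bsize bytes at a time
 *		err = blkcipher_walk_done(desc, &walk, walk.nbytes % bsize);
 *	}
 *	return err;
 */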
int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}
static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}
static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}
static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}
static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}
static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}
static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}
static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}
static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}
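
/*
 * Example (hypothetical sketch): an instance's ->cra_init callback turns
 * its spawn into a usable child transform; struct example_ctx and its
 * "child" member are illustrative only.
 *
 *	static int example_init_tfm(struct crypto_tfm *tfm)
 *	{
 *		struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
 *		struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *		struct example_ctx *ctx = crypto_tfm_ctx(tfm);
 *		struct crypto_cipher *cipher;
 *
 *		cipher = crypto_spawn_cipher(spawn);
 *		if (IS_ERR(cipher))
 *			return PTR_ERR(cipher);
 *
 *		ctx->child = cipher;
 *		return 0;
 *	}
 */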
static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}
static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}
static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}
static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}
static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}
static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}
static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}
static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}
static inline int crypto_requires_off(u32 type, u32 mask, u32 off)
{
	return (type ^ off) & mask & off;
}
/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
}
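
/*
 * Example (illustrative): templates commonly fold the caller's sync
 * requirement into the mask used to look up the inner algorithm;
 * "algt" stands for the result of crypto_get_attr_type(tb).
 *
 *	mask |= crypto_requires_sync(algt->type, algt->mask);
 */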
noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);
/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
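
/*
 * Example (illustrative): use crypto_memneq() rather than memcmp()
 * whenever secrets are compared, e.g. when verifying an authentication
 * tag (the names below are illustrative):
 *
 *	if (crypto_memneq(computed_tag, received_tag, taglen))
 *		return -EBADMSG;
 *
 * Unlike memcmp(), the comparison does not stop at the first differing
 * byte, so its timing reveals nothing about how much of the tag matched.
 */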
static inline void crypto_yield(u32 flags)
{
#if !defined(CONFIG_PREEMPT) || defined(CONFIG_PREEMPT_VOLUNTARY)
	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		cond_resched();
#endif
}

#endif	/* _CRYPTO_ALGAPI_H */