/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
13 #ifndef _CRYPTO_INTERNAL_SKCIPHER_H
14 #define _CRYPTO_INTERNAL_SKCIPHER_H
16 #include <crypto/algapi.h>
17 #include <crypto/skcipher.h>
18 #include <linux/list.h>
19 #include <linux/types.h>
24 struct skcipher_instance
{
25 void (*free
)(struct skcipher_instance
*inst
);
28 char head
[offsetof(struct skcipher_alg
, base
)];
29 struct crypto_instance base
;
31 struct skcipher_alg alg
;
35 struct crypto_skcipher_spawn
{
36 struct crypto_spawn base
;
39 struct skcipher_walk
{
52 struct scatter_walk in
;
55 struct scatter_walk out
;
58 struct list_head buffers
;
68 unsigned int blocksize
;
70 unsigned int alignmask
;
73 static inline struct crypto_instance
*skcipher_crypto_instance(
74 struct skcipher_instance
*inst
)
79 static inline struct skcipher_instance
*skcipher_alg_instance(
80 struct crypto_skcipher
*skcipher
)
82 return container_of(crypto_skcipher_alg(skcipher
),
83 struct skcipher_instance
, alg
);
/* Return the template-private context of an skcipher instance. */
static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}
91 static inline void skcipher_request_complete(struct skcipher_request
*req
, int err
)
93 req
->base
.complete(&req
->base
, err
);
96 static inline void crypto_set_skcipher_spawn(
97 struct crypto_skcipher_spawn
*spawn
, struct crypto_instance
*inst
)
99 crypto_set_spawn(&spawn
->base
, inst
);
102 int crypto_grab_skcipher(struct crypto_skcipher_spawn
*spawn
, const char *name
,
105 static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn
*spawn
)
107 crypto_drop_spawn(&spawn
->base
);
110 static inline struct skcipher_alg
*crypto_skcipher_spawn_alg(
111 struct crypto_skcipher_spawn
*spawn
)
113 return container_of(spawn
->base
.alg
, struct skcipher_alg
, base
);
/* Convenience alias: algorithm behind @spawn as an skcipher_alg. */
static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}
122 static inline struct crypto_skcipher
*crypto_spawn_skcipher(
123 struct crypto_skcipher_spawn
*spawn
)
125 return crypto_spawn_tfm2(&spawn
->base
);
128 static inline void crypto_skcipher_set_reqsize(
129 struct crypto_skcipher
*skcipher
, unsigned int reqsize
)
131 skcipher
->reqsize
= reqsize
;
/* Algorithm / template-instance registration entry points. */
int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);
/*
 * Scatterlist-walk helpers used by skcipher/AEAD implementations.
 * NOTE(review): the "bool atomic" tail of skcipher_walk_virt was lost in
 * extraction and restored from the upstream header — confirm.
 */
int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
void skcipher_walk_atomise(struct skcipher_walk *walk);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
		       bool atomic);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);
156 static inline void ablkcipher_request_complete(struct ablkcipher_request
*req
,
159 req
->base
.complete(&req
->base
, err
);
162 static inline u32
ablkcipher_request_flags(struct ablkcipher_request
*req
)
164 return req
->base
.flags
;
167 static inline void *crypto_skcipher_ctx(struct crypto_skcipher
*tfm
)
169 return crypto_tfm_ctx(&tfm
->base
);
172 static inline void *skcipher_request_ctx(struct skcipher_request
*req
)
177 static inline u32
skcipher_request_flags(struct skcipher_request
*req
)
179 return req
->base
.flags
;
182 static inline unsigned int crypto_skcipher_alg_min_keysize(
183 struct skcipher_alg
*alg
)
185 if ((alg
->base
.cra_flags
& CRYPTO_ALG_TYPE_MASK
) ==
186 CRYPTO_ALG_TYPE_BLKCIPHER
)
187 return alg
->base
.cra_blkcipher
.min_keysize
;
189 if (alg
->base
.cra_ablkcipher
.encrypt
)
190 return alg
->base
.cra_ablkcipher
.min_keysize
;
192 return alg
->min_keysize
;
195 static inline unsigned int crypto_skcipher_alg_max_keysize(
196 struct skcipher_alg
*alg
)
198 if ((alg
->base
.cra_flags
& CRYPTO_ALG_TYPE_MASK
) ==
199 CRYPTO_ALG_TYPE_BLKCIPHER
)
200 return alg
->base
.cra_blkcipher
.max_keysize
;
202 if (alg
->base
.cra_ablkcipher
.encrypt
)
203 return alg
->base
.cra_ablkcipher
.max_keysize
;
205 return alg
->max_keysize
;
208 /* Helpers for simple block cipher modes of operation */
/* Tfm context for templates built with skcipher_alloc_instance_simple(). */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};
212 static inline struct crypto_cipher
*
213 skcipher_cipher_simple(struct crypto_skcipher
*tfm
)
215 struct skcipher_ctx_simple
*ctx
= crypto_skcipher_ctx(tfm
);
/*
 * Allocate and partially initialize a template instance for a simple
 * block-cipher mode of operation; on success *cipher_alg_ret holds the
 * underlying cipher algorithm (ownership semantics per the C file's
 * definition, not visible here).
 */
struct skcipher_instance *
skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
			       struct crypto_alg **cipher_alg_ret);
223 #endif /* _CRYPTO_INTERNAL_SKCIPHER_H */