/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;
		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;

	u8 *page;
	u8 *buffer;
	u8 *oiv;
	void *iv;
	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

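/*
 * Example (not part of the original header): a minimal sketch of the
 * completion path of an asynchronous implementation.  Once the work for a
 * request has finished (e.g. in a tasklet), the driver reports the result
 * to the original caller exactly once.  The function name and the error
 * mapping are illustrative assumptions.
 */
static inline void example_async_done(struct skcipher_request *req,
				      bool hw_ok)
{
	/* Map the device status to an errno and fire the user callback. */
	skcipher_request_complete(req, hw_ok ? 0 : -EIO);
}
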
int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

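/*
 * Example (not part of the original header): a sketch of the ->create()
 * side, where a mode template grabs the underlying skcipher by name and
 * inspects the resulting algorithm.  The function name, the use of tb[1]
 * and the blocksize check are illustrative assumptions; a real template
 * would go on to fill in and register the instance.
 */
static inline int example_grab(struct skcipher_instance *inst,
			       struct rtattr **tb, u32 mask)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct skcipher_alg *alg;
	int err;

	/* Look up the named algorithm and take a reference on it. */
	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		return err;

	alg = crypto_skcipher_spawn_alg(spawn);

	/* Reject algorithms this template cannot wrap (check is arbitrary). */
	if (alg->base.cra_blocksize != 16) {
		crypto_drop_skcipher(spawn);
		return -EINVAL;
	}

	return 0;
}
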
static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}

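/*
 * Example (not part of the original header): a sketch of the matching
 * instance ->init() callback, which turns the spawn grabbed at ->create()
 * time into a usable child transform and sizes the per-request context
 * accordingly.  example_tfm_ctx and example_init_tfm are illustrative
 * names; the context is fetched via crypto_tfm_ctx(), which
 * crypto_skcipher_ctx() further below wraps.
 */
struct example_tfm_ctx {
	struct crypto_skcipher *child;	/* the wrapped skcipher */
};

static inline int example_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct example_tfm_ctx *ctx = crypto_tfm_ctx(&tfm->base);
	struct crypto_skcipher *child;

	child = crypto_spawn_skcipher(spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;

	/* Reserve room in every request for a subrequest to the child. */
	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(child));
	return 0;
}
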
int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

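/*
 * Example (not part of the original header): a compressed sketch of a
 * standalone (non-template) registration.  Every name, size and priority
 * below is an illustrative assumption; a real driver also provides the
 * setkey/encrypt/decrypt callbacks and includes <linux/module.h> for
 * THIS_MODULE.
 */
static struct skcipher_alg example_alg = {
	.base = {
		.cra_name		= "example(cipher)",
		.cra_driver_name	= "example-generic",
		.cra_priority		= 100,
		.cra_blocksize		= 16,
		.cra_ctxsize		= sizeof(struct example_tfm_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 16,
	.max_keysize	= 32,
	.ivsize		= 16,
	.init		= example_init_tfm,
	/* .setkey/.encrypt/.decrypt: the driver's real callbacks go here */
};

static int __init example_module_init(void)
{
	/* Pairs with crypto_unregister_skcipher() on module exit. */
	return crypto_register_skcipher(&example_alg);
}
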
int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
void skcipher_walk_atomise(struct skcipher_walk *walk);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}

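/*
 * Example (not part of the original header): the canonical walk loop of a
 * synchronous software implementation.  Each iteration maps a contiguous
 * chunk of the source/destination scatterlists; full blocks are processed
 * and any unprocessed remainder is handed back to skcipher_walk_done().
 * example_walk_encrypt is an illustrative name and the actual cipher call
 * is left as a placeholder comment.
 */
static inline int example_walk_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	unsigned int bsize = crypto_skcipher_blocksize(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	/* Map the request's scatterlists for in-kernel (virtual) access. */
	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) != 0) {
		unsigned int todo = nbytes - (nbytes % bsize);

		/*
		 * Encrypt 'todo' bytes from walk.src.virt.addr into
		 * walk.dst.virt.addr here, then report what is left over.
		 */
		err = skcipher_walk_done(&walk, nbytes - todo);
	}

	return err;
}
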
static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

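/*
 * Example (not part of the original header): a sketch of how a wrapper
 * forwards a request to its child using the per-request context reserved
 * by crypto_skcipher_set_reqsize() in the init sketch further above.
 * Preserving the caller's flags via skcipher_request_flags() keeps the
 * child's sleeping/backlog behaviour consistent with the caller's.
 */
static inline int example_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct example_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return crypto_skcipher_encrypt(subreq);
}
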
static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	return alg->walksize;
}

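/*
 * Example (not part of the original header): a sketch of how a template's
 * ->create() propagates these limits from the underlying algorithm into
 * the new instance.  The 1:1 inheritance shown here is an assumption;
 * modes such as lrw or xts adjust the key sizes instead.
 */
static inline void example_inherit_sizes(struct skcipher_instance *inst,
					 struct skcipher_alg *alg)
{
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);
	inst->alg.walksize = crypto_skcipher_alg_walksize(alg);
}
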
/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};

static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);

static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

	return crypto_spawn_cipher_alg(spawn);
}

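/*
 * Example (not part of the original header): a compressed sketch, modelled
 * on simple mode templates such as "ecb", showing how ->create() can be
 * built around skcipher_alloc_instance_simple().  A real template also
 * sets inst->alg.encrypt/.decrypt (omitted here) before registering.
 */
static inline int example_simple_create(struct crypto_template *tmpl,
					struct rtattr **tb)
{
	struct skcipher_instance *inst;
	int err;

	/* Parses tb, grabs the underlying cipher, fills in sane defaults. */
	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);

	return err;
}
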
#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */