/* Source: Linux 4.14.5 — include/crypto/internal/skcipher.h
 * (blob e42f7063f245b9609671dff95a7e1db267e23b0f)
 */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
13 #ifndef _CRYPTO_INTERNAL_SKCIPHER_H
14 #define _CRYPTO_INTERNAL_SKCIPHER_H
16 #include <crypto/algapi.h>
17 #include <crypto/skcipher.h>
18 #include <linux/list.h>
19 #include <linux/types.h>
/* Forward declarations: only pointers to these are used in this header. */
struct aead_request;
struct rtattr;
24 struct skcipher_instance {
25 void (*free)(struct skcipher_instance *inst);
26 union {
27 struct {
28 char head[offsetof(struct skcipher_alg, base)];
29 struct crypto_instance base;
30 } s;
31 struct skcipher_alg alg;
35 struct crypto_skcipher_spawn {
36 struct crypto_spawn base;
39 struct skcipher_walk {
40 union {
41 struct {
42 struct page *page;
43 unsigned long offset;
44 } phys;
46 struct {
47 u8 *page;
48 void *addr;
49 } virt;
50 } src, dst;
52 struct scatter_walk in;
53 unsigned int nbytes;
55 struct scatter_walk out;
56 unsigned int total;
58 struct list_head buffers;
60 u8 *page;
61 u8 *buffer;
62 u8 *oiv;
63 void *iv;
65 unsigned int ivsize;
67 int flags;
68 unsigned int blocksize;
69 unsigned int stride;
70 unsigned int alignmask;
/* Legacy givcipher crypto_type, defined in the skcipher core. */
extern const struct crypto_type crypto_givcipher_type;
75 static inline struct crypto_instance *skcipher_crypto_instance(
76 struct skcipher_instance *inst)
78 return &inst->s.base;
81 static inline struct skcipher_instance *skcipher_alg_instance(
82 struct crypto_skcipher *skcipher)
84 return container_of(crypto_skcipher_alg(skcipher),
85 struct skcipher_instance, alg);
/* Return the template's private context area of @inst. */
static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}
93 static inline void skcipher_request_complete(struct skcipher_request *req, int err)
95 req->base.complete(&req->base, err);
98 static inline void crypto_set_skcipher_spawn(
99 struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
101 crypto_set_spawn(&spawn->base, inst);
104 int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
105 u32 type, u32 mask);
107 static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
109 crypto_drop_spawn(&spawn->base);
112 static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
113 struct crypto_skcipher_spawn *spawn)
115 return container_of(spawn->base.alg, struct skcipher_alg, base);
/* Alias for crypto_skcipher_spawn_alg(), kept for API symmetry. */
static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}
124 static inline struct crypto_skcipher *crypto_spawn_skcipher(
125 struct crypto_skcipher_spawn *spawn)
127 return crypto_spawn_tfm2(&spawn->base);
130 static inline void crypto_skcipher_set_reqsize(
131 struct crypto_skcipher *skcipher, unsigned int reqsize)
133 skcipher->reqsize = reqsize;
/* Registration API for skcipher algorithms and template instances. */
int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);
143 int skcipher_walk_done(struct skcipher_walk *walk, int err);
144 int skcipher_walk_virt(struct skcipher_walk *walk,
145 struct skcipher_request *req,
146 bool atomic);
147 void skcipher_walk_atomise(struct skcipher_walk *walk);
148 int skcipher_walk_async(struct skcipher_walk *walk,
149 struct skcipher_request *req);
150 int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
151 bool atomic);
152 int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
153 struct aead_request *req, bool atomic);
154 int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
155 struct aead_request *req, bool atomic);
156 void skcipher_walk_complete(struct skcipher_walk *walk, int err);
158 static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
159 int err)
161 req->base.complete(&req->base, err);
164 static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
166 return req->base.flags;
169 static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
171 return crypto_tfm_ctx(&tfm->base);
174 static inline void *skcipher_request_ctx(struct skcipher_request *req)
176 return req->__ctx;
179 static inline u32 skcipher_request_flags(struct skcipher_request *req)
181 return req->base.flags;
184 static inline unsigned int crypto_skcipher_alg_min_keysize(
185 struct skcipher_alg *alg)
187 if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
188 CRYPTO_ALG_TYPE_BLKCIPHER)
189 return alg->base.cra_blkcipher.min_keysize;
191 if (alg->base.cra_ablkcipher.encrypt)
192 return alg->base.cra_ablkcipher.min_keysize;
194 return alg->min_keysize;
197 static inline unsigned int crypto_skcipher_alg_max_keysize(
198 struct skcipher_alg *alg)
200 if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
201 CRYPTO_ALG_TYPE_BLKCIPHER)
202 return alg->base.cra_blkcipher.max_keysize;
204 if (alg->base.cra_ablkcipher.encrypt)
205 return alg->base.cra_ablkcipher.max_keysize;
207 return alg->max_keysize;
210 #endif /* _CRYPTO_INTERNAL_SKCIPHER_H */