Merge tag 'chrome-platform-for-linus-4.13' of git://git.kernel.org/pub/scm/linux...
[linux/fpc-iii.git] / arch / x86 / include / asm / crypto / glue_helper.h
blobed8b66de541f32dd41f75b7a92a116d12bbc8595
/*
 * Shared glue code for 128bit block ciphers
 */
5 #ifndef _CRYPTO_GLUE_HELPER_H
6 #define _CRYPTO_GLUE_HELPER_H
8 #include <crypto/internal/skcipher.h>
9 #include <linux/kernel.h>
10 #include <asm/fpu/api.h>
11 #include <crypto/b128ops.h>
13 typedef void (*common_glue_func_t)(void *ctx, u8 *dst, const u8 *src);
14 typedef void (*common_glue_cbc_func_t)(void *ctx, u128 *dst, const u128 *src);
15 typedef void (*common_glue_ctr_func_t)(void *ctx, u128 *dst, const u128 *src,
16 le128 *iv);
17 typedef void (*common_glue_xts_func_t)(void *ctx, u128 *dst, const u128 *src,
18 le128 *iv);
/*
 * Convenience casts so cipher glue can store its concretely-typed workers
 * in the fn_u union below.
 *
 * NOTE(review): calling through a function pointer cast to an incompatible
 * type is technically UB; presumably the concrete workers all take
 * pointer-compatible arguments — confirm against the cipher glue code.
 */
#define GLUE_FUNC_CAST(fn) ((common_glue_func_t)(fn))
#define GLUE_CBC_FUNC_CAST(fn) ((common_glue_cbc_func_t)(fn))
#define GLUE_CTR_FUNC_CAST(fn) ((common_glue_ctr_func_t)(fn))
#define GLUE_XTS_FUNC_CAST(fn) ((common_glue_xts_func_t)(fn))
25 struct common_glue_func_entry {
26 unsigned int num_blocks; /* number of blocks that @fn will process */
27 union {
28 common_glue_func_t ecb;
29 common_glue_cbc_func_t cbc;
30 common_glue_ctr_func_t ctr;
31 common_glue_xts_func_t xts;
32 } fn_u;
35 struct common_glue_ctx {
36 unsigned int num_funcs;
37 int fpu_blocks_limit; /* -1 means fpu not needed at all */
40 * First funcs entry must have largest num_blocks and last funcs entry
41 * must have num_blocks == 1!
43 struct common_glue_func_entry funcs[];
46 static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
47 struct blkcipher_desc *desc,
48 bool fpu_enabled, unsigned int nbytes)
50 if (likely(fpu_blocks_limit < 0))
51 return false;
53 if (fpu_enabled)
54 return true;
57 * Vector-registers are only used when chunk to be processed is large
58 * enough, so do not enable FPU until it is necessary.
60 if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
61 return false;
63 if (desc) {
64 /* prevent sleeping if FPU is in use */
65 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
68 kernel_fpu_begin();
69 return true;
72 static inline bool glue_skwalk_fpu_begin(unsigned int bsize,
73 int fpu_blocks_limit,
74 struct skcipher_walk *walk,
75 bool fpu_enabled, unsigned int nbytes)
77 if (likely(fpu_blocks_limit < 0))
78 return false;
80 if (fpu_enabled)
81 return true;
84 * Vector-registers are only used when chunk to be processed is large
85 * enough, so do not enable FPU until it is necessary.
87 if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
88 return false;
90 /* prevent sleeping if FPU is in use */
91 skcipher_walk_atomise(walk);
93 kernel_fpu_begin();
94 return true;
97 static inline void glue_fpu_end(bool fpu_enabled)
99 if (fpu_enabled)
100 kernel_fpu_end();
103 static inline void le128_to_be128(be128 *dst, const le128 *src)
105 dst->a = cpu_to_be64(le64_to_cpu(src->a));
106 dst->b = cpu_to_be64(le64_to_cpu(src->b));
109 static inline void be128_to_le128(le128 *dst, const be128 *src)
111 dst->a = cpu_to_le64(be64_to_cpu(src->a));
112 dst->b = cpu_to_le64(be64_to_cpu(src->b));
115 static inline void le128_inc(le128 *i)
117 u64 a = le64_to_cpu(i->a);
118 u64 b = le64_to_cpu(i->b);
120 b++;
121 if (!b)
122 a++;
124 i->a = cpu_to_le64(a);
125 i->b = cpu_to_le64(b);
128 extern int glue_ecb_crypt_128bit(const struct common_glue_ctx *gctx,
129 struct blkcipher_desc *desc,
130 struct scatterlist *dst,
131 struct scatterlist *src, unsigned int nbytes);
133 extern int glue_cbc_encrypt_128bit(const common_glue_func_t fn,
134 struct blkcipher_desc *desc,
135 struct scatterlist *dst,
136 struct scatterlist *src,
137 unsigned int nbytes);
139 extern int glue_cbc_decrypt_128bit(const struct common_glue_ctx *gctx,
140 struct blkcipher_desc *desc,
141 struct scatterlist *dst,
142 struct scatterlist *src,
143 unsigned int nbytes);
145 extern int glue_ctr_crypt_128bit(const struct common_glue_ctx *gctx,
146 struct blkcipher_desc *desc,
147 struct scatterlist *dst,
148 struct scatterlist *src, unsigned int nbytes);
150 extern int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
151 struct blkcipher_desc *desc,
152 struct scatterlist *dst,
153 struct scatterlist *src, unsigned int nbytes,
154 common_glue_func_t tweak_fn, void *tweak_ctx,
155 void *crypt_ctx);
157 extern int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
158 struct blkcipher_desc *desc,
159 struct scatterlist *dst,
160 struct scatterlist *src, unsigned int nbytes,
161 common_glue_func_t tweak_fn, void *tweak_ctx,
162 void *crypt_ctx);
164 extern int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
165 struct skcipher_request *req,
166 common_glue_func_t tweak_fn, void *tweak_ctx,
167 void *crypt_ctx);
169 extern void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src,
170 le128 *iv, common_glue_func_t fn);
172 #endif /* _CRYPTO_GLUE_HELPER_H */