// SPDX-License-Identifier: GPL-2.0-only
/*
 * AMD Cryptographic Coprocessor (CCP) AES crypto API support
 *
 * Copyright (C) 2013-2019 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 */
10 #include <linux/module.h>
11 #include <linux/sched.h>
12 #include <linux/delay.h>
13 #include <linux/scatterlist.h>
14 #include <linux/crypto.h>
15 #include <crypto/algapi.h>
16 #include <crypto/aes.h>
17 #include <crypto/ctr.h>
18 #include <crypto/scatterwalk.h>
20 #include "ccp-crypto.h"
22 static int ccp_aes_complete(struct crypto_async_request
*async_req
, int ret
)
24 struct skcipher_request
*req
= skcipher_request_cast(async_req
);
25 struct ccp_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
26 struct ccp_aes_req_ctx
*rctx
= skcipher_request_ctx(req
);
31 if (ctx
->u
.aes
.mode
!= CCP_AES_MODE_ECB
)
32 memcpy(req
->iv
, rctx
->iv
, AES_BLOCK_SIZE
);
37 static int ccp_aes_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
40 struct ccp_crypto_skcipher_alg
*alg
= ccp_crypto_skcipher_alg(tfm
);
41 struct ccp_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
45 ctx
->u
.aes
.type
= CCP_AES_TYPE_128
;
48 ctx
->u
.aes
.type
= CCP_AES_TYPE_192
;
51 ctx
->u
.aes
.type
= CCP_AES_TYPE_256
;
56 ctx
->u
.aes
.mode
= alg
->mode
;
57 ctx
->u
.aes
.key_len
= key_len
;
59 memcpy(ctx
->u
.aes
.key
, key
, key_len
);
60 sg_init_one(&ctx
->u
.aes
.key_sg
, ctx
->u
.aes
.key
, key_len
);
65 static int ccp_aes_crypt(struct skcipher_request
*req
, bool encrypt
)
67 struct crypto_skcipher
*tfm
= crypto_skcipher_reqtfm(req
);
68 struct ccp_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
69 struct ccp_aes_req_ctx
*rctx
= skcipher_request_ctx(req
);
70 struct scatterlist
*iv_sg
= NULL
;
71 unsigned int iv_len
= 0;
74 if (!ctx
->u
.aes
.key_len
)
77 if (((ctx
->u
.aes
.mode
== CCP_AES_MODE_ECB
) ||
78 (ctx
->u
.aes
.mode
== CCP_AES_MODE_CBC
)) &&
79 (req
->cryptlen
& (AES_BLOCK_SIZE
- 1)))
82 if (ctx
->u
.aes
.mode
!= CCP_AES_MODE_ECB
) {
86 memcpy(rctx
->iv
, req
->iv
, AES_BLOCK_SIZE
);
88 iv_len
= AES_BLOCK_SIZE
;
89 sg_init_one(iv_sg
, rctx
->iv
, iv_len
);
92 memset(&rctx
->cmd
, 0, sizeof(rctx
->cmd
));
93 INIT_LIST_HEAD(&rctx
->cmd
.entry
);
94 rctx
->cmd
.engine
= CCP_ENGINE_AES
;
95 rctx
->cmd
.u
.aes
.type
= ctx
->u
.aes
.type
;
96 rctx
->cmd
.u
.aes
.mode
= ctx
->u
.aes
.mode
;
97 rctx
->cmd
.u
.aes
.action
=
98 (encrypt
) ? CCP_AES_ACTION_ENCRYPT
: CCP_AES_ACTION_DECRYPT
;
99 rctx
->cmd
.u
.aes
.key
= &ctx
->u
.aes
.key_sg
;
100 rctx
->cmd
.u
.aes
.key_len
= ctx
->u
.aes
.key_len
;
101 rctx
->cmd
.u
.aes
.iv
= iv_sg
;
102 rctx
->cmd
.u
.aes
.iv_len
= iv_len
;
103 rctx
->cmd
.u
.aes
.src
= req
->src
;
104 rctx
->cmd
.u
.aes
.src_len
= req
->cryptlen
;
105 rctx
->cmd
.u
.aes
.dst
= req
->dst
;
107 ret
= ccp_crypto_enqueue_request(&req
->base
, &rctx
->cmd
);
112 static int ccp_aes_encrypt(struct skcipher_request
*req
)
114 return ccp_aes_crypt(req
, true);
117 static int ccp_aes_decrypt(struct skcipher_request
*req
)
119 return ccp_aes_crypt(req
, false);
122 static int ccp_aes_init_tfm(struct crypto_skcipher
*tfm
)
124 struct ccp_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
126 ctx
->complete
= ccp_aes_complete
;
127 ctx
->u
.aes
.key_len
= 0;
129 crypto_skcipher_set_reqsize(tfm
, sizeof(struct ccp_aes_req_ctx
));
134 static int ccp_aes_rfc3686_complete(struct crypto_async_request
*async_req
,
137 struct skcipher_request
*req
= skcipher_request_cast(async_req
);
138 struct ccp_aes_req_ctx
*rctx
= skcipher_request_ctx(req
);
140 /* Restore the original pointer */
141 req
->iv
= rctx
->rfc3686_info
;
143 return ccp_aes_complete(async_req
, ret
);
146 static int ccp_aes_rfc3686_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
147 unsigned int key_len
)
149 struct ccp_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
151 if (key_len
< CTR_RFC3686_NONCE_SIZE
)
154 key_len
-= CTR_RFC3686_NONCE_SIZE
;
155 memcpy(ctx
->u
.aes
.nonce
, key
+ key_len
, CTR_RFC3686_NONCE_SIZE
);
157 return ccp_aes_setkey(tfm
, key
, key_len
);
160 static int ccp_aes_rfc3686_crypt(struct skcipher_request
*req
, bool encrypt
)
162 struct crypto_skcipher
*tfm
= crypto_skcipher_reqtfm(req
);
163 struct ccp_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
164 struct ccp_aes_req_ctx
*rctx
= skcipher_request_ctx(req
);
167 /* Initialize the CTR block */
168 iv
= rctx
->rfc3686_iv
;
169 memcpy(iv
, ctx
->u
.aes
.nonce
, CTR_RFC3686_NONCE_SIZE
);
171 iv
+= CTR_RFC3686_NONCE_SIZE
;
172 memcpy(iv
, req
->iv
, CTR_RFC3686_IV_SIZE
);
174 iv
+= CTR_RFC3686_IV_SIZE
;
175 *(__be32
*)iv
= cpu_to_be32(1);
177 /* Point to the new IV */
178 rctx
->rfc3686_info
= req
->iv
;
179 req
->iv
= rctx
->rfc3686_iv
;
181 return ccp_aes_crypt(req
, encrypt
);
184 static int ccp_aes_rfc3686_encrypt(struct skcipher_request
*req
)
186 return ccp_aes_rfc3686_crypt(req
, true);
189 static int ccp_aes_rfc3686_decrypt(struct skcipher_request
*req
)
191 return ccp_aes_rfc3686_crypt(req
, false);
194 static int ccp_aes_rfc3686_init_tfm(struct crypto_skcipher
*tfm
)
196 struct ccp_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
198 ctx
->complete
= ccp_aes_rfc3686_complete
;
199 ctx
->u
.aes
.key_len
= 0;
201 crypto_skcipher_set_reqsize(tfm
, sizeof(struct ccp_aes_req_ctx
));
206 static const struct skcipher_alg ccp_aes_defaults
= {
207 .setkey
= ccp_aes_setkey
,
208 .encrypt
= ccp_aes_encrypt
,
209 .decrypt
= ccp_aes_decrypt
,
210 .min_keysize
= AES_MIN_KEY_SIZE
,
211 .max_keysize
= AES_MAX_KEY_SIZE
,
212 .init
= ccp_aes_init_tfm
,
214 .base
.cra_flags
= CRYPTO_ALG_ASYNC
|
215 CRYPTO_ALG_KERN_DRIVER_ONLY
|
216 CRYPTO_ALG_NEED_FALLBACK
,
217 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
218 .base
.cra_ctxsize
= sizeof(struct ccp_ctx
),
219 .base
.cra_priority
= CCP_CRA_PRIORITY
,
220 .base
.cra_module
= THIS_MODULE
,
223 static const struct skcipher_alg ccp_aes_rfc3686_defaults
= {
224 .setkey
= ccp_aes_rfc3686_setkey
,
225 .encrypt
= ccp_aes_rfc3686_encrypt
,
226 .decrypt
= ccp_aes_rfc3686_decrypt
,
227 .min_keysize
= AES_MIN_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
228 .max_keysize
= AES_MAX_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
229 .init
= ccp_aes_rfc3686_init_tfm
,
231 .base
.cra_flags
= CRYPTO_ALG_ASYNC
|
232 CRYPTO_ALG_KERN_DRIVER_ONLY
|
233 CRYPTO_ALG_NEED_FALLBACK
,
234 .base
.cra_blocksize
= CTR_RFC3686_BLOCK_SIZE
,
235 .base
.cra_ctxsize
= sizeof(struct ccp_ctx
),
236 .base
.cra_priority
= CCP_CRA_PRIORITY
,
237 .base
.cra_module
= THIS_MODULE
,
241 enum ccp_aes_mode mode
;
242 unsigned int version
;
244 const char *driver_name
;
245 unsigned int blocksize
;
247 const struct skcipher_alg
*alg_defaults
;
250 static struct ccp_aes_def aes_algs
[] = {
252 .mode
= CCP_AES_MODE_ECB
,
253 .version
= CCP_VERSION(3, 0),
255 .driver_name
= "ecb-aes-ccp",
256 .blocksize
= AES_BLOCK_SIZE
,
258 .alg_defaults
= &ccp_aes_defaults
,
261 .mode
= CCP_AES_MODE_CBC
,
262 .version
= CCP_VERSION(3, 0),
264 .driver_name
= "cbc-aes-ccp",
265 .blocksize
= AES_BLOCK_SIZE
,
266 .ivsize
= AES_BLOCK_SIZE
,
267 .alg_defaults
= &ccp_aes_defaults
,
270 .mode
= CCP_AES_MODE_CFB
,
271 .version
= CCP_VERSION(3, 0),
273 .driver_name
= "cfb-aes-ccp",
275 .ivsize
= AES_BLOCK_SIZE
,
276 .alg_defaults
= &ccp_aes_defaults
,
279 .mode
= CCP_AES_MODE_OFB
,
280 .version
= CCP_VERSION(3, 0),
282 .driver_name
= "ofb-aes-ccp",
284 .ivsize
= AES_BLOCK_SIZE
,
285 .alg_defaults
= &ccp_aes_defaults
,
288 .mode
= CCP_AES_MODE_CTR
,
289 .version
= CCP_VERSION(3, 0),
291 .driver_name
= "ctr-aes-ccp",
293 .ivsize
= AES_BLOCK_SIZE
,
294 .alg_defaults
= &ccp_aes_defaults
,
297 .mode
= CCP_AES_MODE_CTR
,
298 .version
= CCP_VERSION(3, 0),
299 .name
= "rfc3686(ctr(aes))",
300 .driver_name
= "rfc3686-ctr-aes-ccp",
302 .ivsize
= CTR_RFC3686_IV_SIZE
,
303 .alg_defaults
= &ccp_aes_rfc3686_defaults
,
307 static int ccp_register_aes_alg(struct list_head
*head
,
308 const struct ccp_aes_def
*def
)
310 struct ccp_crypto_skcipher_alg
*ccp_alg
;
311 struct skcipher_alg
*alg
;
314 ccp_alg
= kzalloc(sizeof(*ccp_alg
), GFP_KERNEL
);
318 INIT_LIST_HEAD(&ccp_alg
->entry
);
320 ccp_alg
->mode
= def
->mode
;
322 /* Copy the defaults and override as necessary */
324 *alg
= *def
->alg_defaults
;
325 snprintf(alg
->base
.cra_name
, CRYPTO_MAX_ALG_NAME
, "%s", def
->name
);
326 snprintf(alg
->base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
, "%s",
328 alg
->base
.cra_blocksize
= def
->blocksize
;
329 alg
->ivsize
= def
->ivsize
;
331 ret
= crypto_register_skcipher(alg
);
333 pr_err("%s skcipher algorithm registration error (%d)\n",
334 alg
->base
.cra_name
, ret
);
339 list_add(&ccp_alg
->entry
, head
);
344 int ccp_register_aes_algs(struct list_head
*head
)
347 unsigned int ccpversion
= ccp_version();
349 for (i
= 0; i
< ARRAY_SIZE(aes_algs
); i
++) {
350 if (aes_algs
[i
].version
> ccpversion
)
352 ret
= ccp_register_aes_alg(head
, &aes_algs
[i
]);