// SPDX-License-Identifier: GPL-2.0-only
/*
 * AMD Cryptographic Coprocessor (CCP) RSA crypto API support
 *
 * Copyright (C) 2017 Advanced Micro Devices, Inc.
 *
 * Author: Gary R Hook <gary.hook@amd.com>
 */

#include <linux/module.h>
#include <linux/sched.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/internal/rsa.h>
#include <crypto/internal/akcipher.h>
#include <crypto/akcipher.h>
#include <crypto/scatterwalk.h>

#include "ccp-crypto.h"
static inline struct akcipher_request *akcipher_request_cast(
        struct crypto_async_request *req)
{
        return container_of(req, struct akcipher_request, base);
}
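
/*
 * Copy one key component (n, e or d) out of the parsed rsa_key, dropping
 * any leading zero octets so only the significant bytes are handed to the
 * CCP, and record the resulting length.
 */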
static inline int ccp_copy_and_save_keypart(u8 **kpbuf, unsigned int *kplen,
                                            const u8 *buf, size_t sz)
{
        int nskip;

        for (nskip = 0; nskip < sz; nskip++)
                if (buf[nskip])
                        break;
        *kplen = sz - nskip;
        *kpbuf = kmemdup(buf + nskip, *kplen, GFP_KERNEL);
        if (!*kpbuf)
                return -ENOMEM;

        return 0;
}
static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret)
{
        struct akcipher_request *req = akcipher_request_cast(async_req);
        struct ccp_rsa_req_ctx *rctx = akcipher_request_ctx_dma(req);

        if (ret)
                return ret;

        req->dst_len = rctx->cmd.u.rsa.key_size >> 3;

        return 0;
}
static unsigned int ccp_rsa_maxsize(struct crypto_akcipher *tfm)
{
        struct ccp_ctx *ctx = akcipher_tfm_ctx_dma(tfm);

        return ctx->u.rsa.n_len;
}
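
/*
 * Build a CCP RSA command from the akcipher request and queue it on the
 * device: the public exponent (e) is used when encrypting and the private
 * exponent (d) when decrypting, with the modulus (n) and the caller's
 * src/dst scatterlists passed through unchanged.
 */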
static int ccp_rsa_crypt(struct akcipher_request *req, bool encrypt)
{
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
        struct ccp_ctx *ctx = akcipher_tfm_ctx_dma(tfm);
        struct ccp_rsa_req_ctx *rctx = akcipher_request_ctx_dma(req);
        int ret = 0;

        memset(&rctx->cmd, 0, sizeof(rctx->cmd));
        INIT_LIST_HEAD(&rctx->cmd.entry);
        rctx->cmd.engine = CCP_ENGINE_RSA;

        rctx->cmd.u.rsa.key_size = ctx->u.rsa.key_len; /* in bits */
        if (encrypt) {
                rctx->cmd.u.rsa.exp = &ctx->u.rsa.e_sg;
                rctx->cmd.u.rsa.exp_len = ctx->u.rsa.e_len;
        } else {
                rctx->cmd.u.rsa.exp = &ctx->u.rsa.d_sg;
                rctx->cmd.u.rsa.exp_len = ctx->u.rsa.d_len;
        }
        rctx->cmd.u.rsa.mod = &ctx->u.rsa.n_sg;
        rctx->cmd.u.rsa.mod_len = ctx->u.rsa.n_len;
        rctx->cmd.u.rsa.src = req->src;
        rctx->cmd.u.rsa.src_len = req->src_len;
        rctx->cmd.u.rsa.dst = req->dst;

        ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);

        return ret;
}
static int ccp_rsa_encrypt(struct akcipher_request *req)
{
        return ccp_rsa_crypt(req, true);
}
static int ccp_rsa_decrypt(struct akcipher_request *req)
{
        return ccp_rsa_crypt(req, false);
}
static int ccp_check_key_length(unsigned int len)
{
        /* In bits */
        if (len < 8 || len > 4096)
                return -EINVAL;

        return 0;
}
static void ccp_rsa_free_key_bufs(struct ccp_ctx *ctx)
{
        /* Clean up old key data */
        kfree_sensitive(ctx->u.rsa.e_buf);
        ctx->u.rsa.e_buf = NULL;
        ctx->u.rsa.e_len = 0;
        kfree_sensitive(ctx->u.rsa.n_buf);
        ctx->u.rsa.n_buf = NULL;
        ctx->u.rsa.n_len = 0;
        kfree_sensitive(ctx->u.rsa.d_buf);
        ctx->u.rsa.d_buf = NULL;
        ctx->u.rsa.d_len = 0;
}
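
/*
 * Parse a BER-encoded RSA key using the kernel's rsa_parse_*_key() helpers
 * and cache the modulus (n), public exponent (e) and, for private keys, the
 * private exponent (d) in driver-owned buffers wrapped in scatterlists.
 */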
static int ccp_rsa_setkey(struct crypto_akcipher *tfm, const void *key,
                          unsigned int keylen, bool private)
{
        struct ccp_ctx *ctx = akcipher_tfm_ctx_dma(tfm);
        struct rsa_key raw_key;
        int ret;

        ccp_rsa_free_key_bufs(ctx);
        memset(&raw_key, 0, sizeof(raw_key));

        /* Code borrowed from crypto/rsa.c */
        if (private)
                ret = rsa_parse_priv_key(&raw_key, key, keylen);
        else
                ret = rsa_parse_pub_key(&raw_key, key, keylen);
        if (ret)
                goto key_err;

        ret = ccp_copy_and_save_keypart(&ctx->u.rsa.n_buf, &ctx->u.rsa.n_len,
                                        raw_key.n, raw_key.n_sz);
        if (ret)
                goto key_err;
        sg_init_one(&ctx->u.rsa.n_sg, ctx->u.rsa.n_buf, ctx->u.rsa.n_len);

        ctx->u.rsa.key_len = ctx->u.rsa.n_len << 3; /* convert to bits */
        if (ccp_check_key_length(ctx->u.rsa.key_len)) {
                ret = -EINVAL;
                goto key_err;
        }

        ret = ccp_copy_and_save_keypart(&ctx->u.rsa.e_buf, &ctx->u.rsa.e_len,
                                        raw_key.e, raw_key.e_sz);
        if (ret)
                goto key_err;
        sg_init_one(&ctx->u.rsa.e_sg, ctx->u.rsa.e_buf, ctx->u.rsa.e_len);

        if (private) {
                ret = ccp_copy_and_save_keypart(&ctx->u.rsa.d_buf,
                                                &ctx->u.rsa.d_len,
                                                raw_key.d, raw_key.d_sz);
                if (ret)
                        goto key_err;
                sg_init_one(&ctx->u.rsa.d_sg,
                            ctx->u.rsa.d_buf, ctx->u.rsa.d_len);
        }

        return 0;

key_err:
        ccp_rsa_free_key_bufs(ctx);

        return ret;
}
static int ccp_rsa_setprivkey(struct crypto_akcipher *tfm, const void *key,
                              unsigned int keylen)
{
        return ccp_rsa_setkey(tfm, key, keylen, true);
}
static int ccp_rsa_setpubkey(struct crypto_akcipher *tfm, const void *key,
                             unsigned int keylen)
{
        return ccp_rsa_setkey(tfm, key, keylen, false);
}
static int ccp_rsa_init_tfm(struct crypto_akcipher *tfm)
{
        struct ccp_ctx *ctx = akcipher_tfm_ctx_dma(tfm);

        akcipher_set_reqsize_dma(tfm, sizeof(struct ccp_rsa_req_ctx));
        ctx->complete = ccp_rsa_complete;

        return 0;
}
static void ccp_rsa_exit_tfm(struct crypto_akcipher *tfm)
{
        struct ccp_ctx *ctx = akcipher_tfm_ctx_dma(tfm);

        ccp_rsa_free_key_bufs(ctx);
}
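
/*
 * Template akcipher_alg: ccp_register_rsa_alg() copies this structure and
 * fills in cra_name/cra_driver_name from the matching ccp_rsa_def entry.
 */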
static struct akcipher_alg ccp_rsa_defaults = {
        .encrypt = ccp_rsa_encrypt,
        .decrypt = ccp_rsa_decrypt,
        .set_pub_key = ccp_rsa_setpubkey,
        .set_priv_key = ccp_rsa_setprivkey,
        .max_size = ccp_rsa_maxsize,
        .init = ccp_rsa_init_tfm,
        .exit = ccp_rsa_exit_tfm,
        .base = {
                .cra_name = "rsa",
                .cra_driver_name = "rsa-ccp",
                .cra_priority = CCP_CRA_PRIORITY,
                .cra_module = THIS_MODULE,
                .cra_ctxsize = 2 * sizeof(struct ccp_ctx) + CRYPTO_DMA_PADDING,
        },
};
struct ccp_rsa_def {
        unsigned int version;
        const char *name;
        const char *driver_name;
        unsigned int reqsize;
        struct akcipher_alg *alg_defaults;
};
static struct ccp_rsa_def rsa_algs[] = {
        {
                .version = CCP_VERSION(3, 0),
                .name = "rsa",
                .driver_name = "rsa-ccp",
                .reqsize = sizeof(struct ccp_rsa_req_ctx),
                .alg_defaults = &ccp_rsa_defaults,
        },
};
static int ccp_register_rsa_alg(struct list_head *head,
                                const struct ccp_rsa_def *def)
{
        struct ccp_crypto_akcipher_alg *ccp_alg;
        struct akcipher_alg *alg;
        int ret;

        ccp_alg = kzalloc(sizeof(*ccp_alg), GFP_KERNEL);
        if (!ccp_alg)
                return -ENOMEM;

        INIT_LIST_HEAD(&ccp_alg->entry);

        alg = &ccp_alg->alg;
        *alg = *def->alg_defaults;
        snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
        snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
                 def->driver_name);
        ret = crypto_register_akcipher(alg);
        if (ret) {
                pr_err("%s akcipher algorithm registration error (%d)\n",
                       alg->base.cra_name, ret);
                kfree(ccp_alg);
                return ret;
        }

        list_add(&ccp_alg->entry, head);

        return 0;
}
int ccp_register_rsa_algs(struct list_head *head)
{
        int ret;
        int i;
        unsigned int ccpversion = ccp_version();

        /* Register the RSA algorithm in standard mode
         * This works for CCP v3 and later
         */
        for (i = 0; i < ARRAY_SIZE(rsa_algs); i++) {
                if (rsa_algs[i].version > ccpversion)
                        continue;
                ret = ccp_register_rsa_alg(head, &rsa_algs[i]);
                if (ret)
                        return ret;
        }

        return 0;
}
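
/*
 * Usage sketch (illustrative only, not part of this driver): once
 * ccp_register_rsa_algs() has run, a kernel consumer can reach this
 * implementation through the generic akcipher API, roughly as below;
 * der_key, src_sg, dst_sg and the length variables are placeholders.
 *
 *      struct crypto_akcipher *tfm;
 *      struct akcipher_request *req;
 *      int ret;
 *
 *      tfm = crypto_alloc_akcipher("rsa", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      ret = crypto_akcipher_set_pub_key(tfm, der_key, der_key_len);
 *      req = akcipher_request_alloc(tfm, GFP_KERNEL);
 *      akcipher_request_set_crypt(req, src_sg, dst_sg, src_len, dst_len);
 *      ret = crypto_akcipher_encrypt(req);
 *
 * Whether "rsa" resolves to "rsa-ccp" depends on CCP_CRA_PRIORITY relative
 * to the priorities of other registered RSA implementations.
 */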