// SPDX-License-Identifier: GPL-2.0

#include <linux/ceph/ceph_debug.h>

#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/key-type.h>
#include <linux/sched/mm.h>

#include <keys/ceph-type.h>
#include <keys/user-type.h>
#include <linux/ceph/decode.h>
#include "crypto.h"

/*
 * Set ->key and ->tfm.  The rest of the key should be filled in before
 * this function is called.
 */
static int set_secret(struct ceph_crypto_key *key, void *buf)
{
	unsigned int noio_flag;
	int ret;

	key->key = NULL;
	key->tfm = NULL;

	switch (key->type) {
	case CEPH_CRYPTO_NONE:
		return 0; /* nothing to do */
	case CEPH_CRYPTO_AES:
		break;
	default:
		return -ENOTSUPP;
	}

	if (!key->len)
		return -EINVAL;

	key->key = kmemdup(buf, key->len, GFP_NOIO);
	if (!key->key) {
		ret = -ENOMEM;
		goto fail;
	}

	/* crypto_alloc_skcipher() allocates with GFP_KERNEL */
	noio_flag = memalloc_noio_save();
	key->tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	memalloc_noio_restore(noio_flag);
	if (IS_ERR(key->tfm)) {
		ret = PTR_ERR(key->tfm);
		key->tfm = NULL;
		goto fail;
	}

	ret = crypto_skcipher_setkey(key->tfm, key->key, key->len);
	if (ret)
		goto fail;

	return 0;

fail:
	ceph_crypto_key_destroy(key);
	return ret;
}
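
/*
 * Note (added, not from the original source): crypto_alloc_skcipher()
 * allocates with GFP_KERNEL internally, so the call above is bracketed
 * with memalloc_noio_save()/memalloc_noio_restore() to make those
 * allocations behave as GFP_NOIO and avoid recursing into I/O during
 * reclaim through ceph.  A minimal sketch of the general pattern, with
 * some_gfp_kernel_api() as a hypothetical stand-in:
 *
 *	unsigned int noio_flag = memalloc_noio_save();
 *	obj = some_gfp_kernel_api();	/- internally uses GFP_KERNEL -/
 *	memalloc_noio_restore(noio_flag);
 */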

int ceph_crypto_key_clone(struct ceph_crypto_key *dst,
			  const struct ceph_crypto_key *src)
{
	memcpy(dst, src, sizeof(struct ceph_crypto_key));
	return set_secret(dst, src->key);
}

int ceph_crypto_key_encode(struct ceph_crypto_key *key, void **p, void *end)
{
	if (*p + sizeof(u16) + sizeof(key->created) +
	    sizeof(u16) + key->len > end)
		return -ERANGE;
	ceph_encode_16(p, key->type);
	ceph_encode_copy(p, &key->created, sizeof(key->created));
	ceph_encode_16(p, key->len);
	ceph_encode_copy(p, key->key, key->len);
	return 0;
}

int ceph_crypto_key_decode(struct ceph_crypto_key *key, void **p, void *end)
{
	int ret;

	ceph_decode_need(p, end, 2*sizeof(u16) + sizeof(key->created), bad);
	key->type = ceph_decode_16(p);
	ceph_decode_copy(p, &key->created, sizeof(key->created));
	key->len = ceph_decode_16(p);
	ceph_decode_need(p, end, key->len, bad);
	ret = set_secret(key, *p);
	*p += key->len;
	return ret;

bad:
	dout("failed to decode crypto key\n");
	return -EINVAL;
}

int ceph_crypto_key_unarmor(struct ceph_crypto_key *key, const char *inkey)
{
	int inlen = strlen(inkey);
	int blen = inlen * 3 / 4;
	void *buf, *p;
	int ret;

	dout("crypto_key_unarmor %s\n", inkey);
	buf = kmalloc(blen, GFP_NOFS);
	if (!buf)
		return -ENOMEM;
	blen = ceph_unarmor(buf, inkey, inkey+inlen);
	if (blen < 0) {
		kfree(buf);
		return blen;
	}

	p = buf;
	ret = ceph_crypto_key_decode(key, &p, p + blen);
	kfree(buf);
	if (ret)
		return ret;
	dout("crypto_key_unarmor key %p type %d len %d\n", key,
	     key->type, key->len);
	return 0;
}
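
/*
 * Note (added, not from the original source): ceph_unarmor() undoes the
 * base64-style armoring of the secret, and base64 packs 3 bytes of binary
 * data into 4 text characters, hence the "inlen * 3 / 4" upper bound used
 * for the decode buffer above.  Worked example: a 56-character armored
 * key decodes to at most 56 * 3 / 4 = 42 bytes.
 */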

void ceph_crypto_key_destroy(struct ceph_crypto_key *key)
{
	if (key) {
		kfree(key->key);
		key->key = NULL;
		crypto_free_skcipher(key->tfm);
		key->tfm = NULL;
	}
}

static const u8 *aes_iv = (u8 *)CEPH_AES_IV;

/*
 * Should be used for buffers allocated with ceph_kvmalloc().
 * Currently these are encrypt out-buffer (ceph_buffer) and decrypt
 * in-buffer (msg front).
 *
 * Dispose of @sgt with teardown_sgtable().
 *
 * @prealloc_sg is to avoid memory allocation inside sg_alloc_table()
 * in cases where a single sg is sufficient.  No attempt to reduce the
 * number of sgs by squeezing physically contiguous pages together is
 * made though, for simplicity.
 */
static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg,
			 const void *buf, unsigned int buf_len)
{
	struct scatterlist *sg;
	const bool is_vmalloc = is_vmalloc_addr(buf);
	unsigned int off = offset_in_page(buf);
	unsigned int chunk_cnt = 1;
	unsigned int chunk_len = PAGE_ALIGN(off + buf_len);
	int i;
	int ret;

	if (buf_len == 0) {
		memset(sgt, 0, sizeof(*sgt));
		return -EINVAL;
	}

	if (is_vmalloc) {
		chunk_cnt = chunk_len >> PAGE_SHIFT;
		chunk_len = PAGE_SIZE;
	}

	if (chunk_cnt > 1) {
		ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);
		if (ret)
			return ret;
	} else {
		WARN_ON(chunk_cnt != 1);
		sg_init_table(prealloc_sg, 1);
		sgt->sgl = prealloc_sg;
		sgt->nents = sgt->orig_nents = 1;
	}

	for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {
		struct page *page;
		unsigned int len = min(chunk_len - off, buf_len);

		if (is_vmalloc)
			page = vmalloc_to_page(buf);
		else
			page = virt_to_page(buf);

		sg_set_page(sg, page, len, off);

		off = 0;
		buf += len;
		buf_len -= len;
	}
	WARN_ON(buf_len != 0);

	return 0;
}

static void teardown_sgtable(struct sg_table *sgt)
{
	if (sgt->orig_nents > 1)
		sg_free_table(sgt);
}
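
/*
 * Note (added, not from the original source): a minimal sketch of how the
 * setup_sgtable()/teardown_sgtable() pair is meant to be used, with
 * buf/buf_len standing in for a caller-owned buffer (ceph_aes_crypt()
 * below is the real in-tree caller):
 *
 *	struct sg_table sgt;
 *	struct scatterlist prealloc_sg;
 *	int ret;
 *
 *	ret = setup_sgtable(&sgt, &prealloc_sg, buf, buf_len);
 *	if (ret)
 *		return ret;
 *	... hand sgt.sgl to the crypto API ...
 *	teardown_sgtable(&sgt);
 *
 * For a linearly-mapped buffer this uses the single preallocated sg; for
 * a vmalloc'ed buffer sg_alloc_table() is used with one sg per page.
 */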

static int ceph_aes_crypt(const struct ceph_crypto_key *key, bool encrypt,
			  void *buf, int buf_len, int in_len, int *pout_len)
{
	SKCIPHER_REQUEST_ON_STACK(req, key->tfm);
	struct sg_table sgt;
	struct scatterlist prealloc_sg;
	char iv[AES_BLOCK_SIZE] __aligned(8);
	int pad_byte = AES_BLOCK_SIZE - (in_len & (AES_BLOCK_SIZE - 1));
	int crypt_len = encrypt ? in_len + pad_byte : in_len;
	int ret;

	WARN_ON(crypt_len > buf_len);
	if (encrypt)
		memset(buf + in_len, pad_byte, pad_byte);
	ret = setup_sgtable(&sgt, &prealloc_sg, buf, crypt_len);
	if (ret)
		return ret;

	memcpy(iv, aes_iv, AES_BLOCK_SIZE);
	skcipher_request_set_tfm(req, key->tfm);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, sgt.sgl, sgt.sgl, crypt_len, iv);

	/*
	print_hex_dump(KERN_ERR, "key: ", DUMP_PREFIX_NONE, 16, 1,
		       key->key, key->len, 1);
	print_hex_dump(KERN_ERR, " in: ", DUMP_PREFIX_NONE, 16, 1,
		       buf, crypt_len, 1);
	*/
	if (encrypt)
		ret = crypto_skcipher_encrypt(req);
	else
		ret = crypto_skcipher_decrypt(req);
	skcipher_request_zero(req);
	if (ret) {
		pr_err("%s %scrypt failed: %d\n", __func__,
		       encrypt ? "en" : "de", ret);
		goto out_sgt;
	}
	/*
	print_hex_dump(KERN_ERR, "out: ", DUMP_PREFIX_NONE, 16, 1,
		       buf, crypt_len, 1);
	*/

	if (encrypt) {
		*pout_len = crypt_len;
	} else {
		pad_byte = *(char *)(buf + in_len - 1);
		if (pad_byte > 0 && pad_byte <= AES_BLOCK_SIZE &&
		    in_len >= pad_byte) {
			*pout_len = in_len - pad_byte;
		} else {
			pr_err("%s got bad padding %d on in_len %d\n",
			       __func__, pad_byte, in_len);
			ret = -EPERM;
			goto out_sgt;
		}
	}

out_sgt:
	teardown_sgtable(&sgt);
	return ret;
}
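
/*
 * Note (added, not from the original source): the padding above is the
 * usual PKCS#7-style scheme where every pad byte holds the pad length.
 * Worked examples for AES_BLOCK_SIZE == 16:
 *
 *	in_len = 10:  pad_byte = 16 - (10 & 15) = 6,  crypt_len = 16
 *	in_len = 16:  pad_byte = 16 - (16 & 15) = 16, crypt_len = 32
 *
 * so an already block-aligned input still gains a full block of padding,
 * which is what lets the decrypt path read the last byte to recover the
 * original length.
 */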

int ceph_crypt(const struct ceph_crypto_key *key, bool encrypt,
	       void *buf, int buf_len, int in_len, int *pout_len)
{
	switch (key->type) {
	case CEPH_CRYPTO_NONE:
		*pout_len = in_len;
		return 0;
	case CEPH_CRYPTO_AES:
		return ceph_aes_crypt(key, encrypt, buf, buf_len, in_len,
				      pout_len);
	default:
		return -ENOTSUPP;
	}
}

static int ceph_key_preparse(struct key_preparsed_payload *prep)
{
	struct ceph_crypto_key *ckey;
	size_t datalen = prep->datalen;
	int ret;
	void *p;

	ret = -EINVAL;
	if (datalen <= 0 || datalen > 32767 || !prep->data)
		goto err;

	ret = -ENOMEM;
	ckey = kmalloc(sizeof(*ckey), GFP_KERNEL);
	if (!ckey)
		goto err;

	/* TODO ceph_crypto_key_decode should really take const input */
	p = (void *)prep->data;
	ret = ceph_crypto_key_decode(ckey, &p, (char*)prep->data+datalen);
	if (ret < 0)
		goto err_ckey;

	prep->payload.data[0] = ckey;
	prep->quotalen = datalen;
	return 0;

err_ckey:
	kfree(ckey);
err:
	return ret;
}

static void ceph_key_free_preparse(struct key_preparsed_payload *prep)
{
	struct ceph_crypto_key *ckey = prep->payload.data[0];
	ceph_crypto_key_destroy(ckey);
	kfree(ckey);
}

static void ceph_key_destroy(struct key *key)
{
	struct ceph_crypto_key *ckey = key->payload.data[0];

	ceph_crypto_key_destroy(ckey);
	kfree(ckey);
}

struct key_type key_type_ceph = {
	.name		= "ceph",
	.preparse	= ceph_key_preparse,
	.free_preparse	= ceph_key_free_preparse,
	.instantiate	= generic_key_instantiate,
	.destroy	= ceph_key_destroy,
};
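
/*
 * Note (added, not from the original source): with this key type
 * registered, userspace (e.g. a mount helper) can hand the kernel a
 * secret through the keyring API rather than on the command line.  An
 * illustrative, hedged sketch using libkeyutils; the description string
 * "client.admin" is only an example, and the payload must be in the
 * binary ceph_crypto_key encoding that ceph_key_preparse() above
 * decodes, not the base64-armored form:
 *
 *	key_serial_t k = add_key("ceph", "client.admin",
 *				 secret_buf, secret_len,
 *				 KEY_SPEC_PROCESS_KEYRING);
 */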

int __init ceph_crypto_init(void)
{
	return register_key_type(&key_type_ceph);
}

void ceph_crypto_shutdown(void)
{
	unregister_key_type(&key_type_ceph);
}