// SPDX-License-Identifier: GPL-2.0

#include <linux/ceph/ceph_debug.h>

#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/key-type.h>
#include <linux/sched/mm.h>

#include <keys/ceph-type.h>
#include <keys/user-type.h>
#include <linux/ceph/decode.h>
#include "crypto.h"
/*
 * Set ->key and ->tfm.  The rest of the key should be filled in before
 * this function is called.
 */
static int set_secret(struct ceph_crypto_key *key, void *buf)
{
        unsigned int noio_flag;
        int ret;

        key->key = NULL;
        key->tfm = NULL;

        switch (key->type) {
        case CEPH_CRYPTO_NONE:
                return 0; /* nothing to do */
        case CEPH_CRYPTO_AES:
                break;
        default:
                return -ENOTSUPP;
        }

        if (!key->len)
                return -EINVAL;

        key->key = kmemdup(buf, key->len, GFP_NOIO);
        if (!key->key) {
                ret = -ENOMEM;
                goto fail;
        }

        /* crypto_alloc_sync_skcipher() allocates with GFP_KERNEL */
        noio_flag = memalloc_noio_save();
        key->tfm = crypto_alloc_sync_skcipher("cbc(aes)", 0, 0);
        memalloc_noio_restore(noio_flag);
        if (IS_ERR(key->tfm)) {
                ret = PTR_ERR(key->tfm);
                key->tfm = NULL;
                goto fail;
        }

        ret = crypto_sync_skcipher_setkey(key->tfm, key->key, key->len);
        if (ret)
                goto fail;

        return 0;

fail:
        ceph_crypto_key_destroy(key);
        return ret;
}
int ceph_crypto_key_clone(struct ceph_crypto_key *dst,
                          const struct ceph_crypto_key *src)
{
        memcpy(dst, src, sizeof(struct ceph_crypto_key));
        return set_secret(dst, src->key);
}
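
/*
 * Wire format shared by ceph_crypto_key_encode() and
 * ceph_crypto_key_decode(): a __le16 key type, the created timestamp
 * copied verbatim, a __le16 secret length and then the secret bytes.
 */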
int ceph_crypto_key_encode(struct ceph_crypto_key *key, void **p, void *end)
{
        if (*p + sizeof(u16) + sizeof(key->created) +
            sizeof(u16) + key->len > end)
                return -ERANGE;
        ceph_encode_16(p, key->type);
        ceph_encode_copy(p, &key->created, sizeof(key->created));
        ceph_encode_16(p, key->len);
        ceph_encode_copy(p, key->key, key->len);
        return 0;
}
int ceph_crypto_key_decode(struct ceph_crypto_key *key, void **p, void *end)
{
        int ret;

        ceph_decode_need(p, end, 2*sizeof(u16) + sizeof(key->created), bad);
        key->type = ceph_decode_16(p);
        ceph_decode_copy(p, &key->created, sizeof(key->created));
        key->len = ceph_decode_16(p);
        ceph_decode_need(p, end, key->len, bad);
        ret = set_secret(key, *p);
        *p += key->len;
        return ret;

bad:
        dout("failed to decode crypto key\n");
        return -EINVAL;
}
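
/*
 * Convert a printable ("armored", base64-style) key string, as found in
 * a Ceph keyring file, into its binary form and decode it into @key.
 * inlen * 3 / 4 is an upper bound on the decoded size, so the temporary
 * buffer is always large enough for ceph_unarmor().
 */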
int ceph_crypto_key_unarmor(struct ceph_crypto_key *key, const char *inkey)
{
        int inlen = strlen(inkey);
        int blen = inlen * 3 / 4;
        void *buf, *p;
        int ret;

        dout("crypto_key_unarmor %s\n", inkey);
        buf = kmalloc(blen, GFP_NOFS);
        if (!buf)
                return -ENOMEM;
        blen = ceph_unarmor(buf, inkey, inkey+inlen);
        if (blen < 0) {
                kfree(buf);
                return blen;
        }

        p = buf;
        ret = ceph_crypto_key_decode(key, &p, p + blen);
        kfree(buf);
        if (ret)
                return ret;
        dout("crypto_key_unarmor key %p type %d len %d\n", key,
             key->type, key->len);
        return 0;
}
void ceph_crypto_key_destroy(struct ceph_crypto_key *key)
{
        if (key) {
                kfree(key->key);
                key->key = NULL;
                if (key->tfm) {
                        crypto_free_sync_skcipher(key->tfm);
                        key->tfm = NULL;
                }
        }
}
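
/*
 * All AES-CBC operations below use the fixed, compile-time IV from
 * CEPH_AES_IV rather than a per-message IV; this mirrors the constant
 * IV used by the userspace Ceph implementation on the wire.
 */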
static const u8 *aes_iv = (u8 *)CEPH_AES_IV;
/*
 * Should be used for buffers allocated with ceph_kvmalloc().
 * Currently these are encrypt out-buffer (ceph_buffer) and decrypt
 * in-buffer (msg front).
 *
 * Dispose of @sgt with teardown_sgtable().
 *
 * @prealloc_sg is to avoid memory allocation inside sg_alloc_table()
 * in cases where a single sg is sufficient.  No attempt to reduce the
 * number of sgs by squeezing physically contiguous pages together is
 * made though, for simplicity.
 */
static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg,
                         const void *buf, unsigned int buf_len)
{
        struct scatterlist *sg;
        const bool is_vmalloc = is_vmalloc_addr(buf);
        unsigned int off = offset_in_page(buf);
        unsigned int chunk_cnt = 1;
        unsigned int chunk_len = PAGE_ALIGN(off + buf_len);
        int i;
        int ret;

        if (buf_len == 0) {
                memset(sgt, 0, sizeof(*sgt));
                return -EINVAL;
        }

        if (is_vmalloc) {
                chunk_cnt = chunk_len >> PAGE_SHIFT;
                chunk_len = PAGE_SIZE;
        }

        if (chunk_cnt > 1) {
                ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);
                if (ret)
                        return ret;
        } else {
                WARN_ON(chunk_cnt != 1);
                sg_init_table(prealloc_sg, 1);
                sgt->sgl = prealloc_sg;
                sgt->nents = sgt->orig_nents = 1;
        }

        for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {
                struct page *page;
                unsigned int len = min(chunk_len - off, buf_len);

                if (is_vmalloc)
                        page = vmalloc_to_page(buf);
                else
                        page = virt_to_page(buf);

                sg_set_page(sg, page, len, off);

                off = 0;
                buf += len;
                buf_len -= len;
        }
        WARN_ON(buf_len != 0);

        return 0;
}
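
/*
 * Counterpart of setup_sgtable().  Only frees the table if it was
 * actually allocated by sg_alloc_table(); in the single-entry case the
 * table points at the caller's prealloc_sg and there is nothing to free.
 */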
static void teardown_sgtable(struct sg_table *sgt)
{
        if (sgt->orig_nents > 1)
                sg_free_table(sgt);
}
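
/*
 * In-place AES-CBC with PKCS#7-style padding.  @buf holds @in_len bytes
 * of input and, when encrypting, must have room for up to AES_BLOCK_SIZE
 * bytes of padding (hence the WARN_ON(crypt_len > buf_len) below).  On
 * decrypt the pad length is read back from the last plaintext byte and
 * subtracted to produce *@pout_len.
 */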
static int ceph_aes_crypt(const struct ceph_crypto_key *key, bool encrypt,
                          void *buf, int buf_len, int in_len, int *pout_len)
{
        SYNC_SKCIPHER_REQUEST_ON_STACK(req, key->tfm);
        struct sg_table sgt;
        struct scatterlist prealloc_sg;
        char iv[AES_BLOCK_SIZE] __aligned(8);
        int pad_byte = AES_BLOCK_SIZE - (in_len & (AES_BLOCK_SIZE - 1));
        int crypt_len = encrypt ? in_len + pad_byte : in_len;
        int ret;

        WARN_ON(crypt_len > buf_len);
        if (encrypt)
                memset(buf + in_len, pad_byte, pad_byte);
        ret = setup_sgtable(&sgt, &prealloc_sg, buf, crypt_len);
        if (ret)
                return ret;

        memcpy(iv, aes_iv, AES_BLOCK_SIZE);
        skcipher_request_set_sync_tfm(req, key->tfm);
        skcipher_request_set_callback(req, 0, NULL, NULL);
        skcipher_request_set_crypt(req, sgt.sgl, sgt.sgl, crypt_len, iv);

        /*
        print_hex_dump(KERN_ERR, "key: ", DUMP_PREFIX_NONE, 16, 1,
                       key->key, key->len, 1);
        print_hex_dump(KERN_ERR, " in: ", DUMP_PREFIX_NONE, 16, 1,
                       buf, crypt_len, 1);
        */
        if (encrypt)
                ret = crypto_skcipher_encrypt(req);
        else
                ret = crypto_skcipher_decrypt(req);
        skcipher_request_zero(req);
        if (ret) {
                pr_err("%s %scrypt failed: %d\n", __func__,
                       encrypt ? "en" : "de", ret);
                goto out_sgt;
        }
        /*
        print_hex_dump(KERN_ERR, "out: ", DUMP_PREFIX_NONE, 16, 1,
                       buf, crypt_len, 1);
        */

        if (encrypt) {
                *pout_len = crypt_len;
        } else {
                pad_byte = *(char *)(buf + in_len - 1);
                if (pad_byte > 0 && pad_byte <= AES_BLOCK_SIZE &&
                    in_len >= pad_byte) {
                        *pout_len = in_len - pad_byte;
                } else {
                        pr_err("%s got bad padding %d on in_len %d\n",
                               __func__, pad_byte, in_len);
                        ret = -EPERM;
                        goto out_sgt;
                }
        }

out_sgt:
        teardown_sgtable(&sgt);
        return ret;
}
int ceph_crypt(const struct ceph_crypto_key *key, bool encrypt,
               void *buf, int buf_len, int in_len, int *pout_len)
{
        switch (key->type) {
        case CEPH_CRYPTO_NONE:
                *pout_len = in_len;
                return 0;
        case CEPH_CRYPTO_AES:
                return ceph_aes_crypt(key, encrypt, buf, buf_len, in_len,
                                      pout_len);
        default:
                return -ENOTSUPP;
        }
}
static int ceph_key_preparse(struct key_preparsed_payload *prep)
{
        struct ceph_crypto_key *ckey;
        size_t datalen = prep->datalen;
        int ret;
        void *p;

        ret = -EINVAL;
        if (datalen <= 0 || datalen > 32767 || !prep->data)
                goto err;

        ret = -ENOMEM;
        ckey = kmalloc(sizeof(*ckey), GFP_KERNEL);
        if (!ckey)
                goto err;

        /* TODO ceph_crypto_key_decode should really take const input */
        p = (void *)prep->data;
        ret = ceph_crypto_key_decode(ckey, &p, (char*)prep->data+datalen);
        if (ret < 0)
                goto err_ckey;

        prep->payload.data[0] = ckey;
        prep->quotalen = datalen;
        return 0;

err_ckey:
        kfree(ckey);
err:
        return ret;
}
static void ceph_key_free_preparse(struct key_preparsed_payload *prep)
{
        struct ceph_crypto_key *ckey = prep->payload.data[0];

        ceph_crypto_key_destroy(ckey);
        kfree(ckey);
}
static void ceph_key_destroy(struct key *key)
{
        struct ceph_crypto_key *ckey = key->payload.data[0];

        ceph_crypto_key_destroy(ckey);
        kfree(ckey);
}
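
/*
 * Key type registered with the kernel keyring so that userspace can hand
 * the client a pre-parsed secret via add_key(2) with type "ceph"; the
 * payload is parsed once by ceph_key_preparse() and stored as a
 * struct ceph_crypto_key.
 */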
struct key_type key_type_ceph = {
        .name           = "ceph",
        .preparse       = ceph_key_preparse,
        .free_preparse  = ceph_key_free_preparse,
        .instantiate    = generic_key_instantiate,
        .destroy        = ceph_key_destroy,
};
int __init ceph_crypto_init(void)
{
        return register_key_type(&key_type_ceph);
}
void ceph_crypto_shutdown(void)
{
        unregister_key_type(&key_type_ceph);
}