// SPDX-License-Identifier: GPL-2.0
/*
 * NVMe over Fabrics DH-HMAC-CHAP authentication command handling.
 * Copyright (c) 2020 Hannes Reinecke, SUSE Software Solutions.
 * All rights reserved.
 */
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
#include <linux/blkdev.h>
#include <linux/random.h>
#include <linux/nvme-auth.h>
#include <crypto/hash.h>
#include <crypto/kpp.h>
#include "nvmet.h"

static void nvmet_auth_expired_work(struct work_struct *work)
{
        struct nvmet_sq *sq = container_of(to_delayed_work(work),
                        struct nvmet_sq, auth_expired_work);

        pr_debug("%s: ctrl %d qid %d transaction %u expired, resetting\n",
                 __func__, sq->ctrl->cntlid, sq->qid, sq->dhchap_tid);
        sq->dhchap_step = NVME_AUTH_DHCHAP_MESSAGE_NEGOTIATE;
        sq->dhchap_tid = -1;
}

void nvmet_auth_sq_init(struct nvmet_sq *sq)
{
        /* Initialize in-band authentication */
        INIT_DELAYED_WORK(&sq->auth_expired_work, nvmet_auth_expired_work);
        sq->authenticated = false;
        sq->dhchap_step = NVME_AUTH_DHCHAP_MESSAGE_NEGOTIATE;
}
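
/*
 * Process the host's AUTH_Negotiate payload: record the transaction id,
 * then select the HMAC hash and DH group to use. If the configured
 * algorithms are not among those offered by the host, fall back to the
 * first offered one that the crypto layer supports. Returns 0 on success
 * or a DH-HMAC-CHAP failure reason code.
 */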
static u8 nvmet_auth_negotiate(struct nvmet_req *req, void *d)
{
        struct nvmet_ctrl *ctrl = req->sq->ctrl;
        struct nvmf_auth_dhchap_negotiate_data *data = d;
        int i, hash_id = 0, fallback_hash_id = 0, dhgid, fallback_dhgid;

        pr_debug("%s: ctrl %d qid %d: data sc_d %d napd %d authid %d halen %d dhlen %d\n",
                 __func__, ctrl->cntlid, req->sq->qid,
                 data->sc_c, data->napd, data->auth_protocol[0].dhchap.authid,
                 data->auth_protocol[0].dhchap.halen,
                 data->auth_protocol[0].dhchap.dhlen);
        req->sq->dhchap_tid = le16_to_cpu(data->t_id);
        if (data->sc_c)
                return NVME_AUTH_DHCHAP_FAILURE_CONCAT_MISMATCH;

        if (data->napd != 1)
                return NVME_AUTH_DHCHAP_FAILURE_HASH_UNUSABLE;

        if (data->auth_protocol[0].dhchap.authid !=
            NVME_AUTH_DHCHAP_AUTH_ID)
                return NVME_AUTH_DHCHAP_FAILURE_INCORRECT_PAYLOAD;

        for (i = 0; i < data->auth_protocol[0].dhchap.halen; i++) {
                u8 host_hmac_id = data->auth_protocol[0].dhchap.idlist[i];

                if (!fallback_hash_id &&
                    crypto_has_shash(nvme_auth_hmac_name(host_hmac_id), 0, 0))
                        fallback_hash_id = host_hmac_id;
                if (ctrl->shash_id != host_hmac_id)
                        continue;
                hash_id = ctrl->shash_id;
                break;
        }
        if (hash_id == 0) {
                if (fallback_hash_id == 0) {
                        pr_debug("%s: ctrl %d qid %d: no usable hash found\n",
                                 __func__, ctrl->cntlid, req->sq->qid);
                        return NVME_AUTH_DHCHAP_FAILURE_HASH_UNUSABLE;
                }
                pr_debug("%s: ctrl %d qid %d: no usable hash found, falling back to %s\n",
                         __func__, ctrl->cntlid, req->sq->qid,
                         nvme_auth_hmac_name(fallback_hash_id));
                ctrl->shash_id = fallback_hash_id;
        }

        dhgid = -1;
        fallback_dhgid = -1;
        for (i = 0; i < data->auth_protocol[0].dhchap.dhlen; i++) {
                int tmp_dhgid = data->auth_protocol[0].dhchap.idlist[i + 30];

                if (tmp_dhgid == ctrl->dh_gid) {
                        dhgid = tmp_dhgid;
                        break;
                }
                if (fallback_dhgid < 0) {
                        const char *kpp = nvme_auth_dhgroup_kpp(tmp_dhgid);

                        if (crypto_has_kpp(kpp, 0, 0))
                                fallback_dhgid = tmp_dhgid;
                }
        }
        if (dhgid < 0) {
                if (fallback_dhgid < 0) {
                        pr_debug("%s: ctrl %d qid %d: no usable DH group found\n",
                                 __func__, ctrl->cntlid, req->sq->qid);
                        return NVME_AUTH_DHCHAP_FAILURE_DHGROUP_UNUSABLE;
                }
                pr_debug("%s: ctrl %d qid %d: configured DH group %s not found\n",
                         __func__, ctrl->cntlid, req->sq->qid,
                         nvme_auth_dhgroup_name(fallback_dhgid));
                ctrl->dh_gid = fallback_dhgid;
        }
        pr_debug("%s: ctrl %d qid %d: selected DH group %s (%d)\n",
                 __func__, ctrl->cntlid, req->sq->qid,
                 nvme_auth_dhgroup_name(ctrl->dh_gid), ctrl->dh_gid);
        return 0;
}
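
/*
 * Process the host's DH-HMAC-CHAP_Reply: derive the session key from the
 * host's DH public value (if present), recompute the expected host
 * response and compare it with the received one. On a match, stash the
 * host challenge (c2) and sequence number (s2) for the optional
 * controller response.
 */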
static u8 nvmet_auth_reply(struct nvmet_req *req, void *d)
{
        struct nvmet_ctrl *ctrl = req->sq->ctrl;
        struct nvmf_auth_dhchap_reply_data *data = d;
        u16 dhvlen = le16_to_cpu(data->dhvlen);
        u8 *response;

        pr_debug("%s: ctrl %d qid %d: data hl %d cvalid %d dhvlen %u\n",
                 __func__, ctrl->cntlid, req->sq->qid,
                 data->hl, data->cvalid, dhvlen);

        if (dhvlen) {
                if (!ctrl->dh_tfm)
                        return NVME_AUTH_DHCHAP_FAILURE_INCORRECT_PAYLOAD;
                if (nvmet_auth_ctrl_sesskey(req, data->rval + 2 * data->hl,
                                            dhvlen) < 0)
                        return NVME_AUTH_DHCHAP_FAILURE_DHGROUP_UNUSABLE;
        }

        response = kmalloc(data->hl, GFP_KERNEL);
        if (!response)
                return NVME_AUTH_DHCHAP_FAILURE_FAILED;

        if (!ctrl->host_key) {
                pr_warn("ctrl %d qid %d no host key\n",
                        ctrl->cntlid, req->sq->qid);
                kfree(response);
                return NVME_AUTH_DHCHAP_FAILURE_FAILED;
        }
        if (nvmet_auth_host_hash(req, response, data->hl) < 0) {
                pr_debug("ctrl %d qid %d host hash failed\n",
                         ctrl->cntlid, req->sq->qid);
                kfree(response);
                return NVME_AUTH_DHCHAP_FAILURE_FAILED;
        }

        if (memcmp(data->rval, response, data->hl)) {
                pr_info("ctrl %d qid %d host response mismatch\n",
                        ctrl->cntlid, req->sq->qid);
                kfree(response);
                return NVME_AUTH_DHCHAP_FAILURE_FAILED;
        }
        kfree(response);
        pr_debug("%s: ctrl %d qid %d host authenticated\n",
                 __func__, ctrl->cntlid, req->sq->qid);
        if (data->cvalid) {
                req->sq->dhchap_c2 = kmemdup(data->rval + data->hl, data->hl,
                                GFP_KERNEL);
                if (!req->sq->dhchap_c2)
                        return NVME_AUTH_DHCHAP_FAILURE_FAILED;

                pr_debug("%s: ctrl %d qid %d challenge %*ph\n",
                         __func__, ctrl->cntlid, req->sq->qid, data->hl,
                         req->sq->dhchap_c2);
        } else {
                req->sq->authenticated = true;
                req->sq->dhchap_c2 = NULL;
        }
        req->sq->dhchap_s2 = le32_to_cpu(data->seqnum);

        return 0;
}
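
/* Extract the failure reason code from a DH-HMAC-CHAP_Failure2 message */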
static u8 nvmet_auth_failure2(void *d)
{
        struct nvmf_auth_dhchap_failure_data *data = d;

        return data->rescode_exp;
}
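
/*
 * Handle the Authentication Send fabrics command: validate the SECP and
 * SPSP fields, copy in the host payload and advance the per-queue
 * DH-HMAC-CHAP state machine. Non-final states (re)arm the authentication
 * expiry work; final states free the negotiation data and, on failure,
 * tear down the controller.
 */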
void nvmet_execute_auth_send(struct nvmet_req *req)
{
        struct nvmet_ctrl *ctrl = req->sq->ctrl;
        struct nvmf_auth_dhchap_success2_data *data;
        void *d;
        u32 tl;
        u16 status = 0;
        u8 dhchap_status;

        if (req->cmd->auth_send.secp != NVME_AUTH_DHCHAP_PROTOCOL_IDENTIFIER) {
                status = NVME_SC_INVALID_FIELD | NVME_STATUS_DNR;
                req->error_loc =
                        offsetof(struct nvmf_auth_send_command, secp);
                goto done;
        }
        if (req->cmd->auth_send.spsp0 != 0x01) {
                status = NVME_SC_INVALID_FIELD | NVME_STATUS_DNR;
                req->error_loc =
                        offsetof(struct nvmf_auth_send_command, spsp0);
                goto done;
        }
        if (req->cmd->auth_send.spsp1 != 0x01) {
                status = NVME_SC_INVALID_FIELD | NVME_STATUS_DNR;
                req->error_loc =
                        offsetof(struct nvmf_auth_send_command, spsp1);
                goto done;
        }
        tl = le32_to_cpu(req->cmd->auth_send.tl);
        if (!tl) {
                status = NVME_SC_INVALID_FIELD | NVME_STATUS_DNR;
                req->error_loc =
                        offsetof(struct nvmf_auth_send_command, tl);
                goto done;
        }
        if (!nvmet_check_transfer_len(req, tl)) {
                pr_debug("%s: transfer length mismatch (%u)\n", __func__, tl);
                return;
        }

        d = kmalloc(tl, GFP_KERNEL);
        if (!d) {
                status = NVME_SC_INTERNAL;
                goto done;
        }

        status = nvmet_copy_from_sgl(req, 0, d, tl);
        if (status)
                goto done_kfree;

        data = d;
        pr_debug("%s: ctrl %d qid %d type %d id %d step %x\n", __func__,
                 ctrl->cntlid, req->sq->qid, data->auth_type, data->auth_id,
                 req->sq->dhchap_step);
        if (data->auth_type != NVME_AUTH_COMMON_MESSAGES &&
            data->auth_type != NVME_AUTH_DHCHAP_MESSAGES)
                goto done_failure1;
        if (data->auth_type == NVME_AUTH_COMMON_MESSAGES) {
                if (data->auth_id == NVME_AUTH_DHCHAP_MESSAGE_NEGOTIATE) {
                        /* Restart negotiation */
                        pr_debug("%s: ctrl %d qid %d reset negotiation\n",
                                 __func__, ctrl->cntlid, req->sq->qid);
                        if (!req->sq->qid) {
                                dhchap_status = nvmet_setup_auth(ctrl);
                                if (dhchap_status) {
                                        pr_err("ctrl %d qid 0 failed to setup re-authentication\n",
                                               ctrl->cntlid);
                                        req->sq->dhchap_status = dhchap_status;
                                        req->sq->dhchap_step =
                                                NVME_AUTH_DHCHAP_MESSAGE_FAILURE1;
                                        goto done_kfree;
                                }
                        }
                        req->sq->dhchap_step =
                                NVME_AUTH_DHCHAP_MESSAGE_NEGOTIATE;
                } else if (data->auth_id != req->sq->dhchap_step)
                        goto done_failure1;
                /* Validate negotiation parameters */
                dhchap_status = nvmet_auth_negotiate(req, d);
                if (dhchap_status == 0)
                        req->sq->dhchap_step =
                                NVME_AUTH_DHCHAP_MESSAGE_CHALLENGE;
                else {
                        req->sq->dhchap_step =
                                NVME_AUTH_DHCHAP_MESSAGE_FAILURE1;
                        req->sq->dhchap_status = dhchap_status;
                }
                goto done_kfree;
        }
        if (data->auth_id != req->sq->dhchap_step) {
                pr_debug("%s: ctrl %d qid %d step mismatch (%d != %d)\n",
                         __func__, ctrl->cntlid, req->sq->qid,
                         data->auth_id, req->sq->dhchap_step);
                goto done_failure1;
        }
        if (le16_to_cpu(data->t_id) != req->sq->dhchap_tid) {
                pr_debug("%s: ctrl %d qid %d invalid transaction %d (expected %d)\n",
                         __func__, ctrl->cntlid, req->sq->qid,
                         le16_to_cpu(data->t_id),
                         req->sq->dhchap_tid);
                req->sq->dhchap_step =
                        NVME_AUTH_DHCHAP_MESSAGE_FAILURE1;
                req->sq->dhchap_status =
                        NVME_AUTH_DHCHAP_FAILURE_INCORRECT_PAYLOAD;
                goto done_kfree;
        }

        switch (data->auth_id) {
        case NVME_AUTH_DHCHAP_MESSAGE_REPLY:
                dhchap_status = nvmet_auth_reply(req, d);
                if (dhchap_status == 0)
                        req->sq->dhchap_step =
                                NVME_AUTH_DHCHAP_MESSAGE_SUCCESS1;
                else {
                        req->sq->dhchap_step =
                                NVME_AUTH_DHCHAP_MESSAGE_FAILURE1;
                        req->sq->dhchap_status = dhchap_status;
                }
                goto done_kfree;
        case NVME_AUTH_DHCHAP_MESSAGE_SUCCESS2:
                req->sq->authenticated = true;
                pr_debug("%s: ctrl %d qid %d ctrl authenticated\n",
                         __func__, ctrl->cntlid, req->sq->qid);
                goto done_kfree;
        case NVME_AUTH_DHCHAP_MESSAGE_FAILURE2:
                dhchap_status = nvmet_auth_failure2(d);
                if (dhchap_status) {
                        pr_warn("ctrl %d qid %d: authentication failed (%d)\n",
                                ctrl->cntlid, req->sq->qid, dhchap_status);
                        req->sq->dhchap_status = dhchap_status;
                        req->sq->authenticated = false;
                }
                goto done_kfree;
        default:
                req->sq->dhchap_status =
                        NVME_AUTH_DHCHAP_FAILURE_INCORRECT_MESSAGE;
                req->sq->dhchap_step =
                        NVME_AUTH_DHCHAP_MESSAGE_FAILURE2;
                req->sq->authenticated = false;
                goto done_kfree;
        }
done_failure1:
        req->sq->dhchap_status = NVME_AUTH_DHCHAP_FAILURE_INCORRECT_MESSAGE;
        req->sq->dhchap_step = NVME_AUTH_DHCHAP_MESSAGE_FAILURE2;

done_kfree:
        kfree(d);
done:
        pr_debug("%s: ctrl %d qid %d dhchap status %x step %x\n", __func__,
                 ctrl->cntlid, req->sq->qid,
                 req->sq->dhchap_status, req->sq->dhchap_step);
        if (status)
                pr_debug("%s: ctrl %d qid %d nvme status %x error loc %d\n",
                         __func__, ctrl->cntlid, req->sq->qid,
                         status, req->error_loc);
        if (req->sq->dhchap_step != NVME_AUTH_DHCHAP_MESSAGE_SUCCESS2 &&
            req->sq->dhchap_step != NVME_AUTH_DHCHAP_MESSAGE_FAILURE2) {
                unsigned long auth_expire_secs = ctrl->kato ? ctrl->kato : 120;

                mod_delayed_work(system_wq, &req->sq->auth_expired_work,
                                 auth_expire_secs * HZ);
                goto complete;
        }
        /* Final states, clear up variables */
        nvmet_auth_sq_free(req->sq);
        if (req->sq->dhchap_step == NVME_AUTH_DHCHAP_MESSAGE_FAILURE2)
                nvmet_ctrl_fatal_error(ctrl);

complete:
        nvmet_req_complete(req, status);
}
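
/*
 * Build a DH-HMAC-CHAP_Challenge message: pick a fresh sequence number,
 * generate a random challenge value of the selected hash length and, if a
 * DH group was negotiated, append the controller's DH public value.
 */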
static int nvmet_auth_challenge(struct nvmet_req *req, void *d, int al)
{
        struct nvmf_auth_dhchap_challenge_data *data = d;
        struct nvmet_ctrl *ctrl = req->sq->ctrl;
        int ret = 0;
        int hash_len = nvme_auth_hmac_hash_len(ctrl->shash_id);
        int data_size = sizeof(*d) + hash_len;

        if (ctrl->dh_tfm)
                data_size += ctrl->dh_keysize;
        if (al < data_size) {
                pr_debug("%s: buffer too small (al %d need %d)\n", __func__,
                         al, data_size);
                return -EINVAL;
        }
        memset(data, 0, data_size);
        req->sq->dhchap_s1 = nvme_auth_get_seqnum();
        data->auth_type = NVME_AUTH_DHCHAP_MESSAGES;
        data->auth_id = NVME_AUTH_DHCHAP_MESSAGE_CHALLENGE;
        data->t_id = cpu_to_le16(req->sq->dhchap_tid);
        data->hashid = ctrl->shash_id;
        data->hl = hash_len;
        data->seqnum = cpu_to_le32(req->sq->dhchap_s1);
        req->sq->dhchap_c1 = kmalloc(data->hl, GFP_KERNEL);
        if (!req->sq->dhchap_c1)
                return -ENOMEM;
        get_random_bytes(req->sq->dhchap_c1, data->hl);
        memcpy(data->cval, req->sq->dhchap_c1, data->hl);
        if (ctrl->dh_tfm) {
                data->dhgid = ctrl->dh_gid;
                data->dhvlen = cpu_to_le16(ctrl->dh_keysize);
                ret = nvmet_auth_ctrl_exponential(req, data->cval + data->hl,
                                                  ctrl->dh_keysize);
        }
        pr_debug("%s: ctrl %d qid %d seq %d transaction %d hl %d dhvlen %zu\n",
                 __func__, ctrl->cntlid, req->sq->qid, req->sq->dhchap_s1,
                 req->sq->dhchap_tid, data->hl, ctrl->dh_keysize);
        return ret;
}
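
/*
 * Build a DH-HMAC-CHAP_Success1 message, including the controller
 * response when the host requested bidirectional authentication (a host
 * challenge c2 was stored).
 */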
static int nvmet_auth_success1(struct nvmet_req *req, void *d, int al)
{
        struct nvmf_auth_dhchap_success1_data *data = d;
        struct nvmet_ctrl *ctrl = req->sq->ctrl;
        int hash_len = nvme_auth_hmac_hash_len(ctrl->shash_id);

        WARN_ON(al < sizeof(*data));
        memset(data, 0, sizeof(*data));
        data->auth_type = NVME_AUTH_DHCHAP_MESSAGES;
        data->auth_id = NVME_AUTH_DHCHAP_MESSAGE_SUCCESS1;
        data->t_id = cpu_to_le16(req->sq->dhchap_tid);
        data->hl = hash_len;
        if (req->sq->dhchap_c2) {
                if (!ctrl->ctrl_key) {
                        pr_warn("ctrl %d qid %d no ctrl key\n",
                                ctrl->cntlid, req->sq->qid);
                        return NVME_AUTH_DHCHAP_FAILURE_FAILED;
                }
                if (nvmet_auth_ctrl_hash(req, data->rval, data->hl))
                        return NVME_AUTH_DHCHAP_FAILURE_HASH_UNUSABLE;
                data->rvalid = 1;
                pr_debug("ctrl %d qid %d response %*ph\n",
                         ctrl->cntlid, req->sq->qid, data->hl, data->rval);
        }
        return 0;
}
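
/* Build a DH-HMAC-CHAP_Failure1 message from the current dhchap_status */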
static void nvmet_auth_failure1(struct nvmet_req *req, void *d, int al)
{
        struct nvmf_auth_dhchap_failure_data *data = d;

        WARN_ON(al < sizeof(*data));
        data->auth_type = NVME_AUTH_COMMON_MESSAGES;
        data->auth_id = NVME_AUTH_DHCHAP_MESSAGE_FAILURE1;
        data->t_id = cpu_to_le16(req->sq->dhchap_tid);
        data->rescode = NVME_AUTH_DHCHAP_FAILURE_REASON_FAILED;
        data->rescode_exp = req->sq->dhchap_status;
}
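
/*
 * Handle the Authentication Receive fabrics command: validate the SECP
 * and SPSP fields, build the next message (challenge, success1 or
 * failure1) for the current DH-HMAC-CHAP step and copy it back to the
 * host.
 */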
void nvmet_execute_auth_receive(struct nvmet_req *req)
{
        struct nvmet_ctrl *ctrl = req->sq->ctrl;
        void *d;
        u32 al;
        u16 status = 0;

        if (req->cmd->auth_receive.secp != NVME_AUTH_DHCHAP_PROTOCOL_IDENTIFIER) {
                status = NVME_SC_INVALID_FIELD | NVME_STATUS_DNR;
                req->error_loc =
                        offsetof(struct nvmf_auth_receive_command, secp);
                goto done;
        }
        if (req->cmd->auth_receive.spsp0 != 0x01) {
                status = NVME_SC_INVALID_FIELD | NVME_STATUS_DNR;
                req->error_loc =
                        offsetof(struct nvmf_auth_receive_command, spsp0);
                goto done;
        }
        if (req->cmd->auth_receive.spsp1 != 0x01) {
                status = NVME_SC_INVALID_FIELD | NVME_STATUS_DNR;
                req->error_loc =
                        offsetof(struct nvmf_auth_receive_command, spsp1);
                goto done;
        }
        al = le32_to_cpu(req->cmd->auth_receive.al);
        if (!al) {
                status = NVME_SC_INVALID_FIELD | NVME_STATUS_DNR;
                req->error_loc =
                        offsetof(struct nvmf_auth_receive_command, al);
                goto done;
        }
        if (!nvmet_check_transfer_len(req, al)) {
                pr_debug("%s: transfer length mismatch (%u)\n", __func__, al);
                return;
        }

        d = kmalloc(al, GFP_KERNEL);
        if (!d) {
                status = NVME_SC_INTERNAL;
                goto done;
        }
        pr_debug("%s: ctrl %d qid %d step %x\n", __func__,
                 ctrl->cntlid, req->sq->qid, req->sq->dhchap_step);
        switch (req->sq->dhchap_step) {
        case NVME_AUTH_DHCHAP_MESSAGE_CHALLENGE:
                if (nvmet_auth_challenge(req, d, al) < 0) {
                        pr_warn("ctrl %d qid %d: challenge error (%d)\n",
                                ctrl->cntlid, req->sq->qid, status);
                        status = NVME_SC_INTERNAL;
                        break;
                }
                req->sq->dhchap_step = NVME_AUTH_DHCHAP_MESSAGE_REPLY;
                break;
        case NVME_AUTH_DHCHAP_MESSAGE_SUCCESS1:
                status = nvmet_auth_success1(req, d, al);
                if (status) {
                        req->sq->dhchap_status = status;
                        req->sq->authenticated = false;
                        nvmet_auth_failure1(req, d, al);
                        pr_warn("ctrl %d qid %d: success1 status (%x)\n",
                                ctrl->cntlid, req->sq->qid,
                                req->sq->dhchap_status);
                        break;
                }
                req->sq->dhchap_step = NVME_AUTH_DHCHAP_MESSAGE_SUCCESS2;
                break;
        case NVME_AUTH_DHCHAP_MESSAGE_FAILURE1:
                req->sq->authenticated = false;
                nvmet_auth_failure1(req, d, al);
                pr_warn("ctrl %d qid %d failure1 (%x)\n",
                        ctrl->cntlid, req->sq->qid, req->sq->dhchap_status);
                break;
        default:
                pr_warn("ctrl %d qid %d unhandled step (%d)\n",
                        ctrl->cntlid, req->sq->qid, req->sq->dhchap_step);
                req->sq->dhchap_step = NVME_AUTH_DHCHAP_MESSAGE_FAILURE1;
                req->sq->dhchap_status = NVME_AUTH_DHCHAP_FAILURE_FAILED;
                nvmet_auth_failure1(req, d, al);
                status = 0;
                break;
        }

        status = nvmet_copy_to_sgl(req, 0, d, al);
        kfree(d);
done:
        if (req->sq->dhchap_step == NVME_AUTH_DHCHAP_MESSAGE_SUCCESS2)
                nvmet_auth_sq_free(req->sq);
        else if (req->sq->dhchap_step == NVME_AUTH_DHCHAP_MESSAGE_FAILURE1) {
                nvmet_auth_sq_free(req->sq);
                nvmet_ctrl_fatal_error(ctrl);
        }
        nvmet_req_complete(req, status);
}