/*
 * This file is part of the Chelsio T6 Crypto driver for Linux.
 *
 * Copyright (c) 2003-2016 Chelsio Communications, Inc. All rights reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses.  You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 *     Redistribution and use in source and binary forms, with or
 *     without modification, are permitted provided that the following
 *     conditions are met:
 *
 *      - Redistributions of source code must retain the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer.
 *
 *      - Redistributions in binary form must reproduce the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer in the documentation and/or other materials
 *        provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Written and Maintained by:
 *	Manoj Malviya (manojmalviya@chelsio.com)
 *	Atul Gupta (atul.gupta@chelsio.com)
 *	Jitendra Lulla (jlulla@chelsio.com)
 *	Yeshaswi M R Gowda (yeshaswi@chelsio.com)
 *	Harsh Jain (harsh@chelsio.com)
 */
#define pr_fmt(fmt) "chcr:" fmt

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/skbuff.h>
#include <linux/rtnetlink.h>
#include <linux/highmem.h>
#include <linux/scatterlist.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <crypto/sha.h>
#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/null.h>
#include <crypto/internal/skcipher.h>
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "chcr_core.h"
#include "chcr_algo.h"
#include "chcr_crypto.h"
static inline struct chcr_aead_ctx *AEAD_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->aeadctx;
}

static inline struct ablk_ctx *ABLK_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->ablkctx;
}

static inline struct hmac_ctx *HMAC_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->hmacctx;
}

static inline struct chcr_gcm_ctx *GCM_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->gcm;
}

static inline struct chcr_authenc_ctx *AUTHENC_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->authenc;
}

static inline struct uld_ctx *ULD_CTX(struct chcr_context *ctx)
{
	return ctx->dev->u_ctx;
}
static inline int is_ofld_imm(const struct sk_buff *skb)
{
	return (skb->len <= CRYPTO_MAX_IMM_TX_PKT_LEN);
}

/*
 *	sgl_len - calculates the size of an SGL of the given capacity
 *	@n: the number of SGL entries
 *	Calculates the number of flits needed for a scatter/gather list that
 *	can hold the given number of entries.
 */
static inline unsigned int sgl_len(unsigned int n)
{
	return (3 * n) / 2 + (n & 1) + 2;
}
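/*
 * Example (a sketch, using the formula exactly as written above): an SGL
 * with n = 4 entries needs (3 * 4) / 2 + (4 & 1) + 2 = 8 flits, i.e. 64
 * bytes, since each flit is 8 bytes.
 */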
static void chcr_verify_tag(struct aead_request *req, u8 *input, int *err)
{
	u8 temp[SHA512_DIGEST_SIZE];
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int authsize = crypto_aead_authsize(tfm);
	struct cpl_fw6_pld *fw6_pld;
	int cmp = 0;

	fw6_pld = (struct cpl_fw6_pld *)input;
	if ((get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) ||
	    (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_GCM)) {
		cmp = memcmp(&fw6_pld->data[2], (fw6_pld + 1), authsize);
	} else {
		sg_pcopy_to_buffer(req->src, sg_nents(req->src), temp,
				   authsize, req->assoclen +
				   req->cryptlen - authsize);
		cmp = memcmp(temp, (fw6_pld + 1), authsize);
	}
	if (cmp)
		*err = -EBADMSG;
	else
		*err = 0;
}
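/*
 * Note: chcr_verify_tag() is the VERIFY_SW path. The hardware returns its
 * computed tag in the CPL_FW6_PLD, and the driver compares it either against
 * the copy in fw6_pld->data[2] (GCM/RFC4106) or against the tag stored at
 * the end of req->src, flagging an authentication failure on mismatch.
 */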
/*
 *	chcr_handle_resp - Unmap the DMA buffers associated with the request
 *	@req: crypto request
 */
int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
		     int err)
{
	struct crypto_tfm *tfm = req->tfm;
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_req_ctx ctx_req;
	struct cpl_fw6_pld *fw6_pld;
	unsigned int digestsize, updated_digestsize;

	switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		ctx_req.req.aead_req = (struct aead_request *)req;
		ctx_req.ctx.reqctx = aead_request_ctx(ctx_req.req.aead_req);
		dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.req.aead_req->dst,
			     ctx_req.ctx.reqctx->dst_nents, DMA_FROM_DEVICE);
		if (ctx_req.ctx.reqctx->skb) {
			kfree_skb(ctx_req.ctx.reqctx->skb);
			ctx_req.ctx.reqctx->skb = NULL;
		}
		if (ctx_req.ctx.reqctx->verify == VERIFY_SW) {
			chcr_verify_tag(ctx_req.req.aead_req, input,
					&err);
			ctx_req.ctx.reqctx->verify = VERIFY_HW;
		}
		break;

	case CRYPTO_ALG_TYPE_BLKCIPHER:
		ctx_req.req.ablk_req = (struct ablkcipher_request *)req;
		ctx_req.ctx.ablk_ctx =
			ablkcipher_request_ctx(ctx_req.req.ablk_req);
		fw6_pld = (struct cpl_fw6_pld *)input;
		memcpy(ctx_req.req.ablk_req->info, &fw6_pld->data[2],
		       AES_BLOCK_SIZE);
		dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.req.ablk_req->dst,
			     ctx_req.ctx.ablk_ctx->dst_nents, DMA_FROM_DEVICE);
		if (ctx_req.ctx.ablk_ctx->skb) {
			kfree_skb(ctx_req.ctx.ablk_ctx->skb);
			ctx_req.ctx.ablk_ctx->skb = NULL;
		}
		break;

	case CRYPTO_ALG_TYPE_AHASH:
		ctx_req.req.ahash_req = (struct ahash_request *)req;
		ctx_req.ctx.ahash_ctx =
			ahash_request_ctx(ctx_req.req.ahash_req);
		digestsize =
			crypto_ahash_digestsize(crypto_ahash_reqtfm(
						ctx_req.req.ahash_req));
		updated_digestsize = digestsize;
		if (digestsize == SHA224_DIGEST_SIZE)
			updated_digestsize = SHA256_DIGEST_SIZE;
		else if (digestsize == SHA384_DIGEST_SIZE)
			updated_digestsize = SHA512_DIGEST_SIZE;
		if (ctx_req.ctx.ahash_ctx->skb) {
			kfree_skb(ctx_req.ctx.ahash_ctx->skb);
			ctx_req.ctx.ahash_ctx->skb = NULL;
		}
		if (ctx_req.ctx.ahash_ctx->result == 1) {
			ctx_req.ctx.ahash_ctx->result = 0;
			memcpy(ctx_req.req.ahash_req->result, input +
			       sizeof(struct cpl_fw6_pld),
			       digestsize);
		} else {
			memcpy(ctx_req.ctx.ahash_ctx->partial_hash, input +
			       sizeof(struct cpl_fw6_pld),
			       updated_digestsize);
		}
		break;
	}
	return err;
}
/*
 *	calc_tx_flits_ofld - calculate # of flits for an offload packet
 *
 *	Returns the number of flits needed for the given offload packet.
 *	These packets are already fully constructed and no additional headers
 *	will be added.
 */
static inline unsigned int calc_tx_flits_ofld(const struct sk_buff *skb)
{
	unsigned int flits, cnt;

	if (is_ofld_imm(skb))
		return DIV_ROUND_UP(skb->len, 8);

	flits = skb_transport_offset(skb) / 8;   /* headers */
	cnt = skb_shinfo(skb)->nr_frags;
	if (skb_tail_pointer(skb) != skb_transport_header(skb))
		cnt++;	/* linear data after the headers counts as one entry */
	return flits + sgl_len(cnt);
}
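/*
 * get_aes_decrypt_key() below expands the AES key schedule in software and
 * writes out the final Nk round-key words: AES decryption starts from the
 * last round key, so the hardware is handed the tail of the schedule rather
 * than the raw cipher key.
 */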
static inline void get_aes_decrypt_key(unsigned char *dec_key,
				       const unsigned char *key,
				       unsigned int keylength)
{
	u32 temp;
	u32 w_ring[8];	/* ring of the last nk expanded key words */
	int i, j, k;
	u8 nr, nk;

	switch (keylength) {
	case AES_KEYLENGTH_128BIT:
		nk = KEYLENGTH_4BYTES;
		nr = NUMBER_OF_ROUNDS_10;
		break;
	case AES_KEYLENGTH_192BIT:
		nk = KEYLENGTH_6BYTES;
		nr = NUMBER_OF_ROUNDS_12;
		break;
	case AES_KEYLENGTH_256BIT:
		nk = KEYLENGTH_8BYTES;
		nr = NUMBER_OF_ROUNDS_14;
		break;
	default:
		return;
	}
	for (i = 0; i < nk; i++)
		w_ring[i] = be32_to_cpu(*(u32 *)&key[4 * i]);

	i = 0;
	temp = w_ring[nk - 1];
	while (i + nk < (nr + 1) * 4) {
		if (!(i % nk)) {
			/* RotWord, SubWord and round constant */
			temp = (temp << 8) | (temp >> 24);
			temp = aes_ks_subword(temp);
			temp ^= round_constant[i / nk];
		} else if (nk == 8 && (i % 4 == 0)) {
			temp = aes_ks_subword(temp);
		}
		w_ring[i % nk] ^= temp;
		temp = w_ring[i % nk];
		i++;
	}
	i--;
	for (k = 0, j = i % nk; k < nk; k++) {
		*((u32 *)dec_key + k) = htonl(w_ring[j]);
		j--;
		if (j < 0)
			j += nk;
	}
}
static struct crypto_shash *chcr_alloc_shash(unsigned int ds)
{
	struct crypto_shash *base_hash = NULL;

	switch (ds) {
	case SHA1_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha1", 0, 0);
		break;
	case SHA224_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha224", 0, 0);
		break;
	case SHA256_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha256", 0, 0);
		break;
	case SHA384_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha384", 0, 0);
		break;
	case SHA512_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha512", 0, 0);
		break;
	}

	return base_hash;
}
static int chcr_compute_partial_hash(struct shash_desc *desc,
				     char *iopad, char *result_hash,
				     int digest_size)
{
	struct sha1_state sha1_st;
	struct sha256_state sha256_st;
	struct sha512_state sha512_st;
	int error;

	if (digest_size == SHA1_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA1_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha1_st);
		memcpy(result_hash, sha1_st.state, SHA1_DIGEST_SIZE);
	} else if (digest_size == SHA224_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);

	} else if (digest_size == SHA256_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);

	} else if (digest_size == SHA384_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);

	} else if (digest_size == SHA512_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
	} else {
		error = -EINVAL;
		pr_err("Unknown digest size %d\n", digest_size);
	}
	return error;
}
static void chcr_change_order(char *buf, int ds)
{
	int i;

	if (ds == SHA512_DIGEST_SIZE) {
		for (i = 0; i < (ds / sizeof(u64)); i++)
			*((__be64 *)buf + i) =
				cpu_to_be64(*((u64 *)buf + i));
	} else {
		for (i = 0; i < (ds / sizeof(u32)); i++)
			*((__be32 *)buf + i) =
				cpu_to_be32(*((u32 *)buf + i));
	}
}
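/*
 * chcr_change_order() converts the partial hash exported by the software
 * shash (host-endian state words) into the big-endian layout expected in the
 * hardware key context; SHA-384/512 use 64-bit state words, the others use
 * 32-bit words.
 */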
static inline int is_hmac(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(__crypto_ahash_alg(alg), struct chcr_alg_template,
			     alg.hash);

	if (chcr_crypto_alg->type == CRYPTO_ALG_TYPE_HMAC)
		return 1;
	return 0;
}
static void write_phys_cpl(struct cpl_rx_phys_dsgl *phys_cpl,
			   struct scatterlist *sg,
			   struct phys_sge_parm *sg_param)
{
	struct phys_sge_pairs *to;
	int out_buf_size = sg_param->obsize;
	unsigned int nents = sg_param->nents, i, j = 0;

	phys_cpl->op_to_tid = htonl(CPL_RX_PHYS_DSGL_OPCODE_V(CPL_RX_PHYS_DSGL)
				    | CPL_RX_PHYS_DSGL_ISRDMA_V(0));
	phys_cpl->pcirlxorder_to_noofsgentr =
		htonl(CPL_RX_PHYS_DSGL_PCIRLXORDER_V(0) |
		      CPL_RX_PHYS_DSGL_PCINOSNOOP_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNTENB_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNT_V(0) |
		      CPL_RX_PHYS_DSGL_DCAID_V(0) |
		      CPL_RX_PHYS_DSGL_NOOFSGENTR_V(nents));
	phys_cpl->rss_hdr_int.opcode = CPL_RX_PHYS_ADDR;
	phys_cpl->rss_hdr_int.qid = htons(sg_param->qid);
	phys_cpl->rss_hdr_int.hash_val = 0;
	to = (struct phys_sge_pairs *)((unsigned char *)phys_cpl +
				       sizeof(struct cpl_rx_phys_dsgl));

	for (i = 0; nents; to++) {
		for (j = 0; j < 8 && nents; j++, nents--) {
			out_buf_size -= sg_dma_len(sg);
			to->len[j] = htons(sg_dma_len(sg));
			to->addr[j] = cpu_to_be64(sg_dma_address(sg));
			sg = sg_next(sg);
		}
	}
	if (out_buf_size) {
		/* fold any residual output length into the last entry */
		j--;
		to--;
		to->len[j] = htons(ntohs(to->len[j]) + (out_buf_size));
	}
}
static inline int map_writesg_phys_cpl(struct device *dev,
					struct cpl_rx_phys_dsgl *phys_cpl,
					struct scatterlist *sg,
					struct phys_sge_parm *sg_param)
{
	if (!sg || !sg_param->nents)
		return -EINVAL;

	sg_param->nents = dma_map_sg(dev, sg, sg_param->nents, DMA_FROM_DEVICE);
	if (sg_param->nents == 0) {
		pr_err("CHCR : DMA mapping failed\n");
		return -EINVAL;
	}
	write_phys_cpl(phys_cpl, sg, sg_param);
	return 0;
}
static inline int get_aead_subtype(struct crypto_aead *aead)
{
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.aead);
	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

static inline int get_cryptoalg_subtype(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.crypto);

	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}
static inline void write_buffer_to_skb(struct sk_buff *skb,
				       unsigned int *frags,
				       char *bfr, u8 bfr_len)
{
	skb->len += bfr_len;
	skb->data_len += bfr_len;
	skb->truesize += bfr_len;
	get_page(virt_to_page(bfr));
	skb_fill_page_desc(skb, *frags, virt_to_page(bfr),
			   offset_in_page(bfr), bfr_len);
	(*frags)++;
}

static inline void
write_sg_to_skb(struct sk_buff *skb, unsigned int *frags,
		struct scatterlist *sg, unsigned int count)
{
	struct page *spage;
	unsigned int page_len;

	skb->len += count;
	skb->data_len += count;
	skb->truesize += count;

	while (count > 0) {
		if (!sg || (!(sg->length)))
			break;
		spage = sg_page(sg);
		get_page(spage);
		page_len = min(sg->length, count);
		skb_fill_page_desc(skb, *frags, spage, sg->offset, page_len);
		(*frags)++;
		count -= page_len;
		sg = sg_next(sg);
	}
}
static int generate_copy_rrkey(struct ablk_ctx *ablkctx,
			       struct _key_ctx *key_ctx)
{
	if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
		memcpy(key_ctx->key, ablkctx->rrkey, ablkctx->enckey_len);
	} else {
		memcpy(key_ctx->key,
		       ablkctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->enckey_len >> 1);
		memcpy(key_ctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->rrkey, ablkctx->enckey_len >> 1);
	}
	return 0;
}
static inline void create_wreq(struct chcr_context *ctx,
			       struct chcr_wr *chcr_req,
			       void *req, struct sk_buff *skb,
			       int kctx_len, int hash_sz,
			       int is_iv,
			       unsigned int sc_len)
{
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	int iv_loc = IV_DSGL;
	int qid = u_ctx->lldi.rxq_ids[ctx->tx_channel_id];
	unsigned int immdatalen = 0, nr_frags = 0;

	if (is_ofld_imm(skb)) {
		immdatalen = skb->data_len;
		iv_loc = IV_IMMEDIATE;
	} else {
		nr_frags = skb_shinfo(skb)->nr_frags;
	}

	chcr_req->wreq.op_to_cctx_size = FILL_WR_OP_CCTX_SIZE(immdatalen,
				((sizeof(chcr_req->key_ctx) + kctx_len) >> 4));
	chcr_req->wreq.pld_size_hash_size =
		htonl(FW_CRYPTO_LOOKASIDE_WR_PLD_SIZE_V(sgl_lengths[nr_frags]) |
		      FW_CRYPTO_LOOKASIDE_WR_HASH_SIZE_V(hash_sz));
	chcr_req->wreq.len16_pkd =
		htonl(FW_CRYPTO_LOOKASIDE_WR_LEN16_V(DIV_ROUND_UP(
				    (calc_tx_flits_ofld(skb) * 8), 16)));
	chcr_req->wreq.cookie = cpu_to_be64((uintptr_t)req);
	chcr_req->wreq.rx_chid_to_rx_q_id =
		FILL_WR_RX_Q_ID(ctx->dev->tx_channel_id, qid,
				is_iv ? iv_loc : IV_NOP);

	chcr_req->ulptx.cmd_dest = FILL_ULPTX_CMD_DEST(ctx->dev->tx_channel_id);
	chcr_req->ulptx.len = htonl((DIV_ROUND_UP((calc_tx_flits_ofld(skb) * 8),
					16) - ((sizeof(chcr_req->wreq)) >> 4)));

	chcr_req->sc_imm.cmd_more = FILL_CMD_MORE(immdatalen);
	chcr_req->sc_imm.len = cpu_to_be32(sizeof(struct cpl_tx_sec_pdu) +
					   sizeof(chcr_req->key_ctx) +
					   kctx_len + sc_len + immdatalen);
}
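/*
 * As create_wreq() above shows, a crypto work request is laid out as the
 * FW_CRYPTO_LOOKASIDE wreq header, a ULPTX command, an immediate-data header
 * (sc_imm) and then CPL_TX_SEC_PDU + key context + kctx_len + sc_len + any
 * immediate payload; sc_imm.len carries exactly that sum.
 */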
/*
 *	create_cipher_wr - form the WR for cipher operations
 *	@req: cipher request.
 *	@ctx: crypto driver context of the request.
 *	@qid: ingress qid where response of this WR should be received.
 *	@op_type: encryption or decryption
 */
static struct sk_buff
*create_cipher_wr(struct ablkcipher_request *req,
		  unsigned short qid,
		  unsigned short op_type)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	struct phys_sge_parm sg_param;
	unsigned int frags = 0, transhdr_len, phys_dsgl;
	unsigned int ivsize = crypto_ablkcipher_ivsize(tfm), kctx_len;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
			GFP_ATOMIC;

	if (!req->info)
		return ERR_PTR(-EINVAL);
	reqctx->dst_nents = sg_nents_for_len(req->dst, req->nbytes);
	if (reqctx->dst_nents <= 0) {
		pr_err("AES:Invalid Destination sg lists\n");
		return ERR_PTR(-EINVAL);
	}
	if ((ablkctx->enckey_len == 0) || (ivsize > AES_BLOCK_SIZE) ||
	    (req->nbytes <= 0) || (req->nbytes % AES_BLOCK_SIZE)) {
		pr_err("AES: Invalid value of Key Len %d nbytes %d IV Len %d\n",
		       ablkctx->enckey_len, req->nbytes, ivsize);
		return ERR_PTR(-EINVAL);
	}

	phys_dsgl = get_space_for_phys_dsgl(reqctx->dst_nents);

	kctx_len = (DIV_ROUND_UP(ablkctx->enckey_len, 16) * 16);
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, phys_dsgl);
	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb)
		return ERR_PTR(-ENOMEM);
	skb_reserve(skb, sizeof(struct sge_opaque_hdr));
	chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
	memset(chcr_req, 0, transhdr_len);
	chcr_req->sec_cpl.op_ivinsrtofst =
		FILL_SEC_CPL_OP_IVINSR(ctx->dev->tx_channel_id, 2, 1);

	chcr_req->sec_cpl.pldlen = htonl(ivsize + req->nbytes);
	chcr_req->sec_cpl.aadstart_cipherstop_hi =
			FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, ivsize + 1, 0);

	chcr_req->sec_cpl.cipherstop_lo_authinsert =
			FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type, 0,
							 ablkctx->ciph_mode,
							 0, 0, ivsize >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 0,
							  0, 1, phys_dsgl);

	chcr_req->key_ctx.ctx_hdr = ablkctx->key_ctx_hdr;
	if (op_type == CHCR_DECRYPT_OP) {
		generate_copy_rrkey(ablkctx, &chcr_req->key_ctx);
	} else {
		if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
			memcpy(chcr_req->key_ctx.key, ablkctx->key,
			       ablkctx->enckey_len);
		} else {
			memcpy(chcr_req->key_ctx.key, ablkctx->key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->enckey_len >> 1);
			memcpy(chcr_req->key_ctx.key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->key,
			       ablkctx->enckey_len >> 1);
		}
	}
	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	sg_param.nents = reqctx->dst_nents;
	sg_param.obsize = req->nbytes;
	sg_param.qid = qid;
	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, req->dst,
				 &sg_param))
		goto map_fail;

	skb_set_transport_header(skb, transhdr_len);
	memcpy(reqctx->iv, req->info, ivsize);
	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
	write_sg_to_skb(skb, &frags, req->src, req->nbytes);
	create_wreq(ctx, chcr_req, req, skb, kctx_len, 0, 1,
		    sizeof(struct cpl_rx_phys_dsgl) + phys_dsgl);
	reqctx->skb = skb;
	skb_get(skb);
	return skb;
map_fail:
	kfree_skb(skb);
	return ERR_PTR(-ENOMEM);
}
static int chcr_aes_cbc_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	unsigned int ck_size, context_size;
	u16 alignment = 0;

	if (keylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keylen == AES_KEYSIZE_192) {
		alignment = 8;
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		goto badkey_err;
	}
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, keylen << 3);
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CBC;
	return 0;
badkey_err:
	crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	ablkctx->enckey_len = 0;
	return -EINVAL;
}
static int cxgb4_is_crypto_q_full(struct net_device *dev, unsigned int idx)
{
	struct adapter *adap = netdev2adap(dev);
	struct sge_uld_txq_info *txq_info =
		adap->sge.uld_txq_info[CXGB4_TX_CRYPTO];
	struct sge_uld_txq *txq;
	int ret = 0;

	local_bh_disable();
	txq = &txq_info->uldtxq[idx];
	spin_lock(&txq->sendq.lock);
	if (txq->full)
		ret = -1;
	spin_unlock(&txq->sendq.lock);
	local_bh_enable();
	return ret;
}
*req
)
717 struct crypto_ablkcipher
*tfm
= crypto_ablkcipher_reqtfm(req
);
718 struct chcr_context
*ctx
= crypto_ablkcipher_ctx(tfm
);
719 struct uld_ctx
*u_ctx
= ULD_CTX(ctx
);
722 if (unlikely(cxgb4_is_crypto_q_full(u_ctx
->lldi
.ports
[0],
723 ctx
->tx_channel_id
))) {
724 if (!(req
->base
.flags
& CRYPTO_TFM_REQ_MAY_BACKLOG
))
728 skb
= create_cipher_wr(req
, u_ctx
->lldi
.rxq_ids
[ctx
->tx_channel_id
],
731 pr_err("chcr : %s : Failed to form WR. No memory\n", __func__
);
734 skb
->dev
= u_ctx
->lldi
.ports
[0];
735 set_wr_txq(skb
, CPL_PRIORITY_DATA
, ctx
->tx_channel_id
);
740 static int chcr_aes_decrypt(struct ablkcipher_request
*req
)
742 struct crypto_ablkcipher
*tfm
= crypto_ablkcipher_reqtfm(req
);
743 struct chcr_context
*ctx
= crypto_ablkcipher_ctx(tfm
);
744 struct uld_ctx
*u_ctx
= ULD_CTX(ctx
);
747 if (unlikely(cxgb4_is_crypto_q_full(u_ctx
->lldi
.ports
[0],
748 ctx
->tx_channel_id
))) {
749 if (!(req
->base
.flags
& CRYPTO_TFM_REQ_MAY_BACKLOG
))
753 skb
= create_cipher_wr(req
, u_ctx
->lldi
.rxq_ids
[0],
756 pr_err("chcr : %s : Failed to form WR. No memory\n", __func__
);
759 skb
->dev
= u_ctx
->lldi
.ports
[0];
760 set_wr_txq(skb
, CPL_PRIORITY_DATA
, ctx
->tx_channel_id
);
765 static int chcr_device_init(struct chcr_context
*ctx
)
767 struct uld_ctx
*u_ctx
;
769 int err
= 0, rxq_perchan
, rxq_idx
;
771 id
= smp_processor_id();
773 err
= assign_chcr_device(&ctx
->dev
);
775 pr_err("chcr device assignment fails\n");
778 u_ctx
= ULD_CTX(ctx
);
779 rxq_perchan
= u_ctx
->lldi
.nrxq
/ u_ctx
->lldi
.nchan
;
780 rxq_idx
= ctx
->dev
->tx_channel_id
* rxq_perchan
;
781 rxq_idx
+= id
% rxq_perchan
;
782 spin_lock(&ctx
->dev
->lock_chcr_dev
);
783 ctx
->tx_channel_id
= rxq_idx
;
784 ctx
->dev
->tx_channel_id
= !ctx
->dev
->tx_channel_id
;
785 spin_unlock(&ctx
->dev
->lock_chcr_dev
);
static int chcr_cra_init(struct crypto_tfm *tfm)
{
	tfm->crt_ablkcipher.reqsize = sizeof(struct chcr_blkcipher_req_ctx);
	return chcr_device_init(crypto_tfm_ctx(tfm));
}
static int get_alg_config(struct algo_param *params,
			  unsigned int auth_size)
{
	switch (auth_size) {
	case SHA1_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_160;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA1;
		params->result_size = SHA1_DIGEST_SIZE;
		break;
	case SHA224_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA224;
		params->result_size = SHA256_DIGEST_SIZE;
		break;
	case SHA256_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA256;
		params->result_size = SHA256_DIGEST_SIZE;
		break;
	case SHA384_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_384;
		params->result_size = SHA512_DIGEST_SIZE;
		break;
	case SHA512_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_512;
		params->result_size = SHA512_DIGEST_SIZE;
		break;
	default:
		pr_err("chcr : ERROR, unsupported digest size\n");
		return -EINVAL;
	}
	return 0;
}
static inline void chcr_free_shash(struct crypto_shash *base_hash)
{
	crypto_free_shash(base_hash);
}
/*
 *	create_hash_wr - Create hash work request
 *	@req: Cipher req base
 */
static struct sk_buff *create_hash_wr(struct ahash_request *req,
				      struct hash_wr_param *param)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	unsigned int frags = 0, transhdr_len, iopad_alignment = 0;
	unsigned int digestsize = crypto_ahash_digestsize(tfm);
	unsigned int kctx_len = 0;
	u8 hash_size_in_response = 0;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;

	iopad_alignment = KEYCTX_ALIGN_PAD(digestsize);
	kctx_len = param->alg_prm.result_size + iopad_alignment;
	if (param->opad_needed)
		kctx_len += param->alg_prm.result_size + iopad_alignment;

	if (req_ctx->result)
		hash_size_in_response = digestsize;
	else
		hash_size_in_response = param->alg_prm.result_size;
	transhdr_len = HASH_TRANSHDR_SIZE(kctx_len);
	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb)
		return skb;

	skb_reserve(skb, sizeof(struct sge_opaque_hdr));
	chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
	memset(chcr_req, 0, transhdr_len);

	chcr_req->sec_cpl.op_ivinsrtofst =
		FILL_SEC_CPL_OP_IVINSR(ctx->dev->tx_channel_id, 2, 0);
	chcr_req->sec_cpl.pldlen = htonl(param->bfr_len + param->sg_len);

	chcr_req->sec_cpl.aadstart_cipherstop_hi =
		FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, 0, 0);
	chcr_req->sec_cpl.cipherstop_lo_authinsert =
		FILL_SEC_CPL_AUTHINSERT(0, 1, 0, 0);
	chcr_req->sec_cpl.seqno_numivs =
		FILL_SEC_CPL_SCMD0_SEQNO(0, 0, 0, param->alg_prm.auth_mode,
					 param->opad_needed, 0);

	chcr_req->sec_cpl.ivgen_hdrlen =
		FILL_SEC_CPL_IVGEN_HDRLEN(param->last, param->more, 0, 1, 0, 0);

	memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash,
	       param->alg_prm.result_size);

	if (param->opad_needed)
		memcpy(chcr_req->key_ctx.key +
		       ((param->alg_prm.result_size <= 32) ? 32 :
			CHCR_HASH_MAX_DIGEST_SIZE),
		       hmacctx->opad, param->alg_prm.result_size);

	chcr_req->key_ctx.ctx_hdr = FILL_KEY_CTX_HDR(CHCR_KEYCTX_NO_KEY,
					param->alg_prm.mk_size, 0,
					param->opad_needed,
					((kctx_len +
					 sizeof(chcr_req->key_ctx)) >> 4));
	chcr_req->sec_cpl.scmd1 = cpu_to_be64((u64)param->scmd1);

	skb_set_transport_header(skb, transhdr_len);
	if (param->bfr_len != 0)
		write_buffer_to_skb(skb, &frags, req_ctx->reqbfr,
				    param->bfr_len);
	if (param->sg_len != 0)
		write_sg_to_skb(skb, &frags, req->src, param->sg_len);

	create_wreq(ctx, chcr_req, req, skb, kctx_len, hash_size_in_response, 0,
		    DUMMY_BYTES);
	req_ctx->skb = skb;
	skb_get(skb);
	return skb;
}
*req
)
923 struct chcr_ahash_req_ctx
*req_ctx
= ahash_request_ctx(req
);
924 struct crypto_ahash
*rtfm
= crypto_ahash_reqtfm(req
);
925 struct chcr_context
*ctx
= crypto_tfm_ctx(crypto_ahash_tfm(rtfm
));
926 struct uld_ctx
*u_ctx
= NULL
;
928 u8 remainder
= 0, bs
;
929 unsigned int nbytes
= req
->nbytes
;
930 struct hash_wr_param params
;
932 bs
= crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm
));
934 u_ctx
= ULD_CTX(ctx
);
935 if (unlikely(cxgb4_is_crypto_q_full(u_ctx
->lldi
.ports
[0],
936 ctx
->tx_channel_id
))) {
937 if (!(req
->base
.flags
& CRYPTO_TFM_REQ_MAY_BACKLOG
))
941 if (nbytes
+ req_ctx
->reqlen
>= bs
) {
942 remainder
= (nbytes
+ req_ctx
->reqlen
) % bs
;
943 nbytes
= nbytes
+ req_ctx
->reqlen
- remainder
;
945 sg_pcopy_to_buffer(req
->src
, sg_nents(req
->src
), req_ctx
->reqbfr
946 + req_ctx
->reqlen
, nbytes
, 0);
947 req_ctx
->reqlen
+= nbytes
;
951 params
.opad_needed
= 0;
954 params
.sg_len
= nbytes
- req_ctx
->reqlen
;
955 params
.bfr_len
= req_ctx
->reqlen
;
957 get_alg_config(¶ms
.alg_prm
, crypto_ahash_digestsize(rtfm
));
959 req_ctx
->data_len
+= params
.sg_len
+ params
.bfr_len
;
960 skb
= create_hash_wr(req
, ¶ms
);
967 temp
= req_ctx
->reqbfr
;
968 req_ctx
->reqbfr
= req_ctx
->skbfr
;
969 req_ctx
->skbfr
= temp
;
970 sg_pcopy_to_buffer(req
->src
, sg_nents(req
->src
),
971 req_ctx
->reqbfr
, remainder
, req
->nbytes
-
974 req_ctx
->reqlen
= remainder
;
975 skb
->dev
= u_ctx
->lldi
.ports
[0];
976 set_wr_txq(skb
, CPL_PRIORITY_DATA
, ctx
->tx_channel_id
);
static void create_last_hash_block(char *bfr_ptr, unsigned int bs, u64 scmd1)
{
	memset(bfr_ptr, 0, bs);
	*bfr_ptr = 0x80;	/* MD-style padding terminator */
	if (bs == 64)
		*(__be64 *)(bfr_ptr + 56) = cpu_to_be64(scmd1 << 3);
	else
		*(__be64 *)(bfr_ptr + 120) = cpu_to_be64(scmd1 << 3);
}
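/*
 * create_last_hash_block() builds a final MD-style padding block: a 0x80
 * byte, zeros, and the total length in *bits* (scmd1 << 3) placed in the
 * last 8 bytes - offset 56 for 64-byte blocks, offset 120 for the 128-byte
 * blocks of SHA-384/512.
 */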
static int chcr_ahash_final(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
	struct hash_wr_param params;
	struct sk_buff *skb;
	struct uld_ctx *u_ctx = NULL;
	u8 bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	u_ctx = ULD_CTX(ctx);
	if (is_hmac(crypto_ahash_tfm(rtfm)))
		params.opad_needed = 1;
	else
		params.opad_needed = 0;
	params.sg_len = 0;
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	req_ctx->result = 1;
	params.bfr_len = req_ctx->reqlen;
	req_ctx->data_len += params.bfr_len + params.sg_len;
	if (req_ctx->reqlen == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
		params.last = 0;
		params.more = 1;
		params.scmd1 = 0;
		params.bfr_len = bs;
	} else {
		params.scmd1 = req_ctx->data_len;
		params.last = 1;
		params.more = 0;
	}
	skb = create_hash_wr(req, &params);
	if (!skb)
		return -ENOMEM;

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}
*req
)
1036 struct chcr_ahash_req_ctx
*req_ctx
= ahash_request_ctx(req
);
1037 struct crypto_ahash
*rtfm
= crypto_ahash_reqtfm(req
);
1038 struct chcr_context
*ctx
= crypto_tfm_ctx(crypto_ahash_tfm(rtfm
));
1039 struct uld_ctx
*u_ctx
= NULL
;
1040 struct sk_buff
*skb
;
1041 struct hash_wr_param params
;
1044 bs
= crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm
));
1045 u_ctx
= ULD_CTX(ctx
);
1047 if (unlikely(cxgb4_is_crypto_q_full(u_ctx
->lldi
.ports
[0],
1048 ctx
->tx_channel_id
))) {
1049 if (!(req
->base
.flags
& CRYPTO_TFM_REQ_MAY_BACKLOG
))
1053 if (is_hmac(crypto_ahash_tfm(rtfm
)))
1054 params
.opad_needed
= 1;
1056 params
.opad_needed
= 0;
1058 params
.sg_len
= req
->nbytes
;
1059 params
.bfr_len
= req_ctx
->reqlen
;
1060 get_alg_config(¶ms
.alg_prm
, crypto_ahash_digestsize(rtfm
));
1061 req_ctx
->data_len
+= params
.bfr_len
+ params
.sg_len
;
1062 req_ctx
->result
= 1;
1063 if ((req_ctx
->reqlen
+ req
->nbytes
) == 0) {
1064 create_last_hash_block(req_ctx
->reqbfr
, bs
, req_ctx
->data_len
);
1068 params
.bfr_len
= bs
;
1070 params
.scmd1
= req_ctx
->data_len
;
1075 skb
= create_hash_wr(req
, ¶ms
);
1079 skb
->dev
= u_ctx
->lldi
.ports
[0];
1080 set_wr_txq(skb
, CPL_PRIORITY_DATA
, ctx
->tx_channel_id
);
1083 return -EINPROGRESS
;
static int chcr_ahash_digest(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
	struct uld_ctx *u_ctx = NULL;
	struct sk_buff *skb;
	struct hash_wr_param params;
	u8 bs;

	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	u_ctx = ULD_CTX(ctx);
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    ctx->tx_channel_id))) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -EBUSY;
	}

	if (is_hmac(crypto_ahash_tfm(rtfm)))
		params.opad_needed = 1;
	else
		params.opad_needed = 0;

	params.sg_len = req->nbytes;
	params.bfr_len = 0;
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	req_ctx->result = 1;
	req_ctx->data_len += params.bfr_len + params.sg_len;

	if (req->nbytes == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, 0);
		params.bfr_len = bs;
	}

	skb = create_hash_wr(req, &params);
	if (!skb)
		return -ENOMEM;

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}
static int chcr_ahash_export(struct ahash_request *areq, void *out)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct chcr_ahash_req_ctx *state = out;

	state->reqlen = req_ctx->reqlen;
	state->data_len = req_ctx->data_len;
	memcpy(state->bfr1, req_ctx->reqbfr, req_ctx->reqlen);
	memcpy(state->partial_hash, req_ctx->partial_hash,
	       CHCR_HASH_MAX_DIGEST_SIZE);
	return 0;
}

static int chcr_ahash_import(struct ahash_request *areq, const void *in)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct chcr_ahash_req_ctx *state = (struct chcr_ahash_req_ctx *)in;

	req_ctx->reqlen = state->reqlen;
	req_ctx->data_len = state->data_len;
	req_ctx->reqbfr = req_ctx->bfr1;
	req_ctx->skbfr = req_ctx->bfr2;
	memcpy(req_ctx->bfr1, state->bfr1, CHCR_HASH_MAX_BLOCK_SIZE_128);
	memcpy(req_ctx->partial_hash, state->partial_hash,
	       CHCR_HASH_MAX_DIGEST_SIZE);
	return 0;
}
static int chcr_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	unsigned int digestsize = crypto_ahash_digestsize(tfm);
	unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int i, err = 0, updated_digestsize;

	SHASH_DESC_ON_STACK(shash, hmacctx->base_hash);

	/* use the key to calculate the ipad and opad. ipad will sent with the
	 * first request's data. opad will be sent with the final hash result
	 * ipad in hmacctx->ipad and opad in hmacctx->opad location
	 */
	shash->tfm = hmacctx->base_hash;
	shash->flags = crypto_shash_get_flags(hmacctx->base_hash);
	if (keylen > bs) {
		err = crypto_shash_digest(shash, key, keylen,
					  hmacctx->ipad);
		if (err)
			goto out;
		keylen = digestsize;
	} else {
		memcpy(hmacctx->ipad, key, keylen);
	}
	memset(hmacctx->ipad + keylen, 0, bs - keylen);
	memcpy(hmacctx->opad, hmacctx->ipad, bs);

	for (i = 0; i < bs / sizeof(int); i++) {
		*((unsigned int *)(&hmacctx->ipad) + i) ^= IPAD_DATA;
		*((unsigned int *)(&hmacctx->opad) + i) ^= OPAD_DATA;
	}

	updated_digestsize = digestsize;
	if (digestsize == SHA224_DIGEST_SIZE)
		updated_digestsize = SHA256_DIGEST_SIZE;
	else if (digestsize == SHA384_DIGEST_SIZE)
		updated_digestsize = SHA512_DIGEST_SIZE;
	err = chcr_compute_partial_hash(shash, hmacctx->ipad,
					hmacctx->ipad, digestsize);
	if (err)
		goto out;
	chcr_change_order(hmacctx->ipad, updated_digestsize);

	err = chcr_compute_partial_hash(shash, hmacctx->opad,
					hmacctx->opad, digestsize);
	if (err)
		goto out;
	chcr_change_order(hmacctx->opad, updated_digestsize);
out:
	return err;
}
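/*
 * chcr_ahash_setkey() above follows the usual HMAC construction,
 * HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)): the key is padded to one
 * block, XORed with the standard ipad/opad fill (0x36 / 0x5c repeated, which
 * is what IPAD_DATA/OPAD_DATA are expected to encode word-wise), and the
 * hash state after that single block is precomputed so the hardware can
 * resume from it.
 */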
static int chcr_aes_xts_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			       unsigned int key_len)
{
	struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	unsigned short context_size = 0;

	if ((key_len != (AES_KEYSIZE_128 << 1)) &&
	    (key_len != (AES_KEYSIZE_256 << 1))) {
		crypto_tfm_set_flags((struct crypto_tfm *)tfm,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		ablkctx->enckey_len = 0;
		return -EINVAL;
	}

	memcpy(ablkctx->key, key, key_len);
	ablkctx->enckey_len = key_len;
	get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, key_len << 2);
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD + key_len) >> 4;
	ablkctx->key_ctx_hdr =
		FILL_KEY_CTX_HDR((key_len == AES_KEYSIZE_256) ?
				 CHCR_KEYCTX_CIPHER_KEY_SIZE_128 :
				 CHCR_KEYCTX_CIPHER_KEY_SIZE_256,
				 CHCR_KEYCTX_NO_KEY, 1,
				 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_XTS;
	return 0;
}
static int chcr_sha_init(struct ahash_request *areq)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	int digestsize = crypto_ahash_digestsize(tfm);

	req_ctx->data_len = 0;
	req_ctx->reqlen = 0;
	req_ctx->reqbfr = req_ctx->bfr1;
	req_ctx->skbfr = req_ctx->bfr2;
	req_ctx->skb = NULL;
	req_ctx->result = 0;
	copy_hash_init_values(req_ctx->partial_hash, digestsize);
	return 0;
}
static int chcr_sha_cra_init(struct crypto_tfm *tfm)
{
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct chcr_ahash_req_ctx));
	return chcr_device_init(crypto_tfm_ctx(tfm));
}
static int chcr_hmac_init(struct ahash_request *areq)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(areq);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	unsigned int digestsize = crypto_ahash_digestsize(rtfm);
	unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	chcr_sha_init(areq);
	req_ctx->data_len = bs;
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		if (digestsize == SHA224_DIGEST_SIZE)
			memcpy(req_ctx->partial_hash, hmacctx->ipad,
			       SHA256_DIGEST_SIZE);
		else if (digestsize == SHA384_DIGEST_SIZE)
			memcpy(req_ctx->partial_hash, hmacctx->ipad,
			       SHA512_DIGEST_SIZE);
		else
			memcpy(req_ctx->partial_hash, hmacctx->ipad,
			       digestsize);
	}
	return 0;
}
static int chcr_hmac_cra_init(struct crypto_tfm *tfm)
{
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	unsigned int digestsize =
		crypto_ahash_digestsize(__crypto_ahash_cast(tfm));

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct chcr_ahash_req_ctx));
	hmacctx->base_hash = chcr_alloc_shash(digestsize);
	if (IS_ERR(hmacctx->base_hash))
		return PTR_ERR(hmacctx->base_hash);
	return chcr_device_init(crypto_tfm_ctx(tfm));
}
static void chcr_hmac_cra_exit(struct crypto_tfm *tfm)
{
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);

	if (hmacctx->base_hash) {
		chcr_free_shash(hmacctx->base_hash);
		hmacctx->base_hash = NULL;
	}
}
static int chcr_copy_assoc(struct aead_request *req,
			   struct chcr_aead_ctx *ctx)
{
	SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);

	skcipher_request_set_tfm(skreq, ctx->null);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      NULL, NULL);
	skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,
				   NULL);

	return crypto_skcipher_encrypt(skreq);
}

static unsigned char get_hmac(unsigned int authsize)
{
	switch (authsize) {
	case ICV_8:
		return CHCR_SCMD_HMAC_CTRL_PL1;
	case ICV_10:
		return CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
	case ICV_12:
		return CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
	}
	return CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
}
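/*
 * get_hmac() maps the requested ICV length to the hardware's HMAC-control
 * truncation mode, so the engine itself emits a tag of the right size
 * instead of the driver trimming a full-length digest.
 */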
static struct sk_buff *create_authenc_wr(struct aead_request *req,
					 unsigned short qid,
					 int size,
					 unsigned short op_type)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct phys_sge_parm sg_param;
	struct scatterlist *src, *dst;
	struct scatterlist src_sg[2], dst_sg[2];
	unsigned int frags = 0, transhdr_len;
	unsigned int ivsize = crypto_aead_ivsize(tfm), dst_size = 0;
	unsigned int kctx_len = 0;
	unsigned short stop_offset = 0;
	unsigned int assoclen = req->assoclen;
	unsigned int authsize = crypto_aead_authsize(tfm);
	int err = 0;
	int null = 0;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;

	if (aeadctx->enckey_len == 0 || (req->cryptlen == 0))
		goto err;

	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
		goto err;

	if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
		goto err;
	src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
	dst = src;
	if (req->src != req->dst) {
		err = chcr_copy_assoc(req, aeadctx);
		if (err)
			return ERR_PTR(err);
		dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
	}
	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_NULL) {
		null = 1;
		assoclen = 0;
	}
	reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
					     (op_type ? -authsize : authsize));
	if (reqctx->dst_nents <= 0) {
		pr_err("AUTHENC:Invalid Destination sg entries\n");
		goto err;
	}
	dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
	kctx_len = (ntohl(KEY_CONTEXT_CTX_LEN_V(aeadctx->key_ctx_hdr)) << 4)
		- sizeof(chcr_req->key_ctx);
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb)
		goto err;

	/* LLD is going to write the sge hdr. */
	skb_reserve(skb, sizeof(struct sge_opaque_hdr));

	/* Write WR */
	chcr_req = (struct chcr_wr *) __skb_put(skb, transhdr_len);
	memset(chcr_req, 0, transhdr_len);

	stop_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;

	/*
	 * Input order is AAD,IV and Payload. where IV should be included as
	 * the part of authdata. All other fields should be filled according
	 * to the hardware spec
	 */
	chcr_req->sec_cpl.op_ivinsrtofst =
		FILL_SEC_CPL_OP_IVINSR(ctx->dev->tx_channel_id, 2,
				       (ivsize ? (assoclen + 1) : 0));
	chcr_req->sec_cpl.pldlen = htonl(assoclen + ivsize + req->cryptlen);
	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
					assoclen ? 1 : 0, assoclen,
					assoclen + ivsize + 1,
					(stop_offset & 0x1F0) >> 4);
	chcr_req->sec_cpl.cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(
					stop_offset & 0xF,
					null ? 0 : assoclen + ivsize + 1,
					stop_offset, stop_offset);
	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
					(op_type == CHCR_ENCRYPT_OP) ? 1 : 0,
					CHCR_SCMD_CIPHER_MODE_AES_CBC,
					actx->auth_mode, aeadctx->hmac_ctrl,
					ivsize >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
					0, 1, dst_size);

	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
	if (op_type == CHCR_ENCRYPT_OP)
		memcpy(chcr_req->key_ctx.key, aeadctx->key,
		       aeadctx->enckey_len);
	else
		memcpy(chcr_req->key_ctx.key, actx->dec_rrkey,
		       aeadctx->enckey_len);

	memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) <<
					4), actx->h_iopad, kctx_len -
				(DIV_ROUND_UP(aeadctx->enckey_len, 16) << 4));

	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	sg_param.nents = reqctx->dst_nents;
	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
	sg_param.qid = qid;
	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
				 &sg_param))
		goto dstmap_fail;

	skb_set_transport_header(skb, transhdr_len);

	if (assoclen)
		write_sg_to_skb(skb, &frags, req->src, assoclen);

	write_buffer_to_skb(skb, &frags, req->iv, ivsize);
	write_sg_to_skb(skb, &frags, src, req->cryptlen);
	create_wreq(ctx, chcr_req, req, skb, kctx_len, size, 1,
		    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
	reqctx->skb = skb;
	skb_get(skb);
	return skb;
dstmap_fail:
	kfree_skb(skb);
err:
	return ERR_PTR(-EINVAL);
}
static void aes_gcm_empty_pld_pad(struct scatterlist *sg,
				  unsigned short offset)
{
	struct page *spage;
	unsigned char *addr;

	spage = sg_page(sg);
	get_page(spage); /* so that it is not freed by NIC */
#ifdef KMAP_ATOMIC_ARGS
	addr = kmap_atomic(spage, KM_SOFTIRQ0);
#else
	addr = kmap_atomic(spage);
#endif
	memset(addr + sg->offset, 0, offset + 1);

	kunmap_atomic(addr);
}
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (unsigned int)(1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}
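/*
 * Example (sketch): with csize = 4 and msglen = 0x12345, set_msg_len()
 * writes 00 01 23 45 into the last four bytes of the length field, i.e. the
 * big-endian message length right-aligned in the L-byte field of B0.
 */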
static void generate_b0(struct aead_request *req,
			struct chcr_aead_ctx *aeadctx,
			unsigned short op_type)
{
	unsigned int l, lp, m;
	int rc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	u8 *b0 = reqctx->scratch_pad;

	m = crypto_aead_authsize(aead);

	memcpy(b0, reqctx->iv, 16);

	lp = b0[0];
	l = lp + 1;

	/* set m, bits 3-5 */
	*b0 |= (8 * ((m - 2) / 2));

	/* set adata, bit 6, if associated data is used */
	if (req->assoclen)
		*b0 |= 64;
	rc = set_msg_len(b0 + 16 - l,
			 (op_type == CHCR_DECRYPT_OP) ?
			 req->cryptlen - m : req->cryptlen, l);
}
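/*
 * The B0 flags byte built above follows RFC 3610: bit 6 is set when there is
 * associated data, bits 3-5 hold (t - 2) / 2 for a t-byte tag, and bits 0-2
 * hold L - 1, where L is the size of the length field (leaving 15 - L bytes
 * of nonce).
 */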
static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}
static int ccm_format_packet(struct aead_request *req,
			     struct chcr_aead_ctx *aeadctx,
			     unsigned int sub_type,
			     unsigned short op_type)
{
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	int rc = 0;

	if (req->assoclen > T5_MAX_AAD_SIZE) {
		pr_err("CCM: Unsupported AAD data. It should be < %d\n",
		       T5_MAX_AAD_SIZE);
		return -EINVAL;
	}
	if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
		reqctx->iv[0] = 3;
		memcpy(reqctx->iv + 1, &aeadctx->salt[0], 3);
		memcpy(reqctx->iv + 4, req->iv, 8);
		memset(reqctx->iv + 12, 0, 4);
		*((unsigned short *)(reqctx->scratch_pad + 16)) =
			htons(req->assoclen - 8);
	} else {
		memcpy(reqctx->iv, req->iv, 16);
		*((unsigned short *)(reqctx->scratch_pad + 16)) =
			htons(req->assoclen);
	}
	generate_b0(req, aeadctx, op_type);
	/* zero the ctr value */
	memset(reqctx->iv + 15 - reqctx->iv[0], 0, reqctx->iv[0] + 1);
	return rc;
}
static void fill_sec_cpl_for_aead(struct cpl_tx_sec_pdu *sec_cpl,
				  unsigned int dst_size,
				  struct aead_request *req,
				  unsigned short op_type,
				  struct chcr_context *chcrctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned int ivsize = AES_BLOCK_SIZE;
	unsigned int cipher_mode = CHCR_SCMD_CIPHER_MODE_AES_CCM;
	unsigned int mac_mode = CHCR_SCMD_AUTH_MODE_CBCMAC;
	unsigned int c_id = chcrctx->dev->tx_channel_id;
	unsigned int ccm_xtra;
	unsigned char tag_offset = 0, auth_offset = 0;
	unsigned char hmac_ctrl = get_hmac(crypto_aead_authsize(tfm));
	unsigned int assoclen;

	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
		assoclen = req->assoclen - 8;
	else
		assoclen = req->assoclen;
	ccm_xtra = CCM_B0_SIZE +
		((assoclen) ? CCM_AAD_FIELD_SIZE : 0);

	auth_offset = req->cryptlen ?
		(assoclen + ivsize + 1 + ccm_xtra) : 0;
	if (op_type == CHCR_DECRYPT_OP) {
		if (crypto_aead_authsize(tfm) != req->cryptlen)
			tag_offset = crypto_aead_authsize(tfm);
	}

	sec_cpl->op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(c_id,
					2, (ivsize ? (assoclen + 1) : 0) +
					ccm_xtra);
	sec_cpl->pldlen =
		htonl(assoclen + ivsize + req->cryptlen + ccm_xtra);
	/* For CCM there wil be b0 always. So AAD start will be 1 always */
	sec_cpl->aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
					1, assoclen + ccm_xtra, assoclen
					+ ivsize + 1 + ccm_xtra, 0);

	sec_cpl->cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(0,
					auth_offset, tag_offset,
					(op_type == CHCR_ENCRYPT_OP) ? 0 :
					crypto_aead_authsize(tfm));
	sec_cpl->seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
					(op_type == CHCR_ENCRYPT_OP) ? 0 : 1,
					cipher_mode, mac_mode, hmac_ctrl,
					ivsize >> 1);

	sec_cpl->ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, 0,
					1, dst_size);
}
int aead_ccm_validate_input(unsigned short op_type,
			    struct aead_request *req,
			    struct chcr_aead_ctx *aeadctx,
			    unsigned int sub_type)
{
	if (sub_type != CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
		if (crypto_ccm_check_iv(req->iv)) {
			pr_err("CCM: IV check fails\n");
			return -EINVAL;
		}
	} else {
		if (req->assoclen != 16 && req->assoclen != 20) {
			pr_err("RFC4309: Invalid AAD length %d\n",
			       req->assoclen);
			return -EINVAL;
		}
	}
	if (aeadctx->enckey_len == 0) {
		pr_err("CCM: Encryption key not set\n");
		return -EINVAL;
	}
	return 0;
}
unsigned int fill_aead_req_fields(struct sk_buff *skb,
				  struct aead_request *req,
				  struct scatterlist *src,
				  unsigned int ivsize,
				  struct chcr_aead_ctx *aeadctx)
{
	unsigned int frags = 0;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);

	/* b0 and aad length(if available) */
	write_buffer_to_skb(skb, &frags, reqctx->scratch_pad, CCM_B0_SIZE +
			    (req->assoclen ? CCM_AAD_FIELD_SIZE : 0));
	if (req->assoclen) {
		if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
			write_sg_to_skb(skb, &frags, req->src,
					req->assoclen - 8);
		else
			write_sg_to_skb(skb, &frags, req->src, req->assoclen);
	}
	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
	write_sg_to_skb(skb, &frags, src, req->cryptlen);
	return frags;
}
static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
					  unsigned short qid,
					  int size,
					  unsigned short op_type)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct phys_sge_parm sg_param;
	struct scatterlist *src, *dst;
	struct scatterlist src_sg[2], dst_sg[2];
	unsigned int frags = 0, transhdr_len, ivsize = AES_BLOCK_SIZE;
	unsigned int dst_size = 0, kctx_len;
	unsigned int sub_type;
	unsigned int authsize = crypto_aead_authsize(tfm);
	int err = 0;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;

	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
		goto err;

	if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
		goto err;
	sub_type = get_aead_subtype(tfm);
	src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
	dst = src;
	if (req->src != req->dst) {
		err = chcr_copy_assoc(req, aeadctx);
		if (err) {
			pr_err("AAD copy to destination buffer fails\n");
			return ERR_PTR(err);
		}
		dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
	}
	reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
					     (op_type ? -authsize : authsize));
	if (reqctx->dst_nents <= 0) {
		pr_err("CCM:Invalid Destination sg entries\n");
		goto err;
	}

	if (aead_ccm_validate_input(op_type, req, aeadctx, sub_type))
		goto err;

	dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
	kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) * 2;
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb)
		goto err;

	skb_reserve(skb, sizeof(struct sge_opaque_hdr));

	chcr_req = (struct chcr_wr *) __skb_put(skb, transhdr_len);
	memset(chcr_req, 0, transhdr_len);

	fill_sec_cpl_for_aead(&chcr_req->sec_cpl, dst_size, req, op_type, ctx);

	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
	memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
					16), aeadctx->key, aeadctx->enckey_len);

	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	if (ccm_format_packet(req, aeadctx, sub_type, op_type))
		goto dstmap_fail;

	sg_param.nents = reqctx->dst_nents;
	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
	sg_param.qid = qid;
	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
				 &sg_param))
		goto dstmap_fail;

	skb_set_transport_header(skb, transhdr_len);
	frags = fill_aead_req_fields(skb, req, src, ivsize, aeadctx);
	create_wreq(ctx, chcr_req, req, skb, kctx_len, 0, 1,
		    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
	reqctx->skb = skb;
	skb_get(skb);
	return skb;
dstmap_fail:
	kfree_skb(skb);
err:
	return ERR_PTR(-EINVAL);
}
static struct sk_buff *create_gcm_wr(struct aead_request *req,
				     unsigned short qid,
				     int size,
				     unsigned short op_type)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct phys_sge_parm sg_param;
	struct scatterlist *src, *dst;
	struct scatterlist src_sg[2], dst_sg[2];
	unsigned int frags = 0, transhdr_len;
	unsigned int ivsize = AES_BLOCK_SIZE;
	unsigned int dst_size = 0, kctx_len;
	unsigned char tag_offset = 0;
	unsigned int crypt_len = 0;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned char hmac_ctrl = get_hmac(authsize);
	int err = 0;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;

	/* validate key size */
	if (aeadctx->enckey_len == 0)
		goto err;

	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
		goto err;

	if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
		goto err;

	src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
	dst = src;
	if (req->src != req->dst) {
		err = chcr_copy_assoc(req, aeadctx);
		if (err)
			return ERR_PTR(err);
		dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
	}

	if (!req->cryptlen)
		/* null-payload is not supported in the hardware.
		 * software is sending block size
		 */
		crypt_len = AES_BLOCK_SIZE;
	else
		crypt_len = req->cryptlen;
	reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
					     (op_type ? -authsize : authsize));
	if (reqctx->dst_nents <= 0) {
		pr_err("GCM:Invalid Destination sg entries\n");
		goto err;
	}

	dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
	kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) +
		AEAD_H_SIZE;
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb)
		goto err;

	/* NIC driver is going to write the sge hdr. */
	skb_reserve(skb, sizeof(struct sge_opaque_hdr));

	chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
	memset(chcr_req, 0, transhdr_len);

	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106)
		req->assoclen -= 8;

	tag_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;
	chcr_req->sec_cpl.op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(
					ctx->dev->tx_channel_id, 2, (ivsize ?
					(req->assoclen + 1) : 0));
	chcr_req->sec_cpl.pldlen = htonl(req->assoclen + ivsize + crypt_len);
	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
					req->assoclen ? 1 : 0, req->assoclen,
					req->assoclen + ivsize + 1, 0);
	if (req->cryptlen) {
		chcr_req->sec_cpl.cipherstop_lo_authinsert =
			FILL_SEC_CPL_AUTHINSERT(0, req->assoclen + ivsize + 1,
						tag_offset, tag_offset);
		chcr_req->sec_cpl.seqno_numivs =
			FILL_SEC_CPL_SCMD0_SEQNO(op_type, (op_type ==
					CHCR_ENCRYPT_OP) ? 1 : 0,
					CHCR_SCMD_CIPHER_MODE_AES_GCM,
					CHCR_SCMD_AUTH_MODE_GHASH, hmac_ctrl,
					ivsize >> 1);
	} else {
		chcr_req->sec_cpl.cipherstop_lo_authinsert =
			FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
		chcr_req->sec_cpl.seqno_numivs =
			FILL_SEC_CPL_SCMD0_SEQNO(op_type,
					(op_type == CHCR_ENCRYPT_OP) ?
					1 : 0, CHCR_SCMD_CIPHER_MODE_AES_CBC,
					0, 0, ivsize >> 1);
	}
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
					0, 1, dst_size);
	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
	memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
				16), GCM_CTX(aeadctx)->ghash_h, AEAD_H_SIZE);

	/* prepare a 16 byte iv */
	/* S   A   L  T |  IV | 0x00000001 */
	if (get_aead_subtype(tfm) ==
	    CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
		memcpy(reqctx->iv, aeadctx->salt, 4);
		memcpy(reqctx->iv + 4, req->iv, 8);
	} else {
		memcpy(reqctx->iv, req->iv, 12);
	}
	*((unsigned int *)(reqctx->iv + 12)) = htonl(0x01);

	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	sg_param.nents = reqctx->dst_nents;
	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
	sg_param.qid = qid;
	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
				 &sg_param))
		goto dstmap_fail;

	skb_set_transport_header(skb, transhdr_len);

	write_sg_to_skb(skb, &frags, req->src, req->assoclen);

	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);

	if (req->cryptlen) {
		write_sg_to_skb(skb, &frags, src, req->cryptlen);
	} else {
		aes_gcm_empty_pld_pad(req->dst, authsize - 1);
		write_sg_to_skb(skb, &frags, dst, crypt_len);
	}

	create_wreq(ctx, chcr_req, req, skb, kctx_len, size, 1,
		    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
	reqctx->skb = skb;
	skb_get(skb);
	return skb;

dstmap_fail:
	kfree_skb(skb);
err:
	return ERR_PTR(-EINVAL);
}
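/*
 * The 16-byte IV assembled in create_gcm_wr() is the standard GCM J0 layout:
 * for rfc4106 it is 4 bytes of key salt || 8 bytes of per-request IV ||
 * 0x00000001, and for plain gcm(aes) it is the 12-byte IV || 0x00000001,
 * i.e. the initial 32-bit counter value.
 */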
static int chcr_aead_cra_init(struct crypto_aead *tfm)
{
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);

	crypto_aead_set_reqsize(tfm, sizeof(struct chcr_aead_reqctx));
	aeadctx->null = crypto_get_default_null_skcipher();
	if (IS_ERR(aeadctx->null))
		return PTR_ERR(aeadctx->null);
	return chcr_device_init(ctx);
}

static void chcr_aead_cra_exit(struct crypto_aead *tfm)
{
	crypto_put_default_null_skcipher();
}
static int chcr_authenc_null_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NOP;
	aeadctx->mayverify = VERIFY_HW;
	return 0;
}
static int chcr_authenc_setauthsize(struct crypto_aead *tfm,
				    unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
	u32 maxauth = crypto_aead_maxauthsize(tfm);

	/* SHA-1's authsize in IPsec is 12 rather than 10, i.e. maxauthsize / 2
	 * does not hold for SHA-1, so the authsize == 12 check must come
	 * before the authsize == (maxauth >> 1) check.
	 */
	if (authsize == ICV_4) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_6) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_10) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_12) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_14) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == (maxauth >> 1)) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == maxauth) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
	} else {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_SW;
	}
	return 0;
}
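/* Illustration (editorial, not part of the original driver): the checks above
 * map the tag length requested through the generic AEAD API onto a hardware
 * truncation mode, or fall back to software verification (VERIFY_SW) when the
 * engine cannot produce that length directly. A caller selects the tag length
 * roughly like this (the 96-bit ICV commonly used by IPsec):
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
 *	if (!IS_ERR(tfm))
 *		crypto_aead_setauthsize(tfm, 12);   // handled as ICV_12 above
 *
 * Whether such an instance is actually backed by chcr depends on
 * priority-based algorithm selection at allocation time.
 */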
static int chcr_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	switch (authsize) {
	case ICV_4:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_14:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_13:
	case ICV_15:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_SW;
		break;
	default:
		crypto_tfm_set_flags((struct crypto_tfm *)tfm,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return 0;
}
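/* Note (editorial, not from the original source): GCM as specified in NIST
 * SP 800-38D permits tag lengths of 128, 120, 112, 104 and 96 bits, plus 64
 * and 32 bits for constrained applications. The odd 13- and 15-byte sizes are
 * therefore legal for gcm(aes) but, lacking a matching hardware truncation
 * mode, are verified in software here.
 */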
static int chcr_4106_4309_setauthsize(struct crypto_aead *tfm,
				      unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	switch (authsize) {
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	default:
		crypto_tfm_set_flags((struct crypto_tfm *)tfm,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return 0;
}
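/* Note (editorial, not from the original source): RFC 4106 (AES-GCM in ESP)
 * and RFC 4309 (AES-CCM in ESP) restrict the ICV to 8, 12 or 16 octets, which
 * is why this shared setauthsize handler accepts only those three sizes.
 */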
static int chcr_ccm_setauthsize(struct crypto_aead *tfm,
				unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	switch (authsize) {
	case ICV_4:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_6:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_10:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_14:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	default:
		crypto_tfm_set_flags((struct crypto_tfm *)tfm,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return 0;
}
static int chcr_aead_ccm_setkey(struct crypto_aead *aead,
				const u8 *key,
				unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	unsigned char ck_size, mk_size;
	int key_ctx_size = 0;

	memcpy(aeadctx->key, key, keylen);
	aeadctx->enckey_len = keylen;
	key_ctx_size = sizeof(struct _key_ctx) +
		((DIV_ROUND_UP(keylen, 16)) << 4) * 2;
	if (keylen == AES_KEYSIZE_128) {
		mk_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_192;
	} else if (keylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
	} else {
		crypto_tfm_set_flags((struct crypto_tfm *)aead,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, mk_size, 0, 0,
						key_ctx_size >> 4);
	return 0;
}
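/* Note (editorial, not from the original source): CCM derives both the CTR
 * keystream and the CBC-MAC from the same AES key, which appears to be why
 * key_ctx_size reserves the 16-byte-rounded key twice (the "* 2") behind the
 * _key_ctx header; key_ctx_size >> 4 then expresses that length in the
 * 16-byte units the hardware key-context header expects.
 */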
static int chcr_aead_rfc4309_setkey(struct crypto_aead *aead, const u8 *key,
				    unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);

	if (keylen < 3) {
		crypto_tfm_set_flags((struct crypto_tfm *)aead,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	keylen -= 3;
	memcpy(aeadctx->salt, key + keylen, 3);
	return chcr_aead_ccm_setkey(aead, key, keylen);
}
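/* Note (editorial, not from the original source): per RFC 4309 the key
 * material passed to setkey is the AES key followed by a 3-byte nonce salt,
 * e.g. for AES-128:
 *
 *	key[0..15]  -> AES key, forwarded to chcr_aead_ccm_setkey()
 *	key[16..18] -> salt, stored in aeadctx->salt
 */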
static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			   unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
	struct blkcipher_desc h_desc;
	struct scatterlist src[1];
	unsigned int ck_size;
	int ret = 0, key_ctx_size = 0;

	if (get_aead_subtype(aead) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
		keylen -= 4;	/* nonce/salt is present in the last 4 bytes */
		memcpy(aeadctx->salt, key + keylen, 4);
	}
	if (keylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		crypto_tfm_set_flags((struct crypto_tfm *)aead,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		aeadctx->enckey_len = 0;
		pr_err("GCM: Invalid key length %d\n", keylen);
		ret = -EINVAL;
		goto out;
	}

	memcpy(aeadctx->key, key, keylen);
	aeadctx->enckey_len = keylen;
	key_ctx_size = sizeof(struct _key_ctx) +
		((DIV_ROUND_UP(keylen, 16)) << 4) +
		AEAD_H_SIZE;
	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size,
						CHCR_KEYCTX_MAC_KEY_SIZE_128,
						0, 0, key_ctx_size >> 4);
	/* Calculate H = CIPH(K, 0 repeated 16 times) with a synchronous AES
	 * blkcipher; the result goes into the key context.
	 */
	h_desc.tfm = crypto_alloc_blkcipher("cbc(aes-generic)", 0, 0);
	if (IS_ERR(h_desc.tfm)) {
		aeadctx->enckey_len = 0;
		ret = -ENOMEM;
		goto out;
	}
	h_desc.flags = 0;
	ret = crypto_blkcipher_setkey(h_desc.tfm, key, keylen);
	if (ret) {
		aeadctx->enckey_len = 0;
		goto out1;
	}
	memset(gctx->ghash_h, 0, AEAD_H_SIZE);
	sg_init_one(&src[0], gctx->ghash_h, AEAD_H_SIZE);
	ret = crypto_blkcipher_encrypt(&h_desc, &src[0], &src[0], AEAD_H_SIZE);
out1:
	crypto_free_blkcipher(h_desc.tfm);
out:
	return ret;
}
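/* Note (editorial, not from the original source): the GHASH hash subkey is
 * H = AES_K(0^128), i.e. the block cipher applied to an all-zero block, which
 * is why ghash_h is zeroed and then encrypted in place above. Encrypting a
 * single block through "cbc(aes-generic)" yields the same result as plain
 * ECB here because the per-tfm CBC IV starts out zeroed, so the chaining XOR
 * is a no-op for the first (and only) block.
 */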
static int chcr_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
			       unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(authenc);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	/* it contains both the auth and cipher key */
	struct crypto_authenc_keys keys;
	unsigned int bs;
	unsigned int max_authsize = crypto_aead_alg(authenc)->maxauthsize;
	int err = 0, i, key_ctx_len = 0;
	unsigned char ck_size = 0;
	unsigned char pad[CHCR_HASH_MAX_BLOCK_SIZE_128] = { 0 };
	struct crypto_shash *base_hash = NULL;
	struct algo_param param;
	int align;
	u8 *o_ptr = NULL;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
		crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
		goto out;
	}

	if (get_alg_config(&param, max_authsize)) {
		pr_err("chcr : Unsupported digest size\n");
		goto out;
	}
	if (keys.enckeylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keys.enckeylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keys.enckeylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		pr_err("chcr : Unsupported cipher key\n");
		goto out;
	}

	/* Copy only the encryption key. The authkey is used here to generate
	 * h(ipad) and h(opad), so it is not needed again; authkeylen is the
	 * size of the hash digest.
	 */
	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
	aeadctx->enckey_len = keys.enckeylen;
	get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
			    aeadctx->enckey_len << 3);

	base_hash = chcr_alloc_shash(max_authsize);
	if (IS_ERR(base_hash)) {
		pr_err("chcr : Base driver cannot be loaded\n");
		goto out;
	}
	{
		SHASH_DESC_ON_STACK(shash, base_hash);
		shash->tfm = base_hash;
		shash->flags = crypto_shash_get_flags(base_hash);
		bs = crypto_shash_blocksize(base_hash);
		align = KEYCTX_ALIGN_PAD(max_authsize);
		o_ptr = actx->h_iopad + param.result_size + align;

		if (keys.authkeylen > bs) {
			err = crypto_shash_digest(shash, keys.authkey,
						  keys.authkeylen, o_ptr);
			if (err) {
				pr_err("chcr : Base driver cannot be loaded\n");
				goto out;
			}
			keys.authkeylen = max_authsize;
		} else {
			memcpy(o_ptr, keys.authkey, keys.authkeylen);
		}

		/* Compute the ipad digest */
		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
		memcpy(pad, o_ptr, keys.authkeylen);
		for (i = 0; i < bs >> 2; i++)
			*((unsigned int *)pad + i) ^= IPAD_DATA;

		if (chcr_compute_partial_hash(shash, pad, actx->h_iopad,
					      max_authsize))
			goto out;

		/* Compute the opad digest */
		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
		memcpy(pad, o_ptr, keys.authkeylen);
		for (i = 0; i < bs >> 2; i++)
			*((unsigned int *)pad + i) ^= OPAD_DATA;

		if (chcr_compute_partial_hash(shash, pad, o_ptr, max_authsize))
			goto out;

		/* convert the ipad and opad digests to network order */
		chcr_change_order(actx->h_iopad, param.result_size);
		chcr_change_order(o_ptr, param.result_size);
		key_ctx_len = sizeof(struct _key_ctx) +
			((DIV_ROUND_UP(keys.enckeylen, 16)) << 4) +
			(param.result_size + align) * 2;
		aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, param.mk_size,
							0, 1, key_ctx_len >> 4);
		actx->auth_mode = param.auth_mode;
		chcr_free_shash(base_hash);

		return 0;
	}
out:
	aeadctx->enckey_len = 0;
	if (base_hash)
		chcr_free_shash(base_hash);
	return -EINVAL;
}
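/* Note (editorial, not from the original source): the XOR loops above are the
 * standard HMAC key preprocessing from RFC 2104. With K' the authentication
 * key zero-padded to the hash block size:
 *
 *	ipad block = K' XOR 0x36 repeated   (so IPAD_DATA is 0x36 replicated
 *	opad block = K' XOR 0x5c repeated    across a 32-bit word, OPAD_DATA
 *	                                     likewise 0x5c)
 *
 * Only the intermediate hash states over the ipad and opad blocks are kept in
 * the key context, so the hardware can finish
 * HMAC(K, m) = H(opad || H(ipad || m)) without ever seeing the raw key.
 */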
static int chcr_aead_digest_null_setkey(struct crypto_aead *authenc,
					const u8 *key, unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(authenc);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	struct crypto_authenc_keys keys;

	/* it contains both the auth and cipher key */
	int key_ctx_len = 0;
	unsigned char ck_size = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
		crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
		goto out;
	}
	if (keys.enckeylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keys.enckeylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keys.enckeylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		pr_err("chcr : Unsupported cipher key\n");
		goto out;
	}
	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
	aeadctx->enckey_len = keys.enckeylen;
	get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
			    aeadctx->enckey_len << 3);
	key_ctx_len = sizeof(struct _key_ctx)
		+ ((DIV_ROUND_UP(keys.enckeylen, 16)) << 4);

	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY, 0,
						0, key_ctx_len >> 4);
	actx->auth_mode = CHCR_SCMD_AUTH_MODE_NOP;
	return 0;
out:
	aeadctx->enckey_len = 0;
	return -EINVAL;
}
static int chcr_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);

	reqctx->verify = VERIFY_HW;

	switch (get_aead_subtype(tfm)) {
	case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
	case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
				    create_authenc_wr);
	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
				    create_aead_ccm_wr);
	default:
		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
				    create_gcm_wr);
	}
}
static int chcr_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	int size;

	if (aeadctx->mayverify == VERIFY_SW) {
		size = crypto_aead_maxauthsize(tfm);
		reqctx->verify = VERIFY_SW;
	} else {
		size = 0;
		reqctx->verify = VERIFY_HW;
	}

	switch (get_aead_subtype(tfm)) {
	case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
	case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
				    create_authenc_wr);
	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
				    create_aead_ccm_wr);
	default:
		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
				    create_gcm_wr);
	}
}
static int chcr_aead_op(struct aead_request *req,
			unsigned short op_type,
			int size,
			create_wr_t create_wr_fn)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct sk_buff *skb;

	if (ctx && !ctx->dev) {
		pr_err("chcr : %s : No crypto device.\n", __func__);
		return -ENXIO;
	}
	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
				   ctx->tx_channel_id)) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -EBUSY;
	}

	/* Form a WR from req */
	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[ctx->tx_channel_id], size,
			   op_type);

	if (IS_ERR(skb) || skb == NULL) {
		pr_err("chcr : %s : failed to form WR. No memory\n", __func__);
		return PTR_ERR(skb);
	}

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}
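/* Illustration (editorial, not part of the original driver): -EINPROGRESS
 * tells the caller the request was queued to the hardware; completion is
 * signalled through the request callback. A minimal caller, sketched under
 * the assumption of a synchronous wait (kernel-version details may differ),
 * looks roughly like:
 *
 *	static void demo_done(struct crypto_async_request *areq, int err)
 *	{
 *		complete(areq->data);	// struct completion passed below
 *	}
 *
 *	DECLARE_COMPLETION_ONSTACK(wait);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  demo_done, &wait);
 *	err = crypto_aead_encrypt(req);
 *	if (err == -EINPROGRESS || err == -EBUSY)
 *		wait_for_completion(&wait);
 */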
static struct chcr_alg_template driver_algs[] = {
	/* AES-CBC */
	{
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.is_registered = 0,
		.alg.crypto = {
			.cra_name		= "cbc(aes)",
			.cra_driver_name	= "cbc-aes-chcr",
			.cra_priority		= CHCR_CRA_PRIORITY,
			.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
						  CRYPTO_ALG_ASYNC,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct chcr_context)
						  + sizeof(struct ablk_ctx),
			.cra_type		= &crypto_ablkcipher_type,
			.cra_module		= THIS_MODULE,
			.cra_init		= chcr_cra_init,
			.cra_u.ablkcipher	= {
				.min_keysize	= AES_MIN_KEY_SIZE,
				.max_keysize	= AES_MAX_KEY_SIZE,
				.ivsize		= AES_BLOCK_SIZE,
				.setkey		= chcr_aes_cbc_setkey,
				.encrypt	= chcr_aes_encrypt,
				.decrypt	= chcr_aes_decrypt,
			},
		},
	},
	/* AES-XTS */
	{
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.is_registered = 0,
		.alg.crypto = {
			.cra_name		= "xts(aes)",
			.cra_driver_name	= "xts-aes-chcr",
			.cra_priority		= CHCR_CRA_PRIORITY,
			.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
						  CRYPTO_ALG_ASYNC,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct chcr_context) +
						  sizeof(struct ablk_ctx),
			.cra_type		= &crypto_ablkcipher_type,
			.cra_module		= THIS_MODULE,
			.cra_init		= chcr_cra_init,
			.cra_u.ablkcipher	= {
				.min_keysize	= 2 * AES_MIN_KEY_SIZE,
				.max_keysize	= 2 * AES_MAX_KEY_SIZE,
				.ivsize		= AES_BLOCK_SIZE,
				.setkey		= chcr_aes_xts_setkey,
				.encrypt	= chcr_aes_encrypt,
				.decrypt	= chcr_aes_decrypt,
			},
		},
	},
	/* SHA */
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			},
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			},
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			},
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			},
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			},
		},
	},
	/* HMAC */
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			},
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "hmac-sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			},
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "hmac-sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			},
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "hmac-sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			},
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "hmac-sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			},
		},
	},
	/* Add AEAD Algorithms */
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_GCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-chcr",
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = 12,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_gcm_setauthsize,
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-chcr",
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = 8,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_CCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "ccm(aes)",
				.cra_driver_name = "ccm-aes-chcr",
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_ccm_setkey,
			.setauthsize = chcr_ccm_setauthsize,
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4309(ccm(aes))",
				.cra_driver_name = "rfc4309-ccm-aes-chcr",
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = 8,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_rfc4309_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha1-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha256-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha224-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha384-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha512-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		},
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,cbc(aes))",
				.cra_driver_name =
					"authenc-digest_null-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.setkey = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		},
	},
};
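/* Note (editorial, not from the original source): each entry advertises a
 * generic .cra_name (what callers request) and a chcr-specific
 * .cra_driver_name. Assuming CHCR_CRA_PRIORITY outranks the software
 * implementations, a plain
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 * will transparently resolve to "gcm-aes-chcr" once chcr_register_alg() has
 * run, while requesting the driver name explicitly pins the allocation to
 * this driver.
 */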
/*
 *	chcr_unregister_alg - Deregister crypto algorithms with
 *	kernel framework.
 */
static int chcr_unregister_alg(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			if (driver_algs[i].is_registered)
				crypto_unregister_alg(
						&driver_algs[i].alg.crypto);
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			if (driver_algs[i].is_registered)
				crypto_unregister_aead(
						&driver_algs[i].alg.aead);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			if (driver_algs[i].is_registered)
				crypto_unregister_ahash(
						&driver_algs[i].alg.hash);
			break;
		}
		driver_algs[i].is_registered = 0;
	}
	return 0;
}
#define SZ_AHASH_CTX sizeof(struct chcr_context)
#define SZ_AHASH_H_CTX (sizeof(struct chcr_context) + sizeof(struct hmac_ctx))
#define SZ_AHASH_REQ_CTX sizeof(struct chcr_ahash_req_ctx)
#define AHASH_CRA_FLAGS (CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC)
/*
 *	chcr_register_alg - Register crypto algorithms with kernel framework.
 */
static int chcr_register_alg(void)
{
	struct crypto_alg ai;
	struct ahash_alg *a_hash;
	int err = 0, i;
	char *name = NULL;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if (driver_algs[i].is_registered)
			continue;
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			err = crypto_register_alg(&driver_algs[i].alg.crypto);
			name = driver_algs[i].alg.crypto.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			driver_algs[i].alg.aead.base.cra_priority =
				CHCR_CRA_PRIORITY;
			driver_algs[i].alg.aead.base.cra_flags =
				CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
			driver_algs[i].alg.aead.encrypt = chcr_aead_encrypt;
			driver_algs[i].alg.aead.decrypt = chcr_aead_decrypt;
			driver_algs[i].alg.aead.init = chcr_aead_cra_init;
			driver_algs[i].alg.aead.exit = chcr_aead_cra_exit;
			driver_algs[i].alg.aead.base.cra_module = THIS_MODULE;
			err = crypto_register_aead(&driver_algs[i].alg.aead);
			name = driver_algs[i].alg.aead.base.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			a_hash = &driver_algs[i].alg.hash;
			a_hash->update = chcr_ahash_update;
			a_hash->final = chcr_ahash_final;
			a_hash->finup = chcr_ahash_finup;
			a_hash->digest = chcr_ahash_digest;
			a_hash->export = chcr_ahash_export;
			a_hash->import = chcr_ahash_import;
			a_hash->halg.statesize = SZ_AHASH_REQ_CTX;
			a_hash->halg.base.cra_priority = CHCR_CRA_PRIORITY;
			a_hash->halg.base.cra_module = THIS_MODULE;
			a_hash->halg.base.cra_flags = AHASH_CRA_FLAGS;
			a_hash->halg.base.cra_alignmask = 0;
			a_hash->halg.base.cra_exit = NULL;
			a_hash->halg.base.cra_type = &crypto_ahash_type;

			if (driver_algs[i].type == CRYPTO_ALG_TYPE_HMAC) {
				a_hash->halg.base.cra_init = chcr_hmac_cra_init;
				a_hash->halg.base.cra_exit = chcr_hmac_cra_exit;
				a_hash->init = chcr_hmac_init;
				a_hash->setkey = chcr_ahash_setkey;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_H_CTX;
			} else {
				a_hash->init = chcr_sha_init;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_CTX;
				a_hash->halg.base.cra_init = chcr_sha_cra_init;
			}
			err = crypto_register_ahash(&driver_algs[i].alg.hash);
			ai = driver_algs[i].alg.hash.halg.base;
			name = ai.cra_driver_name;
			break;
		}
		if (err) {
			pr_err("chcr : %s : Algorithm registration failed\n",
			       name);
			goto register_err;
		}
		driver_algs[i].is_registered = 1;
	}
	return 0;

register_err:
	chcr_unregister_alg();
	return err;
}
/*
 *	start_crypto - Register the crypto algorithms.
 *	This should be called once when the first device comes up. After this,
 *	the kernel will start calling the driver APIs for crypto operations.
 */
int start_crypto(void)
{
	return chcr_register_alg();
}
/*
 *	stop_crypto - Deregister all the crypto algorithms with the kernel.
 *	This should be called once when the last device goes down. After this,
 *	the kernel will not call the driver API for crypto operations.
 */
int stop_crypto(void)
{
	chcr_unregister_alg();
	return 0;
}
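/* Note (editorial, not from the original source): start_crypto() and
 * stop_crypto() are provided to the chcr core so that algorithm registration
 * follows device state, roughly:
 *
 *	first T6 crypto device attached  -> start_crypto()
 *	last T6 crypto device removed    -> stop_crypto()
 *
 * The exact call sites live in chcr_core.c.
 */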