// SPDX-License-Identifier: GPL-2.0
/*
 * K3 SA2UL crypto accelerator driver
 *
 * Copyright (C) 2018-2020 Texas Instruments Incorporated - http://www.ti.com
 */

#include <linux/bitfield.h>
#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/dmapool.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_platform.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>

#include "sa2ul.h"

/* Byte offset for key in encryption security context */
#define SC_ENC_KEY_OFFSET	(1 + 27 + 4)
/* Byte offset for Aux-1 in encryption security context */
#define SC_ENC_AUX1_OFFSET	(1 + 27 + 4 + 32)
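/*
 * Illustration (derived from the two offsets above, not an authoritative
 * layout): the encryption security context begins with one mode-selector
 * byte, then 27 mode control instruction bytes and 4 reserved bytes, so
 * the key slot starts at byte 32 and the Aux-1 slot follows the 32-byte
 * key area.
 */
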
#define SA_CMDL_UPD_ENC		0x0001
#define SA_CMDL_UPD_AUTH	0x0002
#define SA_CMDL_UPD_ENC_IV	0x0004
#define SA_CMDL_UPD_AUTH_IV	0x0008
#define SA_CMDL_UPD_AUX_KEY	0x0010

#define SA_AUTH_SUBKEY_LEN	16
#define SA_CMDL_PAYLOAD_LENGTH_MASK	0xFFFF
#define SA_CMDL_SOP_BYPASS_LEN_MASK	0xFF000000

#define MODE_CONTROL_BYTES	27
#define SA_HASH_PROCESSING	0
#define SA_CRYPTO_PROCESSING	0x80
#define SA_UPLOAD_HASH_TO_TLR	BIT(6)

#define SA_SW0_FLAGS_MASK	0xF0000
#define SA_SW0_CMDL_INFO_MASK	0x1F00000
#define SA_SW0_CMDL_PRESENT	BIT(4)
#define SA_SW0_ENG_ID_MASK	0x3E000000
#define SA_SW0_DEST_INFO_PRESENT	BIT(30)
#define SA_SW2_EGRESS_LENGTH		0xFF000000
#define SA_BASIC_HASH		0x10

#define SHA256_DIGEST_WORDS	8
/* Make 32-bit word from 4 bytes */
#define SA_MK_U32(b0, b1, b2, b3) (((b0) << 24) | ((b1) << 16) | \
				   ((b2) << 8) | (b3))

/* size of SCCTL structure in bytes */
#define SA_SCCTL_SZ 16

/* Max Authentication tag size */
#define SA_MAX_AUTH_TAG_SZ 64

enum sa_algo_id {
	SA_ALG_CBC_AES = 0,
	SA_ALG_EBC_AES,
	SA_ALG_CBC_DES3,
	SA_ALG_ECB_DES3,
	SA_ALG_SHA1,
	SA_ALG_SHA256,
	SA_ALG_SHA512,
	SA_ALG_AUTHENC_SHA1_AES,
	SA_ALG_AUTHENC_SHA256_AES,
};

struct sa_match_data {
	u8 priv;
	u8 priv_id;
	u32 supported_algos;
};

static struct device *sa_k3_dev;

/**
 * struct sa_cmdl_cfg - Command label configuration descriptor
 * @aalg: authentication algorithm ID
 * @enc_eng_id: Encryption Engine ID supported by the SA hardware
 * @auth_eng_id: Authentication Engine ID
 * @iv_size: Initialization Vector size
 * @akey: Authentication key
 * @akey_len: Authentication key length
 * @enc: True, if this is an encode request
 */
struct sa_cmdl_cfg {
	int aalg;
	u8 enc_eng_id;
	u8 auth_eng_id;
	u8 iv_size;
	const u8 *akey;
	u16 akey_len;
	bool enc;
};

/**
 * struct algo_data - Crypto algorithm specific data
 * @enc_eng: Encryption engine info structure
 * @auth_eng: Authentication engine info structure
 * @auth_ctrl: Authentication control word
 * @hash_size: Size of digest
 * @iv_idx: iv index in psdata
 * @iv_out_size: iv out size
 * @ealg_id: Encryption Algorithm ID
 * @aalg_id: Authentication algorithm ID
 * @mci_enc: Mode Control Instruction for Encryption algorithm
 * @mci_dec: Mode Control Instruction for Decryption
 * @inv_key: Whether the encryption algorithm demands key inversion
 * @ctx: Pointer to the algorithm context
 * @keyed_mac: Whether the authentication algorithm has key
 * @prep_iopad: Function pointer to generate intermediate ipad/opad
 */
struct algo_data {
	struct sa_eng_info enc_eng;
	struct sa_eng_info auth_eng;
	u8 auth_ctrl;
	u8 hash_size;
	u8 iv_idx;
	u8 iv_out_size;
	u8 ealg_id;
	u8 aalg_id;
	u8 *mci_enc;
	u8 *mci_dec;
	bool inv_key;
	struct sa_tfm_ctx *ctx;
	bool keyed_mac;
	void (*prep_iopad)(struct algo_data *algo, const u8 *key,
			   u16 key_sz, __be32 *ipad, __be32 *opad);
};

/**
 * struct sa_alg_tmpl: A generic template encompassing crypto/aead algorithms
 * @type: Type of the crypto algorithm.
 * @alg: Union of crypto algorithm definitions.
 * @registered: Flag indicating if the crypto algorithm is already registered
 */
struct sa_alg_tmpl {
	u32 type;		/* CRYPTO_ALG_TYPE from <linux/crypto.h> */
	union {
		struct skcipher_alg skcipher;
		struct ahash_alg ahash;
		struct aead_alg aead;
	} alg;
	bool registered;
};

/**
 * struct sa_mapped_sg: scatterlist information for tx and rx
 * @mapped: Set to true if the @sgt is mapped
 * @dir: mapping direction used for @sgt
 * @split_sg: Set if the sg is split and needs to be freed up
 * @static_sg: Static scatterlist entry for overriding data
 * @sgt: scatterlist table for DMA API use
 */
struct sa_mapped_sg {
	bool mapped;
	enum dma_data_direction dir;
	struct scatterlist static_sg;
	struct scatterlist *split_sg;
	struct sg_table sgt;
};

/**
 * struct sa_rx_data: RX Packet miscellaneous data place holder
 * @req: crypto request data pointer
 * @ddev: pointer to the DMA device
 * @tx_in: dma_async_tx_descriptor pointer for rx channel
 * @mapped_sg: Information on tx (0) and rx (1) scatterlist DMA mapping
 * @enc: Flag indicating either encryption or decryption
 * @enc_iv_size: Initialisation vector size
 * @iv_idx: Initialisation vector index
 */
struct sa_rx_data {
	void *req;
	struct device *ddev;
	struct dma_async_tx_descriptor *tx_in;
	struct sa_mapped_sg mapped_sg[2];
	u8 enc;
	u8 enc_iv_size;
	u8 iv_idx;
};

/**
 * struct sa_req: SA request definition
 * @dev: device for the request
 * @size: total data to the xmitted via DMA
 * @enc_offset: offset of cipher data
 * @enc_size: data to be passed to cipher engine
 * @enc_iv: cipher IV
 * @auth_offset: offset of the authentication data
 * @auth_size: size of the authentication data
 * @auth_iv: authentication IV
 * @type: algorithm type for the request
 * @cmdl: command label pointer
 * @base: pointer to the base request
 * @ctx: pointer to the algorithm context data
 * @enc: true if this is an encode request
 * @src: source data
 * @dst: destination data
 * @callback: DMA callback for the request
 * @mdata_size: metadata size passed to DMA
 */
struct sa_req {
	struct device *dev;
	u16 size;
	u8 enc_offset;
	u16 enc_size;
	u8 *enc_iv;
	u8 auth_offset;
	u16 auth_size;
	u8 *auth_iv;
	u32 type;
	u32 *cmdl;
	struct crypto_async_request *base;
	struct sa_tfm_ctx *ctx;
	bool enc;
	struct scatterlist *src;
	struct scatterlist *dst;
	dma_async_tx_callback callback;
	u16 mdata_size;
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for encryption
 */
static u8 mci_cbc_enc_array[3][MODE_CONTROL_BYTES] = {
	{	0x61, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x61, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x61, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for decryption
 */
static u8 mci_cbc_dec_array[3][MODE_CONTROL_BYTES] = {
	{	0x71, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x71, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x71, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for encryption, without IV update
 */
static u8 mci_cbc_enc_no_iv_array[3][MODE_CONTROL_BYTES] = {
	{	0x21, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x21, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x21, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for decryption, without IV update
 */
static u8 mci_cbc_dec_no_iv_array[3][MODE_CONTROL_BYTES] = {
	{	0x31, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x31, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x31, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For ECB (Electronic Code Book) mode for encryption
 */
static u8 mci_ecb_enc_array[3][MODE_CONTROL_BYTES] = {
	{	0x21, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x21, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x21, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For ECB (Electronic Code Book) mode for decryption
 */
static u8 mci_ecb_dec_array[3][MODE_CONTROL_BYTES] = {
	{	0x31, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x31, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
	{	0x31, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00	},
};

/*
 * Mode Control Instructions for the DES algorithm
 * For CBC (Cipher Block Chaining) mode and ECB mode,
 * for encryption and decryption respectively
 */
static u8 mci_cbc_3des_enc_array[MODE_CONTROL_BYTES] = {
	0x60, 0x00, 0x00, 0x18, 0x88, 0x52, 0xaa, 0x4b, 0x7e, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00,
};

static u8 mci_cbc_3des_dec_array[MODE_CONTROL_BYTES] = {
	0x70, 0x00, 0x00, 0x85, 0x0a, 0xca, 0x98, 0xf4, 0x40, 0xc0, 0x00, 0x00,
	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00,
};

static u8 mci_ecb_3des_enc_array[MODE_CONTROL_BYTES] = {
	0x20, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00,
};

static u8 mci_ecb_3des_dec_array[MODE_CONTROL_BYTES] = {
	0x30, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00,
};

/*
 * Perform 16 byte or 128 bit swizzling
 * The SA2UL expects the security context to
 * be in little endian and the bus width is 128 bits or 16 bytes
 * Hence swap 16 bytes at a time from higher to lower address
 */
static void sa_swiz_128(u8 *in, u16 len)
{
	u8 data[16];
	int i, j;

	for (i = 0; i < len; i += 16) {
		memcpy(data, &in[i], 16);
		for (j = 0; j < 16; j++)
			in[i + j] = data[15 - j];
	}
}

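/*
 * Worked example: with len = 32, bytes 0x00..0x1f become 0x0f..0x00
 * followed by 0x1f..0x10, i.e. each aligned 16-byte group is reversed
 * in place while the groups themselves keep their order.
 */
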
/* Prepare the ipad and opad from key as per SHA algorithm step 1 */
static void prepare_kipad(u8 *k_ipad, const u8 *key, u16 key_sz)
{
	int i;

	for (i = 0; i < key_sz; i++)
		k_ipad[i] = key[i] ^ 0x36;

	/* Instead of XOR with 0 */
	for (; i < SHA1_BLOCK_SIZE; i++)
		k_ipad[i] = 0x36;
}

static void prepare_kopad(u8 *k_opad, const u8 *key, u16 key_sz)
{
	int i;

	for (i = 0; i < key_sz; i++)
		k_opad[i] = key[i] ^ 0x5c;

	/* Instead of XOR with 0 */
	for (; i < SHA1_BLOCK_SIZE; i++)
		k_opad[i] = 0x5c;
}

static void sa_export_shash(void *state, struct shash_desc *hash,
			    int digest_size, __be32 *out)
{
	struct sha1_state *sha1;
	struct sha256_state *sha256;
	u32 *result;

	switch (digest_size) {
	case SHA1_DIGEST_SIZE:
		sha1 = state;
		result = sha1->state;
		break;
	case SHA256_DIGEST_SIZE:
		sha256 = state;
		result = sha256->state;
		break;
	default:
		dev_err(sa_k3_dev, "%s: bad digest_size=%d\n", __func__,
			digest_size);
		return;
	}

	crypto_shash_export(hash, state);

	cpu_to_be32_array(out, result, digest_size / 4);
}

static void sa_prepare_iopads(struct algo_data *data, const u8 *key,
			      u16 key_sz, __be32 *ipad, __be32 *opad)
{
	SHASH_DESC_ON_STACK(shash, data->ctx->shash);
	int block_size = crypto_shash_blocksize(data->ctx->shash);
	int digest_size = crypto_shash_digestsize(data->ctx->shash);
	union {
		struct sha1_state sha1;
		struct sha256_state sha256;
		u8 k_pad[SHA1_BLOCK_SIZE];
	} sha;

	shash->tfm = data->ctx->shash;

	prepare_kipad(sha.k_pad, key, key_sz);

	crypto_shash_init(shash);
	crypto_shash_update(shash, sha.k_pad, block_size);
	sa_export_shash(&sha, shash, digest_size, ipad);

	prepare_kopad(sha.k_pad, key, key_sz);

	crypto_shash_init(shash);
	crypto_shash_update(shash, sha.k_pad, block_size);
	sa_export_shash(&sha, shash, digest_size, opad);

	memzero_explicit(&sha, sizeof(sha));
}

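/*
 * This mirrors HMAC (RFC 2104) key preparation: the hardware is handed
 * the intermediate digests of (key ^ ipad) and (key ^ opad) rather than
 * the raw key. Note the hash state is exported after hashing exactly one
 * block, before any finalization padding, which is the point the engine
 * resumes from for each packet.
 */
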
/* Derive the inverse key used in AES-CBC decryption operation */
static inline int sa_aes_inv_key(u8 *inv_key, const u8 *key, u16 key_sz)
{
	struct crypto_aes_ctx ctx;
	int key_pos;

	if (aes_expandkey(&ctx, key, key_sz)) {
		dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
		return -EINVAL;
	}

	/* workaround to get the right inverse for AES_KEYSIZE_192 size keys */
	if (key_sz == AES_KEYSIZE_192) {
		ctx.key_enc[52] = ctx.key_enc[51] ^ ctx.key_enc[46];
		ctx.key_enc[53] = ctx.key_enc[52] ^ ctx.key_enc[47];
	}

	/* Based on crypto_aes_expand_key logic */
	switch (key_sz) {
	case AES_KEYSIZE_128:
	case AES_KEYSIZE_192:
		key_pos = key_sz + 24;
		break;

	case AES_KEYSIZE_256:
		key_pos = key_sz + 24 - 4;
		break;

	default:
		dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
		return -EINVAL;
	}

	memcpy(inv_key, &ctx.key_enc[key_pos], key_sz);
	return 0;
}

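/*
 * Rationale: AES decryption consumes the key schedule backwards, so the
 * hardware wants the tail of the expanded encryption key schedule (the
 * last round keys) rather than the user key; key_pos above selects that
 * tail for each key size.
 */
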
/* Set Security context for the encryption engine */
static int sa_set_sc_enc(struct algo_data *ad, const u8 *key, u16 key_sz,
			 u8 enc, u8 *sc_buf)
{
	const u8 *mci = NULL;

	/* Set Encryption mode selector to crypto processing */
	sc_buf[0] = SA_CRYPTO_PROCESSING;

	if (enc)
		mci = ad->mci_enc;
	else
		mci = ad->mci_dec;
	/* Set the mode control instructions in security context */
	if (mci)
		memcpy(&sc_buf[1], mci, MODE_CONTROL_BYTES);

	/* For AES-CBC decryption get the inverse key */
	if (ad->inv_key && !enc) {
		if (sa_aes_inv_key(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz))
			return -EINVAL;
	/* For all other cases: key is used */
	} else {
		memcpy(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz);
	}

	return 0;
}

/* Set Security context for the authentication engine */
static void sa_set_sc_auth(struct algo_data *ad, const u8 *key, u16 key_sz,
			   u8 *sc_buf)
{
	__be32 *ipad = (void *)(sc_buf + 32);
	__be32 *opad = (void *)(sc_buf + 64);

	/* Set Authentication mode selector to hash processing */
	sc_buf[0] = SA_HASH_PROCESSING;
	/* Auth SW ctrl word: bit[6]=1 (upload computed hash to TLR section) */
	sc_buf[1] = SA_UPLOAD_HASH_TO_TLR;
	sc_buf[1] |= ad->auth_ctrl;

	/* Copy the keys or ipad/opad */
	if (ad->keyed_mac)
		ad->prep_iopad(ad, key, key_sz, ipad, opad);
	else {
		/* basic hash */
		sc_buf[1] |= SA_BASIC_HASH;
	}
}

static inline void sa_copy_iv(__be32 *out, const u8 *iv, bool size16)
{
	int j;

	for (j = 0; j < ((size16) ? 4 : 2); j++) {
		*out = cpu_to_be32(*((u32 *)iv));
		iv += 4;
		out++;
	}
}

/* Format general command label */
static int sa_format_cmdl_gen(struct sa_cmdl_cfg *cfg, u8 *cmdl,
			      struct sa_cmdl_upd_info *upd_info)
{
	u8 enc_offset = 0, auth_offset = 0, total = 0;
	u8 enc_next_eng = SA_ENG_ID_OUTPORT2;
	u8 auth_next_eng = SA_ENG_ID_OUTPORT2;
	u32 *word_ptr = (u32 *)cmdl;
	int i;

	/* Clear the command label */
	memzero_explicit(cmdl, (SA_MAX_CMDL_WORDS * sizeof(u32)));

	/* Initialize the command update structure */
	memzero_explicit(upd_info, sizeof(*upd_info));

	if (cfg->enc_eng_id && cfg->auth_eng_id) {
		if (cfg->enc) {
			auth_offset = SA_CMDL_HEADER_SIZE_BYTES;
			enc_next_eng = cfg->auth_eng_id;

			if (cfg->iv_size)
				auth_offset += cfg->iv_size;
		} else {
			enc_offset = SA_CMDL_HEADER_SIZE_BYTES;
			auth_next_eng = cfg->enc_eng_id;
		}
	}

	if (cfg->enc_eng_id) {
		upd_info->flags |= SA_CMDL_UPD_ENC;
		upd_info->enc_size.index = enc_offset >> 2;
		upd_info->enc_offset.index = upd_info->enc_size.index + 1;
		/* Encryption command label */
		cmdl[enc_offset + SA_CMDL_OFFSET_NESC] = enc_next_eng;

		/* Encryption modes requiring IV */
		if (cfg->iv_size) {
			upd_info->flags |= SA_CMDL_UPD_ENC_IV;
			upd_info->enc_iv.index =
				(enc_offset + SA_CMDL_HEADER_SIZE_BYTES) >> 2;
			upd_info->enc_iv.size = cfg->iv_size;

			cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
				SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;

			cmdl[enc_offset + SA_CMDL_OFFSET_OPTION_CTRL1] =
				(SA_CTX_ENC_AUX2_OFFSET | (cfg->iv_size >> 3));
			total += SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
		} else {
			cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
				SA_CMDL_HEADER_SIZE_BYTES;
			total += SA_CMDL_HEADER_SIZE_BYTES;
		}
	}

	if (cfg->auth_eng_id) {
		upd_info->flags |= SA_CMDL_UPD_AUTH;
		upd_info->auth_size.index = auth_offset >> 2;
		upd_info->auth_offset.index = upd_info->auth_size.index + 1;
		cmdl[auth_offset + SA_CMDL_OFFSET_NESC] = auth_next_eng;
		cmdl[auth_offset + SA_CMDL_OFFSET_LABEL_LEN] =
			SA_CMDL_HEADER_SIZE_BYTES;
		total += SA_CMDL_HEADER_SIZE_BYTES;
	}

	total = roundup(total, 8);

	for (i = 0; i < total / 4; i++)
		word_ptr[i] = swab32(word_ptr[i]);

	return total;
}

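/*
 * Example result (illustrative): for AES-CBC encryption with both
 * engines active and a 16-byte IV, the label is an encryption header
 * plus IV at offset 0 followed by the authentication header, padded up
 * to a multiple of 8 bytes and byte-swapped per 32-bit word for the
 * engine.
 */
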
/* Update Command label */
static inline void sa_update_cmdl(struct sa_req *req, u32 *cmdl,
				  struct sa_cmdl_upd_info *upd_info)
{
	int i = 0, j;

	if (likely(upd_info->flags & SA_CMDL_UPD_ENC)) {
		cmdl[upd_info->enc_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
		cmdl[upd_info->enc_size.index] |= req->enc_size;
		cmdl[upd_info->enc_offset.index] &=
						~SA_CMDL_SOP_BYPASS_LEN_MASK;
		cmdl[upd_info->enc_offset.index] |=
			FIELD_PREP(SA_CMDL_SOP_BYPASS_LEN_MASK,
				   req->enc_offset);

		if (likely(upd_info->flags & SA_CMDL_UPD_ENC_IV)) {
			__be32 *data = (__be32 *)&cmdl[upd_info->enc_iv.index];
			u32 *enc_iv = (u32 *)req->enc_iv;

			for (j = 0; i < upd_info->enc_iv.size; i += 4, j++) {
				data[j] = cpu_to_be32(*enc_iv);
				enc_iv++;
			}
		}
	}

	if (likely(upd_info->flags & SA_CMDL_UPD_AUTH)) {
		cmdl[upd_info->auth_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
		cmdl[upd_info->auth_size.index] |= req->auth_size;
		cmdl[upd_info->auth_offset.index] &=
			~SA_CMDL_SOP_BYPASS_LEN_MASK;
		cmdl[upd_info->auth_offset.index] |=
			FIELD_PREP(SA_CMDL_SOP_BYPASS_LEN_MASK,
				   req->auth_offset);
		if (upd_info->flags & SA_CMDL_UPD_AUTH_IV) {
			sa_copy_iv((void *)&cmdl[upd_info->auth_iv.index],
				   req->auth_iv,
				   (upd_info->auth_iv.size > 8));
		}
		if (upd_info->flags & SA_CMDL_UPD_AUX_KEY) {
			int offset = (req->auth_size & 0xF) ? 4 : 0;

			memcpy(&cmdl[upd_info->aux_key_info.index],
			       &upd_info->aux_key[offset], 16);
		}
	}
}

/* Format SWINFO words to be sent to SA */
static void sa_set_swinfo(u8 eng_id, u16 sc_id, dma_addr_t sc_phys,
			  u8 cmdl_present, u8 cmdl_offset, u8 flags,
			  u8 hash_size, u32 *swinfo)
{
	swinfo[0] = sc_id;
	swinfo[0] |= FIELD_PREP(SA_SW0_FLAGS_MASK, flags);
	if (likely(cmdl_present))
		swinfo[0] |= FIELD_PREP(SA_SW0_CMDL_INFO_MASK,
					cmdl_offset | SA_SW0_CMDL_PRESENT);
	swinfo[0] |= FIELD_PREP(SA_SW0_ENG_ID_MASK, eng_id);

	swinfo[0] |= SA_SW0_DEST_INFO_PRESENT;
	swinfo[1] = (u32)(sc_phys & 0xFFFFFFFFULL);
	swinfo[2] = (u32)((sc_phys & 0xFFFFFFFF00000000ULL) >> 32);
	swinfo[2] |= FIELD_PREP(SA_SW2_EGRESS_LENGTH, hash_size);
}

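/*
 * SW word usage, per the masks defined above: SW0 packs the context ID,
 * flags, command label info and the first engine ID; SW1/SW2 hold the
 * low/high halves of the 64-bit context physical address, with the
 * egress (hash) length folded into SW2's top byte.
 */
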
/* Dump the security context */
static void sa_dump_sc(u8 *buf, dma_addr_t dma_addr)
{
#ifdef DEBUG
	dev_info(sa_k3_dev, "Security context dump:: 0x%pad\n", &dma_addr);
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
		       16, 1, buf, SA_CTX_MAX_SZ, false);
#endif
}

static int sa_init_sc(struct sa_ctx_info *ctx,
		      const struct sa_match_data *match_data,
		      const u8 *enc_key, u16 enc_key_sz,
		      const u8 *auth_key, u16 auth_key_sz,
		      struct algo_data *ad, u8 enc, u32 *swinfo)
{
	int enc_sc_offset = 0;
	int auth_sc_offset = 0;
	u8 *sc_buf = ctx->sc;
	u16 sc_id = ctx->sc_id;
	u8 first_engine = 0;

	memzero_explicit(sc_buf, SA_CTX_MAX_SZ);

	if (ad->auth_eng.eng_id) {
		if (enc)
			first_engine = ad->enc_eng.eng_id;
		else
			first_engine = ad->auth_eng.eng_id;

		enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
		auth_sc_offset = enc_sc_offset + ad->enc_eng.sc_size;
		sc_buf[1] = SA_SCCTL_FE_AUTH_ENC;
		if (!ad->hash_size)
			return -EINVAL;
		ad->hash_size = roundup(ad->hash_size, 8);

	} else if (ad->enc_eng.eng_id && !ad->auth_eng.eng_id) {
		enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
		first_engine = ad->enc_eng.eng_id;
		sc_buf[1] = SA_SCCTL_FE_ENC;
		ad->hash_size = ad->iv_out_size;
	}

	/* SCCTL Owner info: 0=host, 1=CP_ACE */
	sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0;
	memcpy(&sc_buf[2], &sc_id, 2);
	sc_buf[4] = 0x0;
	sc_buf[5] = match_data->priv_id;
	sc_buf[6] = match_data->priv;
	sc_buf[7] = 0x0;

	/* Prepare context for encryption engine */
	if (ad->enc_eng.sc_size) {
		if (sa_set_sc_enc(ad, enc_key, enc_key_sz, enc,
				  &sc_buf[enc_sc_offset]))
			return -EINVAL;
	}

	/* Prepare context for authentication engine */
	if (ad->auth_eng.sc_size)
		sa_set_sc_auth(ad, auth_key, auth_key_sz,
			       &sc_buf[auth_sc_offset]);

	/* Set the ownership of context to CP_ACE */
	sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0x80;

	/* swizzle the security context */
	sa_swiz_128(sc_buf, SA_CTX_MAX_SZ);

	sa_set_swinfo(first_engine, ctx->sc_id, ctx->sc_phys, 1, 0,
		      SA_SW_INFO_FLAG_EVICT, ad->hash_size, swinfo);

	sa_dump_sc(sc_buf, ctx->sc_phys);

	return 0;
}

/* Free the per direction context memory */
static void sa_free_ctx_info(struct sa_ctx_info *ctx,
			     struct sa_crypto_data *data)
{
	unsigned long bn;

	bn = ctx->sc_id - data->sc_id_start;
	spin_lock(&data->scid_lock);
	__clear_bit(bn, data->ctx_bm);
	data->sc_id--;
	spin_unlock(&data->scid_lock);

	if (ctx->sc)
		dma_pool_free(data->sc_pool, ctx->sc, ctx->sc_phys);
}

static int sa_init_ctx_info(struct sa_ctx_info *ctx,
			    struct sa_crypto_data *data)
{
	unsigned long bn;
	int err;

	spin_lock(&data->scid_lock);
	bn = find_first_zero_bit(data->ctx_bm, SA_MAX_NUM_CTX);
	__set_bit(bn, data->ctx_bm);
	data->sc_id++;
	spin_unlock(&data->scid_lock);

	ctx->sc_id = (u16)(data->sc_id_start + bn);

	ctx->sc = dma_pool_alloc(data->sc_pool, GFP_KERNEL, &ctx->sc_phys);
	if (!ctx->sc) {
		dev_err(&data->pdev->dev, "Failed to allocate SC memory\n");
		err = -ENOMEM;
		goto scid_rollback;
	}

	return 0;

scid_rollback:
	spin_lock(&data->scid_lock);
	__clear_bit(bn, data->ctx_bm);
	data->sc_id--;
	spin_unlock(&data->scid_lock);

	return err;
}

static void sa_cipher_cra_exit(struct crypto_skcipher *tfm)
{
	struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);

	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);

	sa_free_ctx_info(&ctx->enc, data);
	sa_free_ctx_info(&ctx->dec, data);

	crypto_free_skcipher(ctx->fallback.skcipher);
}

static int sa_cipher_cra_init(struct crypto_skcipher *tfm)
{
	struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct crypto_skcipher *child;
	int ret;

	memzero_explicit(ctx, sizeof(*ctx));
	ctx->dev_data = data;

	ret = sa_init_ctx_info(&ctx->enc, data);
	if (ret)
		return ret;
	ret = sa_init_ctx_info(&ctx->dec, data);
	if (ret) {
		sa_free_ctx_info(&ctx->enc, data);
		return ret;
	}

	child = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(child)) {
		dev_err(sa_k3_dev, "Error allocating fallback algo %s\n", name);
		return PTR_ERR(child);
	}

	ctx->fallback.skcipher = child;
	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
					 sizeof(struct skcipher_request));

	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);

	return 0;
}

static int sa_cipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen, struct algo_data *ad)
{
	struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->fallback.skcipher;
	int cmdl_len;
	struct sa_cmdl_cfg cfg;
	int ret;

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	ad->enc_eng.eng_id = SA_ENG_ID_EM1;
	ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;

	memzero_explicit(&cfg, sizeof(cfg));
	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cfg.iv_size = crypto_skcipher_ivsize(tfm);

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, tfm->base.crt_flags &
					 CRYPTO_TFM_REQ_MASK);
	ret = crypto_skcipher_setkey(child, key, keylen);
	if (ret)
		return ret;

	/* Setup Encryption Security Context & Command label template */
	if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, key, keylen,
		       NULL, 0, ad, 1, &ctx->enc.epib[1]))
		goto badkey;

	cmdl_len = sa_format_cmdl_gen(&cfg,
				      (u8 *)ctx->enc.cmdl,
				      &ctx->enc.cmdl_upd_info);
	if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
		goto badkey;

	ctx->enc.cmdl_size = cmdl_len;

	/* Setup Decryption Security Context & Command label template */
	if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, key, keylen,
		       NULL, 0, ad, 0, &ctx->dec.epib[1]))
		goto badkey;

	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
				      &ctx->dec.cmdl_upd_info);

	if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
		goto badkey;

	ctx->dec.cmdl_size = cmdl_len;
	ctx->iv_idx = ad->iv_idx;

	return 0;

badkey:
	dev_err(sa_k3_dev, "%s: badkey\n", __func__);
	return -EINVAL;
}

static int sa_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct algo_data ad = { 0 };
	/* Convert the key size (16/24/32) to the key size index (0/1/2) */
	int key_idx = (keylen >> 3) - 2;

	if (key_idx >= 3)
		return -EINVAL;

	ad.mci_enc = mci_cbc_enc_array[key_idx];
	ad.mci_dec = mci_cbc_dec_array[key_idx];
	ad.inv_key = true;
	ad.ealg_id = SA_EALG_ID_AES_CBC;
	ad.iv_idx = 4;
	ad.iv_out_size = 16;

	return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static int sa_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct algo_data ad = { 0 };
	/* Convert the key size (16/24/32) to the key size index (0/1/2) */
	int key_idx = (keylen >> 3) - 2;

	if (key_idx >= 3)
		return -EINVAL;

	ad.mci_enc = mci_ecb_enc_array[key_idx];
	ad.mci_dec = mci_ecb_dec_array[key_idx];
	ad.inv_key = true;
	ad.ealg_id = SA_EALG_ID_AES_ECB;

	return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static int sa_3des_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct algo_data ad = { 0 };

	ad.mci_enc = mci_cbc_3des_enc_array;
	ad.mci_dec = mci_cbc_3des_dec_array;
	ad.ealg_id = SA_EALG_ID_3DES_CBC;
	ad.iv_idx = 6;
	ad.iv_out_size = 8;

	return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static int sa_3des_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct algo_data ad = { 0 };

	ad.mci_enc = mci_ecb_3des_enc_array;
	ad.mci_dec = mci_ecb_3des_dec_array;

	return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static void sa_sync_from_device(struct sa_rx_data *rxd)
{
	struct sg_table *sgt;

	if (rxd->mapped_sg[0].dir == DMA_BIDIRECTIONAL)
		sgt = &rxd->mapped_sg[0].sgt;
	else
		sgt = &rxd->mapped_sg[1].sgt;

	dma_sync_sgtable_for_cpu(rxd->ddev, sgt, DMA_FROM_DEVICE);
}

static void sa_free_sa_rx_data(struct sa_rx_data *rxd)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(rxd->mapped_sg); i++) {
		struct sa_mapped_sg *mapped_sg = &rxd->mapped_sg[i];

		if (mapped_sg->mapped) {
			dma_unmap_sgtable(rxd->ddev, &mapped_sg->sgt,
					  mapped_sg->dir, 0);
			kfree(mapped_sg->split_sg);
		}
	}

	kfree(rxd);
}

static void sa_aes_dma_in_callback(void *data)
{
	struct sa_rx_data *rxd = data;
	struct skcipher_request *req;
	u32 *result;
	__be32 *mdptr;
	size_t ml, pl;
	int i;

	sa_sync_from_device(rxd);
	req = container_of(rxd->req, struct skcipher_request, base);

	if (req->iv) {
		mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in,
								  &pl, &ml);
		result = (u32 *)req->iv;

		for (i = 0; i < (rxd->enc_iv_size / 4); i++)
			result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);
	}

	sa_free_sa_rx_data(rxd);

	skcipher_request_complete(req, 0);
}

static void
sa_prepare_tx_desc(u32 *mdptr, u32 pslen, u32 *psdata, u32 epiblen, u32 *epib)
{
	u32 *out, *in;
	int i;

	for (out = mdptr, in = epib, i = 0; i < epiblen / sizeof(u32); i++)
		*out++ = *in++;

	mdptr[4] = (0xFFFF << 16);
	for (out = &mdptr[5], in = psdata, i = 0;
	     i < pslen / sizeof(u32); i++)
		*out++ = *in++;
}

*req
)
1082 struct sa_rx_data
*rxd
;
1084 u32 cmdl
[SA_MAX_CMDL_WORDS
];
1085 struct sa_crypto_data
*pdata
= dev_get_drvdata(sa_k3_dev
);
1086 struct device
*ddev
;
1087 struct dma_chan
*dma_rx
;
1088 int sg_nents
, src_nents
, dst_nents
;
1089 struct scatterlist
*src
, *dst
;
1090 size_t pl
, ml
, split_size
;
1091 struct sa_ctx_info
*sa_ctx
= req
->enc
? &req
->ctx
->enc
: &req
->ctx
->dec
;
1093 struct dma_async_tx_descriptor
*tx_out
;
1096 enum dma_data_direction dir_src
;
1097 struct sa_mapped_sg
*mapped_sg
;
1099 gfp_flags
= req
->base
->flags
& CRYPTO_TFM_REQ_MAY_SLEEP
?
1100 GFP_KERNEL
: GFP_ATOMIC
;
1102 rxd
= kzalloc(sizeof(*rxd
), gfp_flags
);
1106 if (req
->src
!= req
->dst
) {
1108 dir_src
= DMA_TO_DEVICE
;
1111 dir_src
= DMA_BIDIRECTIONAL
;
1115 * SA2UL has an interesting feature where the receive DMA channel
1116 * is selected based on the data passed to the engine. Within the
1117 * transition range, there is also a space where it is impossible
1118 * to determine where the data will end up, and this should be
1119 * avoided. This will be handled by the SW fallback mechanism by
1120 * the individual algorithm implementations.
1122 if (req
->size
>= 256)
1123 dma_rx
= pdata
->dma_rx2
;
1125 dma_rx
= pdata
->dma_rx1
;
1127 ddev
= dmaengine_get_dma_device(pdata
->dma_tx
);
1130 memcpy(cmdl
, sa_ctx
->cmdl
, sa_ctx
->cmdl_size
);
1132 sa_update_cmdl(req
, cmdl
, &sa_ctx
->cmdl_upd_info
);
1134 if (req
->type
!= CRYPTO_ALG_TYPE_AHASH
) {
1137 (SA_REQ_SUBTYPE_ENC
<< SA_REQ_SUBTYPE_SHIFT
);
1140 (SA_REQ_SUBTYPE_DEC
<< SA_REQ_SUBTYPE_SHIFT
);
1143 cmdl
[sa_ctx
->cmdl_size
/ sizeof(u32
)] = req
->type
;
1146 * Map the packets, first we check if the data fits into a single
1147 * sg entry and use that if possible. If it does not fit, we check
1148 * if we need to do sg_split to align the scatterlist data on the
1149 * actual data size being processed by the crypto engine.
1152 sg_nents
= sg_nents_for_len(src
, req
->size
);
1154 split_size
= req
->size
;
1156 mapped_sg
= &rxd
->mapped_sg
[0];
1157 if (sg_nents
== 1 && split_size
<= req
->src
->length
) {
1158 src
= &mapped_sg
->static_sg
;
1160 sg_init_table(src
, 1);
1161 sg_set_page(src
, sg_page(req
->src
), split_size
,
1164 mapped_sg
->sgt
.sgl
= src
;
1165 mapped_sg
->sgt
.orig_nents
= src_nents
;
1166 ret
= dma_map_sgtable(ddev
, &mapped_sg
->sgt
, dir_src
, 0);
1172 mapped_sg
->dir
= dir_src
;
1173 mapped_sg
->mapped
= true;
1175 mapped_sg
->sgt
.sgl
= req
->src
;
1176 mapped_sg
->sgt
.orig_nents
= sg_nents
;
1177 ret
= dma_map_sgtable(ddev
, &mapped_sg
->sgt
, dir_src
, 0);
1183 mapped_sg
->dir
= dir_src
;
1184 mapped_sg
->mapped
= true;
1186 ret
= sg_split(mapped_sg
->sgt
.sgl
, mapped_sg
->sgt
.nents
, 0, 1,
1187 &split_size
, &src
, &src_nents
, gfp_flags
);
1189 src_nents
= mapped_sg
->sgt
.nents
;
1190 src
= mapped_sg
->sgt
.sgl
;
1192 mapped_sg
->split_sg
= src
;
1196 dma_sync_sgtable_for_device(ddev
, &mapped_sg
->sgt
, DMA_TO_DEVICE
);
1199 dst_nents
= src_nents
;
1202 dst_nents
= sg_nents_for_len(req
->dst
, req
->size
);
1203 mapped_sg
= &rxd
->mapped_sg
[1];
1205 if (dst_nents
== 1 && split_size
<= req
->dst
->length
) {
1206 dst
= &mapped_sg
->static_sg
;
1208 sg_init_table(dst
, 1);
1209 sg_set_page(dst
, sg_page(req
->dst
), split_size
,
1212 mapped_sg
->sgt
.sgl
= dst
;
1213 mapped_sg
->sgt
.orig_nents
= dst_nents
;
1214 ret
= dma_map_sgtable(ddev
, &mapped_sg
->sgt
,
1215 DMA_FROM_DEVICE
, 0);
1219 mapped_sg
->dir
= DMA_FROM_DEVICE
;
1220 mapped_sg
->mapped
= true;
1222 mapped_sg
->sgt
.sgl
= req
->dst
;
1223 mapped_sg
->sgt
.orig_nents
= dst_nents
;
1224 ret
= dma_map_sgtable(ddev
, &mapped_sg
->sgt
,
1225 DMA_FROM_DEVICE
, 0);
1229 mapped_sg
->dir
= DMA_FROM_DEVICE
;
1230 mapped_sg
->mapped
= true;
1232 ret
= sg_split(mapped_sg
->sgt
.sgl
, mapped_sg
->sgt
.nents
,
1233 0, 1, &split_size
, &dst
, &dst_nents
,
1236 dst_nents
= mapped_sg
->sgt
.nents
;
1237 dst
= mapped_sg
->sgt
.sgl
;
1239 mapped_sg
->split_sg
= dst
;
1244 rxd
->tx_in
= dmaengine_prep_slave_sg(dma_rx
, dst
, dst_nents
,
1246 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
1248 dev_err(pdata
->dev
, "IN prep_slave_sg() failed\n");
1253 rxd
->req
= (void *)req
->base
;
1254 rxd
->enc
= req
->enc
;
1255 rxd
->iv_idx
= req
->ctx
->iv_idx
;
1256 rxd
->enc_iv_size
= sa_ctx
->cmdl_upd_info
.enc_iv
.size
;
1257 rxd
->tx_in
->callback
= req
->callback
;
1258 rxd
->tx_in
->callback_param
= rxd
;
1260 tx_out
= dmaengine_prep_slave_sg(pdata
->dma_tx
, src
,
1261 src_nents
, DMA_MEM_TO_DEV
,
1262 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
1265 dev_err(pdata
->dev
, "OUT prep_slave_sg() failed\n");
1271 * Prepare metadata for DMA engine. This essentially describes the
1272 * crypto algorithm to be used, data sizes, different keys etc.
1274 mdptr
= (u32
*)dmaengine_desc_get_metadata_ptr(tx_out
, &pl
, &ml
);
1276 sa_prepare_tx_desc(mdptr
, (sa_ctx
->cmdl_size
+ (SA_PSDATA_CTX_WORDS
*
1277 sizeof(u32
))), cmdl
, sizeof(sa_ctx
->epib
),
1280 ml
= sa_ctx
->cmdl_size
+ (SA_PSDATA_CTX_WORDS
* sizeof(u32
));
1281 dmaengine_desc_set_metadata_len(tx_out
, req
->mdata_size
);
1283 dmaengine_submit(tx_out
);
1284 dmaengine_submit(rxd
->tx_in
);
1286 dma_async_issue_pending(dma_rx
);
1287 dma_async_issue_pending(pdata
->dma_tx
);
1289 return -EINPROGRESS
;
1292 sa_free_sa_rx_data(rxd
);
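/*
 * Completion model: only the RX descriptor carries a callback. The TX
 * descriptor feeds the engine and the processed data appears on the RX
 * channel, so sa_run() returns -EINPROGRESS and the request is finished
 * from the RX callback once the hardware has written everything back.
 */
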
static int sa_cipher_run(struct skcipher_request *req, u8 *iv, int enc)
{
	struct sa_tfm_ctx *ctx =
	    crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct crypto_alg *alg = req->base.tfm->__crt_alg;
	struct sa_req sa_req = { 0 };

	if (!req->cryptlen)
		return 0;

	if (req->cryptlen % alg->cra_blocksize)
		return -EINVAL;

	/* Use SW fallback if the data size is not supported */
	if (req->cryptlen > SA_MAX_DATA_SZ ||
	    (req->cryptlen >= SA_UNSAFE_DATA_SZ_MIN &&
	     req->cryptlen <= SA_UNSAFE_DATA_SZ_MAX)) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		skcipher_request_set_tfm(subreq, ctx->fallback.skcipher);
		skcipher_request_set_callback(subreq, req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
		if (enc)
			return crypto_skcipher_encrypt(subreq);
		else
			return crypto_skcipher_decrypt(subreq);
	}

	sa_req.size = req->cryptlen;
	sa_req.enc_size = req->cryptlen;
	sa_req.src = req->src;
	sa_req.dst = req->dst;
	sa_req.enc_iv = iv;
	sa_req.type = CRYPTO_ALG_TYPE_SKCIPHER;
	sa_req.enc = enc;
	sa_req.callback = sa_aes_dma_in_callback;
	sa_req.mdata_size = 44;
	sa_req.base = &req->base;
	sa_req.ctx = ctx;

	return sa_run(&sa_req);
}

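/*
 * The size check above implements the fallback policy described in
 * sa_run(): lengths inside the unsafe window bounded by
 * SA_UNSAFE_DATA_SZ_MIN/MAX (defined in sa2ul.h), or above
 * SA_MAX_DATA_SZ, are handed to the software fallback instead of the
 * engine.
 */
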
static int sa_encrypt(struct skcipher_request *req)
{
	return sa_cipher_run(req, req->iv, 1);
}

static int sa_decrypt(struct skcipher_request *req)
{
	return sa_cipher_run(req, req->iv, 0);
}

static void sa_sha_dma_in_callback(void *data)
{
	struct sa_rx_data *rxd = data;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	unsigned int authsize;
	int i;
	size_t ml, pl;
	u32 *result;
	__be32 *mdptr;

	sa_sync_from_device(rxd);
	req = container_of(rxd->req, struct ahash_request, base);
	tfm = crypto_ahash_reqtfm(req);
	authsize = crypto_ahash_digestsize(tfm);

	mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
	result = (u32 *)req->result;

	for (i = 0; i < (authsize / 4); i++)
		result[i] = be32_to_cpu(mdptr[i + 4]);

	sa_free_sa_rx_data(rxd);

	ahash_request_complete(req, 0);
}

static int zero_message_process(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int sa_digest_size = crypto_ahash_digestsize(tfm);

	switch (sa_digest_size) {
	case SHA1_DIGEST_SIZE:
		memcpy(req->result, sha1_zero_message_hash, sa_digest_size);
		break;
	case SHA256_DIGEST_SIZE:
		memcpy(req->result, sha256_zero_message_hash, sa_digest_size);
		break;
	case SHA512_DIGEST_SIZE:
		memcpy(req->result, sha512_zero_message_hash, sa_digest_size);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

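/*
 * The engine is never asked to hash a zero-length message; the kernel's
 * precomputed sha{1,256,512}_zero_message_hash constants are copied out
 * directly instead.
 */
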
static int sa_sha_run(struct ahash_request *req)
{
	struct sa_tfm_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
	struct sa_req sa_req = { 0 };
	size_t auth_len;

	auth_len = req->nbytes;

	if (!auth_len)
		return zero_message_process(req);

	if (auth_len > SA_MAX_DATA_SZ ||
	    (auth_len >= SA_UNSAFE_DATA_SZ_MIN &&
	     auth_len <= SA_UNSAFE_DATA_SZ_MAX)) {
		struct ahash_request *subreq = &rctx->fallback_req;
		int ret = 0;

		ahash_request_set_tfm(subreq, ctx->fallback.ahash);
		subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

		crypto_ahash_init(subreq);

		subreq->nbytes = auth_len;
		subreq->src = req->src;
		subreq->result = req->result;

		ret |= crypto_ahash_update(subreq);

		subreq->nbytes = 0;

		ret |= crypto_ahash_final(subreq);

		return ret;
	}

	sa_req.size = auth_len;
	sa_req.auth_size = auth_len;
	sa_req.src = req->src;
	sa_req.dst = req->src;
	sa_req.enc = true;
	sa_req.type = CRYPTO_ALG_TYPE_AHASH;
	sa_req.callback = sa_sha_dma_in_callback;
	sa_req.mdata_size = 28;
	sa_req.ctx = ctx;
	sa_req.base = &req->base;

	return sa_run(&sa_req);
}

static int sa_sha_setup(struct sa_tfm_ctx *ctx, struct algo_data *ad)
{
	int bs = crypto_shash_blocksize(ctx->shash);
	int cmdl_len;
	struct sa_cmdl_cfg cfg;

	ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
	ad->auth_eng.eng_id = SA_ENG_ID_AM1;
	ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;

	memset(ctx->authkey, 0, bs);
	memset(&cfg, 0, sizeof(cfg));
	cfg.aalg = ad->aalg_id;
	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cfg.auth_eng_id = ad->auth_eng.eng_id;
	cfg.iv_size = 0;
	cfg.akey = NULL;
	cfg.akey_len = 0;

	ctx->dev_data = dev_get_drvdata(sa_k3_dev);
	/* Setup Encryption Security Context & Command label template */
	if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, NULL, 0, NULL, 0,
		       ad, 0, &ctx->enc.epib[1]))
		goto badkey;

	cmdl_len = sa_format_cmdl_gen(&cfg,
				      (u8 *)ctx->enc.cmdl,
				      &ctx->enc.cmdl_upd_info);
	if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
		goto badkey;

	ctx->enc.cmdl_size = cmdl_len;

	return 0;

badkey:
	dev_err(sa_k3_dev, "%s: badkey\n", __func__);
	return -EINVAL;
}

static int sa_sha_cra_init_alg(struct crypto_tfm *tfm, const char *alg_base)
{
	struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
	int ret;

	memset(ctx, 0, sizeof(*ctx));
	ctx->dev_data = data;
	ret = sa_init_ctx_info(&ctx->enc, data);
	if (ret)
		return ret;

	if (alg_base) {
		ctx->shash = crypto_alloc_shash(alg_base, 0,
						CRYPTO_ALG_NEED_FALLBACK);
		if (IS_ERR(ctx->shash)) {
			dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n",
				alg_base);
			return PTR_ERR(ctx->shash);
		}
		/* for fallback */
		ctx->fallback.ahash =
			crypto_alloc_ahash(alg_base, 0,
					   CRYPTO_ALG_NEED_FALLBACK);
		if (IS_ERR(ctx->fallback.ahash)) {
			dev_err(ctx->dev_data->dev,
				"Could not load fallback driver\n");
			return PTR_ERR(ctx->fallback.ahash);
		}
	}

	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct sa_sha_req_ctx) +
				 crypto_ahash_reqsize(ctx->fallback.ahash));

	return 0;
}

static int sa_sha_digest(struct ahash_request *req)
{
	return sa_sha_run(req);
}

static int sa_sha_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
	struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);

	dev_dbg(sa_k3_dev, "init: digest size: %u, rctx=%p\n",
		crypto_ahash_digestsize(tfm), rctx);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}

static int sa_sha_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
	struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

static int sa_sha_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
	struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);
}

static int sa_sha_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
	struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int sa_sha_import(struct ahash_request *req, const void *in)
{
	struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags = req->base.flags &
		CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int sa_sha_export(struct ahash_request *req, void *out)
{
	struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = &rctx->fallback_req;

	ahash_request_set_tfm(subreq, ctx->fallback.ahash);
	subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_export(subreq, out);
}

static int sa_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct algo_data ad = { 0 };
	struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	sa_sha_cra_init_alg(tfm, "sha1");

	ad.aalg_id = SA_AALG_ID_SHA1;
	ad.hash_size = SHA1_DIGEST_SIZE;
	ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;

	sa_sha_setup(ctx, &ad);

	return 0;
}

static int sa_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct algo_data ad = { 0 };
	struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	sa_sha_cra_init_alg(tfm, "sha256");

	ad.aalg_id = SA_AALG_ID_SHA2_256;
	ad.hash_size = SHA256_DIGEST_SIZE;
	ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;

	sa_sha_setup(ctx, &ad);

	return 0;
}

static int sa_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct algo_data ad = { 0 };
	struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	sa_sha_cra_init_alg(tfm, "sha512");

	ad.aalg_id = SA_AALG_ID_SHA2_512;
	ad.hash_size = SHA512_DIGEST_SIZE;
	ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA512;

	sa_sha_setup(ctx, &ad);

	return 0;
}

static void sa_sha_cra_exit(struct crypto_tfm *tfm)
{
	struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);

	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);

	if (crypto_tfm_alg_type(tfm) == CRYPTO_ALG_TYPE_AHASH)
		sa_free_ctx_info(&ctx->enc, data);

	crypto_free_shash(ctx->shash);
	crypto_free_ahash(ctx->fallback.ahash);
}

static void sa_aead_dma_in_callback(void *data)
{
	struct sa_rx_data *rxd = data;
	struct aead_request *req;
	struct crypto_aead *tfm;
	unsigned int start;
	unsigned int authsize;
	u8 auth_tag[SA_MAX_AUTH_TAG_SZ];
	size_t pl, ml;
	int i;
	int err = 0;
	u32 *mdptr;

	sa_sync_from_device(rxd);
	req = container_of(rxd->req, struct aead_request, base);
	tfm = crypto_aead_reqtfm(req);
	start = req->assoclen + req->cryptlen;
	authsize = crypto_aead_authsize(tfm);

	mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
	for (i = 0; i < (authsize / 4); i++)
		mdptr[i + 4] = swab32(mdptr[i + 4]);

	if (rxd->enc) {
		scatterwalk_map_and_copy(&mdptr[4], req->dst, start, authsize,
					 1);
	} else {
		start -= authsize;
		scatterwalk_map_and_copy(auth_tag, req->src, start, authsize,
					 0);

		err = memcmp(&mdptr[4], auth_tag, authsize) ? -EBADMSG : 0;
	}

	sa_free_sa_rx_data(rxd);

	aead_request_complete(req, err);
}

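/*
 * Tag handling sketch: the computed tag arrives in the metadata words
 * starting at mdptr[4]. For encryption it is appended to the destination
 * at assoclen + cryptlen; for decryption it is compared against the tag
 * trailing the source data and a mismatch completes the request with
 * -EBADMSG.
 */
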
static int sa_cra_init_aead(struct crypto_aead *tfm, const char *hash,
			    const char *fallback)
{
	struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
	struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
	int ret;

	memzero_explicit(ctx, sizeof(*ctx));
	ctx->dev_data = data;

	ctx->shash = crypto_alloc_shash(hash, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shash)) {
		dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n", hash);
		return PTR_ERR(ctx->shash);
	}

	ctx->fallback.aead = crypto_alloc_aead(fallback, 0,
					       CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(ctx->fallback.aead)) {
		dev_err(sa_k3_dev, "fallback driver %s couldn't be loaded\n",
			fallback);
		return PTR_ERR(ctx->fallback.aead);
	}

	crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
				crypto_aead_reqsize(ctx->fallback.aead));

	ret = sa_init_ctx_info(&ctx->enc, data);
	if (ret)
		return ret;

	ret = sa_init_ctx_info(&ctx->dec, data);
	if (ret) {
		sa_free_ctx_info(&ctx->enc, data);
		return ret;
	}

	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);

	return ret;
}

static int sa_cra_init_aead_sha1(struct crypto_aead *tfm)
{
	return sa_cra_init_aead(tfm, "sha1",
				"authenc(hmac(sha1-ce),cbc(aes-ce))");
}

static int sa_cra_init_aead_sha256(struct crypto_aead *tfm)
{
	return sa_cra_init_aead(tfm, "sha256",
				"authenc(hmac(sha256-ce),cbc(aes-ce))");
}

static void sa_exit_tfm_aead(struct crypto_aead *tfm)
{
	struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
	struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);

	crypto_free_shash(ctx->shash);
	crypto_free_aead(ctx->fallback.aead);

	sa_free_ctx_info(&ctx->enc, data);
	sa_free_ctx_info(&ctx->dec, data);
}

/* AEAD algorithm configuration interface function */
static int sa_aead_setkey(struct crypto_aead *authenc,
			  const u8 *key, unsigned int keylen,
			  struct algo_data *ad)
{
	struct sa_tfm_ctx *ctx = crypto_aead_ctx(authenc);
	struct crypto_authenc_keys keys;
	int cmdl_len;
	struct sa_cmdl_cfg cfg;
	int key_idx;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	/* Convert the key size (16/24/32) to the key size index (0/1/2) */
	key_idx = (keys.enckeylen >> 3) - 2;
	if (key_idx >= 3)
		return -EINVAL;

	ad->ctx = ctx;
	ad->enc_eng.eng_id = SA_ENG_ID_EM1;
	ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
	ad->auth_eng.eng_id = SA_ENG_ID_AM1;
	ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
	ad->mci_enc = mci_cbc_enc_no_iv_array[key_idx];
	ad->mci_dec = mci_cbc_dec_no_iv_array[key_idx];
	ad->inv_key = true;
	ad->keyed_mac = true;
	ad->ealg_id = SA_EALG_ID_AES_CBC;
	ad->prep_iopad = sa_prepare_iopads;

	memset(&cfg, 0, sizeof(cfg));
	cfg.enc = true;
	cfg.aalg = ad->aalg_id;
	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cfg.auth_eng_id = ad->auth_eng.eng_id;
	cfg.iv_size = crypto_aead_ivsize(authenc);
	cfg.akey = keys.authkey;
	cfg.akey_len = keys.authkeylen;

	/* Setup Encryption Security Context & Command label template */
	if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, keys.enckey,
		       keys.enckeylen, keys.authkey, keys.authkeylen,
		       ad, 1, &ctx->enc.epib[1]))
		return -EINVAL;

	cmdl_len = sa_format_cmdl_gen(&cfg,
				      (u8 *)ctx->enc.cmdl,
				      &ctx->enc.cmdl_upd_info);
	if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
		return -EINVAL;

	ctx->enc.cmdl_size = cmdl_len;

	/* Setup Decryption Security Context & Command label template */
	if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, keys.enckey,
		       keys.enckeylen, keys.authkey, keys.authkeylen,
		       ad, 0, &ctx->dec.epib[1]))
		return -EINVAL;

	cfg.enc = false;
	cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
				      &ctx->dec.cmdl_upd_info);

	if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
		return -EINVAL;

	ctx->dec.cmdl_size = cmdl_len;

	crypto_aead_clear_flags(ctx->fallback.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->fallback.aead,
			      crypto_aead_get_flags(authenc) &
			      CRYPTO_TFM_REQ_MASK);

	return crypto_aead_setkey(ctx->fallback.aead, key, keylen);
}

static int sa_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct sa_tfm_ctx *ctx = crypto_tfm_ctx(crypto_aead_tfm(tfm));

	return crypto_aead_setauthsize(ctx->fallback.aead, authsize);
}

static int sa_aead_cbc_sha1_setkey(struct crypto_aead *authenc,
				   const u8 *key, unsigned int keylen)
{
	struct algo_data ad = { 0 };

	ad.ealg_id = SA_EALG_ID_AES_CBC;
	ad.aalg_id = SA_AALG_ID_HMAC_SHA1;
	ad.hash_size = SHA1_DIGEST_SIZE;
	ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;

	return sa_aead_setkey(authenc, key, keylen, &ad);
}

static int sa_aead_cbc_sha256_setkey(struct crypto_aead *authenc,
				     const u8 *key, unsigned int keylen)
{
	struct algo_data ad = { 0 };

	ad.ealg_id = SA_EALG_ID_AES_CBC;
	ad.aalg_id = SA_AALG_ID_HMAC_SHA2_256;
	ad.hash_size = SHA256_DIGEST_SIZE;
	ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;

	return sa_aead_setkey(authenc, key, keylen, &ad);
}

static int sa_aead_run(struct aead_request *req, u8 *iv, int enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
	struct sa_req sa_req = { 0 };
	size_t auth_size, enc_size;

	enc_size = req->cryptlen;
	auth_size = req->assoclen + req->cryptlen;

	if (!enc) {
		enc_size -= crypto_aead_authsize(tfm);
		auth_size -= crypto_aead_authsize(tfm);
	}

	if (auth_size > SA_MAX_DATA_SZ ||
	    (auth_size >= SA_UNSAFE_DATA_SZ_MIN &&
	     auth_size <= SA_UNSAFE_DATA_SZ_MAX)) {
		struct aead_request *subreq = aead_request_ctx(req);
		int ret;

		aead_request_set_tfm(subreq, ctx->fallback.aead);
		aead_request_set_callback(subreq, req->base.flags,
					  req->base.complete, req->base.data);
		aead_request_set_crypt(subreq, req->src, req->dst,
				       req->cryptlen, req->iv);
		aead_request_set_ad(subreq, req->assoclen);

		ret = enc ? crypto_aead_encrypt(subreq) :
			crypto_aead_decrypt(subreq);
		return ret;
	}

	sa_req.enc_offset = req->assoclen;
	sa_req.enc_size = enc_size;
	sa_req.auth_size = auth_size;
	sa_req.size = auth_size;
	sa_req.enc_iv = iv;
	sa_req.type = CRYPTO_ALG_TYPE_AEAD;
	sa_req.enc = enc;
	sa_req.callback = sa_aead_dma_in_callback;
	sa_req.mdata_size = 52;
	sa_req.base = &req->base;
	sa_req.ctx = ctx;
	sa_req.src = req->src;
	sa_req.dst = req->dst;

	return sa_run(&sa_req);
}

/* AEAD algorithm encrypt interface function */
static int sa_aead_encrypt(struct aead_request *req)
{
	return sa_aead_run(req, req->iv, 1);
}

/* AEAD algorithm decrypt interface function */
static int sa_aead_decrypt(struct aead_request *req)
{
	return sa_aead_run(req, req->iv, 0);
}

1971 static struct sa_alg_tmpl sa_algs
[] = {
1972 [SA_ALG_CBC_AES
] = {
1973 .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
1975 .base
.cra_name
= "cbc(aes)",
1976 .base
.cra_driver_name
= "cbc-aes-sa2ul",
1977 .base
.cra_priority
= 30000,
1978 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
|
1979 CRYPTO_ALG_KERN_DRIVER_ONLY
|
1981 CRYPTO_ALG_NEED_FALLBACK
,
1982 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
1983 .base
.cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
1984 .base
.cra_module
= THIS_MODULE
,
1985 .init
= sa_cipher_cra_init
,
1986 .exit
= sa_cipher_cra_exit
,
1987 .min_keysize
= AES_MIN_KEY_SIZE
,
1988 .max_keysize
= AES_MAX_KEY_SIZE
,
1989 .ivsize
= AES_BLOCK_SIZE
,
1990 .setkey
= sa_aes_cbc_setkey
,
1991 .encrypt
= sa_encrypt
,
1992 .decrypt
= sa_decrypt
,
1995 [SA_ALG_EBC_AES
] = {
1996 .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
1998 .base
.cra_name
= "ecb(aes)",
1999 .base
.cra_driver_name
= "ecb-aes-sa2ul",
2000 .base
.cra_priority
= 30000,
2001 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
|
2002 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2004 CRYPTO_ALG_NEED_FALLBACK
,
2005 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
2006 .base
.cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2007 .base
.cra_module
= THIS_MODULE
,
2008 .init
= sa_cipher_cra_init
,
2009 .exit
= sa_cipher_cra_exit
,
2010 .min_keysize
= AES_MIN_KEY_SIZE
,
2011 .max_keysize
= AES_MAX_KEY_SIZE
,
2012 .setkey
= sa_aes_ecb_setkey
,
2013 .encrypt
= sa_encrypt
,
2014 .decrypt
= sa_decrypt
,
2017 [SA_ALG_CBC_DES3
] = {
2018 .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2020 .base
.cra_name
= "cbc(des3_ede)",
2021 .base
.cra_driver_name
= "cbc-des3-sa2ul",
2022 .base
.cra_priority
= 30000,
2023 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
|
2024 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2026 CRYPTO_ALG_NEED_FALLBACK
,
2027 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
2028 .base
.cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2029 .base
.cra_module
= THIS_MODULE
,
2030 .init
= sa_cipher_cra_init
,
2031 .exit
= sa_cipher_cra_exit
,
2032 .min_keysize
= 3 * DES_KEY_SIZE
,
2033 .max_keysize
= 3 * DES_KEY_SIZE
,
2034 .ivsize
= DES_BLOCK_SIZE
,
2035 .setkey
= sa_3des_cbc_setkey
,
2036 .encrypt
= sa_encrypt
,
2037 .decrypt
= sa_decrypt
,
2040 [SA_ALG_ECB_DES3
] = {
2041 .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2043 .base
.cra_name
= "ecb(des3_ede)",
2044 .base
.cra_driver_name
= "ecb-des3-sa2ul",
2045 .base
.cra_priority
= 30000,
2046 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
|
2047 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2049 CRYPTO_ALG_NEED_FALLBACK
,
2050 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
2051 .base
.cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2052 .base
.cra_module
= THIS_MODULE
,
2053 .init
= sa_cipher_cra_init
,
2054 .exit
= sa_cipher_cra_exit
,
2055 .min_keysize
= 3 * DES_KEY_SIZE
,
2056 .max_keysize
= 3 * DES_KEY_SIZE
,
2057 .setkey
= sa_3des_ecb_setkey
,
2058 .encrypt
= sa_encrypt
,
2059 .decrypt
= sa_decrypt
,
2063 .type
= CRYPTO_ALG_TYPE_AHASH
,
2067 .cra_driver_name
= "sha1-sa2ul",
2068 .cra_priority
= 400,
2069 .cra_flags
= CRYPTO_ALG_TYPE_AHASH
|
2071 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2072 CRYPTO_ALG_NEED_FALLBACK
,
2073 .cra_blocksize
= SHA1_BLOCK_SIZE
,
2074 .cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2075 .cra_module
= THIS_MODULE
,
2076 .cra_init
= sa_sha1_cra_init
,
2077 .cra_exit
= sa_sha_cra_exit
,
2079 .halg
.digestsize
= SHA1_DIGEST_SIZE
,
2080 .halg
.statesize
= sizeof(struct sa_sha_req_ctx
) +
2081 sizeof(struct sha1_state
),
2082 .init
= sa_sha_init
,
2083 .update
= sa_sha_update
,
2084 .final
= sa_sha_final
,
2085 .finup
= sa_sha_finup
,
2086 .digest
= sa_sha_digest
,
2087 .export
= sa_sha_export
,
2088 .import
= sa_sha_import
,
	[SA_ALG_SHA256] = {
		.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.ahash = {
			.halg.base = {
				.cra_name	= "sha256",
				.cra_driver_name	= "sha256-sa2ul",
				.cra_priority	= 400,
				.cra_flags	= CRYPTO_ALG_TYPE_AHASH |
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_KERN_DRIVER_ONLY |
						  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize	= SHA256_BLOCK_SIZE,
				.cra_ctxsize	= sizeof(struct sa_tfm_ctx),
				.cra_module	= THIS_MODULE,
				.cra_init	= sa_sha256_cra_init,
				.cra_exit	= sa_sha_cra_exit,
			},
			.halg.digestsize	= SHA256_DIGEST_SIZE,
			.halg.statesize		= sizeof(struct sa_sha_req_ctx) +
						  sizeof(struct sha256_state),
			.init			= sa_sha_init,
			.update			= sa_sha_update,
			.final			= sa_sha_final,
			.finup			= sa_sha_finup,
			.digest			= sa_sha_digest,
			.export			= sa_sha_export,
			.import			= sa_sha_import,
		},
	},
	[SA_ALG_SHA512] = {
		.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.ahash = {
			.halg.base = {
				.cra_name	= "sha512",
				.cra_driver_name	= "sha512-sa2ul",
				.cra_priority	= 400,
				.cra_flags	= CRYPTO_ALG_TYPE_AHASH |
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_KERN_DRIVER_ONLY |
						  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize	= SHA512_BLOCK_SIZE,
				.cra_ctxsize	= sizeof(struct sa_tfm_ctx),
				.cra_module	= THIS_MODULE,
				.cra_init	= sa_sha512_cra_init,
				.cra_exit	= sa_sha_cra_exit,
			},
			.halg.digestsize	= SHA512_DIGEST_SIZE,
			.halg.statesize		= sizeof(struct sa_sha_req_ctx) +
						  sizeof(struct sha512_state),
			.init			= sa_sha_init,
			.update			= sa_sha_update,
			.final			= sa_sha_final,
			.finup			= sa_sha_finup,
			.digest			= sa_sha_digest,
			.export			= sa_sha_export,
			.import			= sa_sha_import,
		},
	},
	[SA_ALG_AUTHENC_SHA1_AES] = {
		.type	= CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name =
					"authenc(hmac(sha1),cbc(aes))-sa2ul",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_TYPE_AEAD |
					CRYPTO_ALG_KERN_DRIVER_ONLY |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_ctxsize = sizeof(struct sa_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_priority = 3000,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,

			.init = sa_cra_init_aead_sha1,
			.exit = sa_exit_tfm_aead,
			.setkey = sa_aead_cbc_sha1_setkey,
			.setauthsize = sa_aead_setauthsize,
			.encrypt = sa_aead_encrypt,
			.decrypt = sa_aead_decrypt,
		},
	},
	[SA_ALG_AUTHENC_SHA256_AES] = {
		.type	= CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name =
					"authenc(hmac(sha256),cbc(aes))-sa2ul",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_TYPE_AEAD |
					CRYPTO_ALG_KERN_DRIVER_ONLY |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_ctxsize = sizeof(struct sa_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_priority = 3000,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,

			.init = sa_cra_init_aead_sha256,
			.exit = sa_exit_tfm_aead,
			.setkey = sa_aead_cbc_sha256_setkey,
			.setauthsize = sa_aead_setauthsize,
			.encrypt = sa_aead_encrypt,
			.decrypt = sa_aead_decrypt,
		},
	},
};
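/*
 * Illustrative sketch, not part of the original driver (function and
 * variable names are hypothetical): once sa_register_algos() below has
 * run, kernel users reach the hardware through the generic crypto API by
 * algorithm name; the lookup resolves to the "-sa2ul" implementations
 * above because of their cra_priority values.
 */
static int __maybe_unused sa2ul_alloc_example(void)
{
	struct crypto_skcipher *tfm;

	/* Expected to resolve to "cbc-aes-sa2ul" once registered */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/*
	 * A real user would now call crypto_skcipher_setkey(), build a
	 * struct skcipher_request and submit it asynchronously.
	 */
	crypto_free_skcipher(tfm);

	return 0;
}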
/* Register the algorithms in crypto framework */
static void sa_register_algos(struct sa_crypto_data *dev_data)
{
	const struct sa_match_data *match_data = dev_data->match_data;
	struct device *dev = dev_data->dev;
	char *alg_name;
	u32 type;
	int i, err;

	for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
		/* Skip unsupported algos */
		if (!(match_data->supported_algos & BIT(i)))
			continue;

		type = sa_algs[i].type;
		if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
			alg_name = sa_algs[i].alg.skcipher.base.cra_name;
			err = crypto_register_skcipher(&sa_algs[i].alg.skcipher);
		} else if (type == CRYPTO_ALG_TYPE_AHASH) {
			alg_name = sa_algs[i].alg.ahash.halg.base.cra_name;
			err = crypto_register_ahash(&sa_algs[i].alg.ahash);
		} else if (type == CRYPTO_ALG_TYPE_AEAD) {
			alg_name = sa_algs[i].alg.aead.base.cra_name;
			err = crypto_register_aead(&sa_algs[i].alg.aead);
		} else {
			dev_err(dev,
				"un-supported crypto algorithm (%d)",
				sa_algs[i].type);
			continue;
		}

		if (err)
			dev_err(dev, "Failed to register '%s'\n", alg_name);
		else
			sa_algs[i].registered = true;
	}
}
/* Unregister the algorithms in crypto framework */
static void sa_unregister_algos(const struct device *dev)
{
	u32 type;
	int i;

	for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
		type = sa_algs[i].type;
		if (!sa_algs[i].registered)
			continue;

		if (type == CRYPTO_ALG_TYPE_SKCIPHER)
			crypto_unregister_skcipher(&sa_algs[i].alg.skcipher);
		else if (type == CRYPTO_ALG_TYPE_AHASH)
			crypto_unregister_ahash(&sa_algs[i].alg.ahash);
		else if (type == CRYPTO_ALG_TYPE_AEAD)
			crypto_unregister_aead(&sa_algs[i].alg.aead);

		sa_algs[i].registered = false;
	}
}
static int sa_init_mem(struct sa_crypto_data *dev_data)
{
	struct device *dev = &dev_data->pdev->dev;

	/* Setup dma pool for security context buffers */
	dev_data->sc_pool = dma_pool_create("keystone-sc", dev,
					    SA_CTX_MAX_SZ, 64, 0);
	if (!dev_data->sc_pool) {
		dev_err(dev, "Failed to create dma pool");
		return -ENOMEM;
	}

	return 0;
}
static int sa_dma_init(struct sa_crypto_data *dd)
{
	int ret;
	struct dma_slave_config cfg;

	dd->dma_rx1 = NULL;
	dd->dma_tx = NULL;
	dd->dma_rx2 = NULL;

	ret = dma_coerce_mask_and_coherent(dd->dev, DMA_BIT_MASK(48));
	if (ret)
		return ret;

	dd->dma_rx1 = dma_request_chan(dd->dev, "rx1");
	if (IS_ERR(dd->dma_rx1))
		return dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx1),
				     "Unable to request rx1 DMA channel\n");

	dd->dma_rx2 = dma_request_chan(dd->dev, "rx2");
	if (IS_ERR(dd->dma_rx2)) {
		ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx2),
				    "Unable to request rx2 DMA channel\n");
		goto err_dma_rx2;
	}

	dd->dma_tx = dma_request_chan(dd->dev, "tx");
	if (IS_ERR(dd->dma_tx)) {
		ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_tx),
				    "Unable to request tx DMA channel\n");
		goto err_dma_tx;
	}

	memzero_explicit(&cfg, sizeof(cfg));

	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cfg.src_maxburst = 4;
	cfg.dst_maxburst = 4;

	ret = dmaengine_slave_config(dd->dma_rx1, &cfg);
	if (ret) {
		dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
			ret);
		goto err_dma_config;
	}

	ret = dmaengine_slave_config(dd->dma_rx2, &cfg);
	if (ret) {
		dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
			ret);
		goto err_dma_config;
	}

	ret = dmaengine_slave_config(dd->dma_tx, &cfg);
	if (ret) {
		dev_err(dd->dev, "can't configure OUT dmaengine slave: %d\n",
			ret);
		goto err_dma_config;
	}

	return 0;

err_dma_config:
	dma_release_channel(dd->dma_tx);
err_dma_tx:
	dma_release_channel(dd->dma_rx2);
err_dma_rx2:
	dma_release_channel(dd->dma_rx1);

	return ret;
}
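/*
 * Illustrative sketch, not part of the original driver (function and
 * parameter names are hypothetical): with the channels configured above,
 * each request follows the standard dmaengine slave flow - prepare a
 * descriptor, submit it, then kick the channel.
 */
static int __maybe_unused sa_dma_flow_example(struct sa_crypto_data *dd,
					      struct scatterlist *sg,
					      unsigned int nents)
{
	struct dma_async_tx_descriptor *desc;

	/* Map the request onto the tx channel, memory-to-device */
	desc = dmaengine_prep_slave_sg(dd->dma_tx, sg, nents,
				       DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EINVAL;

	/* A completion callback would normally be set on desc here */
	dmaengine_submit(desc);
	dma_async_issue_pending(dd->dma_tx);

	return 0;
}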
static int sa_link_child(struct device *dev, void *data)
{
	struct device *parent = data;

	device_link_add(dev, parent, DL_FLAG_AUTOPROBE_CONSUMER);

	return 0;
}
static struct sa_match_data am654_match_data = {
	.priv = 1,
	.priv_id = 1,
	.supported_algos = BIT(SA_ALG_CBC_AES) |
			   BIT(SA_ALG_EBC_AES) |
			   BIT(SA_ALG_CBC_DES3) |
			   BIT(SA_ALG_ECB_DES3) |
			   BIT(SA_ALG_SHA1) |
			   BIT(SA_ALG_SHA256) |
			   BIT(SA_ALG_SHA512) |
			   BIT(SA_ALG_AUTHENC_SHA1_AES) |
			   BIT(SA_ALG_AUTHENC_SHA256_AES),
};
static struct sa_match_data am64_match_data = {
	.priv = 0,
	.priv_id = 0,
	.supported_algos = BIT(SA_ALG_CBC_AES) |
			   BIT(SA_ALG_EBC_AES) |
			   BIT(SA_ALG_SHA256) |
			   BIT(SA_ALG_SHA512) |
			   BIT(SA_ALG_AUTHENC_SHA256_AES),
};
static const struct of_device_id of_match[] = {
	{ .compatible = "ti,j721e-sa2ul", .data = &am654_match_data, },
	{ .compatible = "ti,am654-sa2ul", .data = &am654_match_data, },
	{ .compatible = "ti,am64-sa2ul", .data = &am64_match_data, },
	{ .compatible = "ti,am62-sa3ul", .data = &am64_match_data, },
	{},
};
MODULE_DEVICE_TABLE(of, of_match);
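/*
 * Illustrative device-tree fragment, not part of the original driver
 * (the unit address, reg values and DMA phandles are hypothetical; the
 * compatible string and the "tx"/"rx1"/"rx2" dma-names match what
 * sa_dma_init() requests above):
 *
 *	crypto@4e00000 {
 *		compatible = "ti,am654-sa2ul";
 *		reg = <0x0 0x4e00000 0x0 0x1200>;
 *		dmas = <&main_udmap 0xc000>, <&main_udmap 0x4000>,
 *		       <&main_udmap 0x4001>;
 *		dma-names = "tx", "rx1", "rx2";
 *	};
 */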
static int sa_ul_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct device_node *node = dev->of_node;
	static void __iomem *saul_base;
	struct sa_crypto_data *dev_data;
	u32 val, status;
	int ret;

	dev_data = devm_kzalloc(dev, sizeof(*dev_data), GFP_KERNEL);
	if (!dev_data)
		return -ENOMEM;

	dev_data->match_data = of_device_get_match_data(dev);
	if (!dev_data->match_data)
		return -ENODEV;

	saul_base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(saul_base))
		return PTR_ERR(saul_base);

	sa_k3_dev = dev;
	dev_data->dev = dev;
	dev_data->pdev = pdev;
	dev_data->base = saul_base;
	platform_set_drvdata(pdev, dev_data);
	dev_set_drvdata(sa_k3_dev, dev_data);

	pm_runtime_enable(dev);
	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0) {
		dev_err(dev, "%s: failed to get sync: %d\n", __func__, ret);
		pm_runtime_disable(dev);
		return ret;
	}

	sa_init_mem(dev_data);
	ret = sa_dma_init(dev_data);
	if (ret)
		goto destroy_dma_pool;

	spin_lock_init(&dev_data->scid_lock);

	val = SA_EEC_ENCSS_EN | SA_EEC_AUTHSS_EN | SA_EEC_CTXCACH_EN |
	      SA_EEC_CPPI_PORT_IN_EN | SA_EEC_CPPI_PORT_OUT_EN |
	      SA_EEC_TRNG_EN;
	status = readl_relaxed(saul_base + SA_ENGINE_STATUS);
	/* Only enable engines if all are not already enabled */
	if (status & SA_EEC_TRNG_EN || !(status & SA_EEC_ENCSS_EN))
		writel_relaxed(val, saul_base + SA_ENGINE_ENABLE_CONTROL);

	sa_register_algos(dev_data);

	ret = of_platform_populate(node, NULL, NULL, dev);
	if (ret)
		goto release_dma;

	device_for_each_child(dev, dev, sa_link_child);

	return 0;

release_dma:
	sa_unregister_algos(dev);

	dma_release_channel(dev_data->dma_rx2);
	dma_release_channel(dev_data->dma_rx1);
	dma_release_channel(dev_data->dma_tx);

destroy_dma_pool:
	dma_pool_destroy(dev_data->sc_pool);

	pm_runtime_put_sync(dev);
	pm_runtime_disable(dev);

	return ret;
}
static void sa_ul_remove(struct platform_device *pdev)
{
	struct sa_crypto_data *dev_data = platform_get_drvdata(pdev);

	of_platform_depopulate(&pdev->dev);

	sa_unregister_algos(&pdev->dev);

	dma_release_channel(dev_data->dma_rx2);
	dma_release_channel(dev_data->dma_rx1);
	dma_release_channel(dev_data->dma_tx);

	dma_pool_destroy(dev_data->sc_pool);

	platform_set_drvdata(pdev, NULL);

	pm_runtime_put_sync(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
}
static struct platform_driver sa_ul_driver = {
	.probe = sa_ul_probe,
	.remove = sa_ul_remove,
	.driver = {
		.name = "saul-crypto",
		.of_match_table = of_match,
	},
};
module_platform_driver(sa_ul_driver);
MODULE_DESCRIPTION("K3 SA2UL crypto accelerator driver");
MODULE_LICENSE("GPL v2");