// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
 */

#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/types.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>

#include "cipher.h"
#include "common.h"
#include "core.h"
#include "regs-v5.h"
#include "sha.h"
#include "aead.h"

static inline u32 qce_read(struct qce_device *qce, u32 offset)
{
	return readl(qce->base + offset);
}

static inline void qce_write(struct qce_device *qce, u32 offset, u32 val)
{
	writel(val, qce->base + offset);
}

static inline void qce_write_array(struct qce_device *qce, u32 offset,
				   const u32 *val, unsigned int len)
{
	int i;

	for (i = 0; i < len; i++)
		qce_write(qce, offset + i * sizeof(u32), val[i]);
}

static void qce_clear_array(struct qce_device *qce, u32 offset,
			    unsigned int len)
{
	int i;

	for (i = 0; i < len; i++)
		qce_write(qce, offset + i * sizeof(u32), 0);
}

static u32 qce_config_reg(struct qce_device *qce, int little)
{
	u32 beats = (qce->burst_size >> 3) - 1;
	u32 pipe_pair = qce->pipe_pair_id;
	u32 config;

	config = (beats << REQ_SIZE_SHIFT) & REQ_SIZE_MASK;
	config |= BIT(MASK_DOUT_INTR_SHIFT) | BIT(MASK_DIN_INTR_SHIFT) |
		  BIT(MASK_OP_DONE_INTR_SHIFT) | BIT(MASK_ERR_INTR_SHIFT);
	config |= (pipe_pair << PIPE_SET_SELECT_SHIFT) & PIPE_SET_SELECT_MASK;
	config &= ~HIGH_SPD_EN_N_SHIFT;

	if (little)
		config |= BIT(LITTLE_ENDIAN_MODE_SHIFT);

	return config;
}
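
/*
 * Illustrative arithmetic (values assumed, not from the original source):
 * with qce->burst_size == 64, beats = (64 >> 3) - 1 = 7, and that value is
 * placed into the REQ_SIZE field of CONFIG; the MASK_*_INTR bits and the
 * pipe pair selection are then OR-ed on top of it.
 */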

void qce_cpu_to_be32p_array(__be32 *dst, const u8 *src, unsigned int len)
{
	__be32 *d = dst;
	const u8 *s = src;
	unsigned int n;

	n = len / sizeof(u32);
	for (; n > 0; n--) {
		*d = cpu_to_be32p((const __u32 *)s);
		s += sizeof(__u32);
		d++;
	}
}

static void qce_setup_config(struct qce_device *qce)
{
	u32 config;

	/* get big endianness */
	config = qce_config_reg(qce, 0);

	/* clear status register */
	qce_write(qce, REG_STATUS, 0);
	qce_write(qce, REG_CONFIG, config);
}

static inline void qce_crypto_go(struct qce_device *qce, bool result_dump)
{
	if (result_dump)
		qce_write(qce, REG_GOPROC, BIT(GO_SHIFT) | BIT(RESULTS_DUMP_SHIFT));
	else
		qce_write(qce, REG_GOPROC, BIT(GO_SHIFT));
}

#if defined(CONFIG_CRYPTO_DEV_QCE_SHA) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD)
static u32 qce_auth_cfg(unsigned long flags, u32 key_size, u32 auth_size)
{
	u32 cfg = 0;

	if (IS_CCM(flags) || IS_CMAC(flags))
		cfg |= AUTH_ALG_AES << AUTH_ALG_SHIFT;
	else
		cfg |= AUTH_ALG_SHA << AUTH_ALG_SHIFT;

	if (IS_CCM(flags) || IS_CMAC(flags)) {
		if (key_size == AES_KEYSIZE_128)
			cfg |= AUTH_KEY_SZ_AES128 << AUTH_KEY_SIZE_SHIFT;
		else if (key_size == AES_KEYSIZE_256)
			cfg |= AUTH_KEY_SZ_AES256 << AUTH_KEY_SIZE_SHIFT;
	}

	if (IS_SHA1(flags) || IS_SHA1_HMAC(flags))
		cfg |= AUTH_SIZE_SHA1 << AUTH_SIZE_SHIFT;
	else if (IS_SHA256(flags) || IS_SHA256_HMAC(flags))
		cfg |= AUTH_SIZE_SHA256 << AUTH_SIZE_SHIFT;
	else if (IS_CMAC(flags))
		cfg |= AUTH_SIZE_ENUM_16_BYTES << AUTH_SIZE_SHIFT;
	else if (IS_CCM(flags))
		cfg |= (auth_size - 1) << AUTH_SIZE_SHIFT;

	if (IS_SHA1(flags) || IS_SHA256(flags))
		cfg |= AUTH_MODE_HASH << AUTH_MODE_SHIFT;
	else if (IS_SHA1_HMAC(flags) || IS_SHA256_HMAC(flags))
		cfg |= AUTH_MODE_HMAC << AUTH_MODE_SHIFT;
	else if (IS_CCM(flags))
		cfg |= AUTH_MODE_CCM << AUTH_MODE_SHIFT;
	else if (IS_CMAC(flags))
		cfg |= AUTH_MODE_CMAC << AUTH_MODE_SHIFT;

	if (IS_SHA(flags) || IS_SHA_HMAC(flags))
		cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;

	if (IS_CCM(flags))
		cfg |= QCE_MAX_NONCE_WORDS << AUTH_NONCE_NUM_WORDS_SHIFT;

	return cfg;
}
#endif
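
/*
 * Example derived from the logic above: a SHA-256 HMAC request yields
 * (AUTH_ALG_SHA << AUTH_ALG_SHIFT) | (AUTH_SIZE_SHA256 << AUTH_SIZE_SHIFT) |
 * (AUTH_MODE_HMAC << AUTH_MODE_SHIFT) | (AUTH_POS_BEFORE << AUTH_POS_SHIFT),
 * with no key-size or nonce-word fields set.
 */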

#ifdef CONFIG_CRYPTO_DEV_QCE_SHA
static int qce_setup_regs_ahash(struct crypto_async_request *async_req)
{
	struct ahash_request *req = ahash_request_cast(async_req);
	struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);
	struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
	struct qce_device *qce = tmpl->qce;
	unsigned int digestsize = crypto_ahash_digestsize(ahash);
	unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);
	__be32 auth[SHA256_DIGEST_SIZE / sizeof(__be32)] = {0};
	__be32 mackey[QCE_SHA_HMAC_KEY_SIZE / sizeof(__be32)] = {0};
	u32 auth_cfg = 0, config;
	unsigned int iv_words;

	/* if not the last, the size has to be on the block boundary */
	if (!rctx->last_blk && req->nbytes % blocksize)
		return -EINVAL;

	qce_setup_config(qce);

	if (IS_CMAC(rctx->flags)) {
		qce_write(qce, REG_AUTH_SEG_CFG, 0);
		qce_write(qce, REG_ENCR_SEG_CFG, 0);
		qce_write(qce, REG_ENCR_SEG_SIZE, 0);
		qce_clear_array(qce, REG_AUTH_IV0, 16);
		qce_clear_array(qce, REG_AUTH_KEY0, 16);
		qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);

		auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize);
	}

	if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {
		u32 authkey_words = rctx->authklen / sizeof(u32);

		qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);
		qce_write_array(qce, REG_AUTH_KEY0, (u32 *)mackey,
				authkey_words);
	}

	if (IS_CMAC(rctx->flags))
		goto go_proc;

	if (rctx->first_blk)
		memcpy(auth, rctx->digest, digestsize);
	else
		qce_cpu_to_be32p_array(auth, rctx->digest, digestsize);

	iv_words = (IS_SHA1(rctx->flags) || IS_SHA1_HMAC(rctx->flags)) ? 5 : 8;
	qce_write_array(qce, REG_AUTH_IV0, (u32 *)auth, iv_words);

	if (rctx->first_blk)
		qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);
	else
		qce_write_array(qce, REG_AUTH_BYTECNT0,
				(u32 *)rctx->byte_count, 2);

	auth_cfg = qce_auth_cfg(rctx->flags, 0, digestsize);

	if (rctx->last_blk)
		auth_cfg |= BIT(AUTH_LAST_SHIFT);
	else
		auth_cfg &= ~BIT(AUTH_LAST_SHIFT);

	if (rctx->first_blk)
		auth_cfg |= BIT(AUTH_FIRST_SHIFT);
	else
		auth_cfg &= ~BIT(AUTH_FIRST_SHIFT);

go_proc:
	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);
	qce_write(qce, REG_AUTH_SEG_SIZE, req->nbytes);
	qce_write(qce, REG_AUTH_SEG_START, 0);
	qce_write(qce, REG_ENCR_SEG_CFG, 0);
	qce_write(qce, REG_SEG_SIZE, req->nbytes);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	qce_crypto_go(qce, true);

	return 0;
}
#endif

#if defined(CONFIG_CRYPTO_DEV_QCE_SKCIPHER) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD)
static u32 qce_encr_cfg(unsigned long flags, u32 aes_key_size)
{
	u32 cfg = 0;

	if (IS_AES(flags)) {
		if (aes_key_size == AES_KEYSIZE_128)
			cfg |= ENCR_KEY_SZ_AES128 << ENCR_KEY_SZ_SHIFT;
		else if (aes_key_size == AES_KEYSIZE_256)
			cfg |= ENCR_KEY_SZ_AES256 << ENCR_KEY_SZ_SHIFT;
	}

	if (IS_AES(flags))
		cfg |= ENCR_ALG_AES << ENCR_ALG_SHIFT;
	else if (IS_DES(flags) || IS_3DES(flags))
		cfg |= ENCR_ALG_DES << ENCR_ALG_SHIFT;

	if (IS_DES(flags))
		cfg |= ENCR_KEY_SZ_DES << ENCR_KEY_SZ_SHIFT;

	if (IS_3DES(flags))
		cfg |= ENCR_KEY_SZ_3DES << ENCR_KEY_SZ_SHIFT;

	switch (flags & QCE_MODE_MASK) {
	case QCE_MODE_ECB:
		cfg |= ENCR_MODE_ECB << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CBC:
		cfg |= ENCR_MODE_CBC << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CTR:
		cfg |= ENCR_MODE_CTR << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_XTS:
		cfg |= ENCR_MODE_XTS << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CCM:
		cfg |= ENCR_MODE_CCM << ENCR_MODE_SHIFT;
		cfg |= LAST_CCM_XFR << LAST_CCM_SHIFT;
		break;
	default:
		return ~0;
	}

	return cfg;
}
#endif
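
/*
 * Example derived from the logic above: an AES-128 CBC request yields
 * (ENCR_KEY_SZ_AES128 << ENCR_KEY_SZ_SHIFT) | (ENCR_ALG_AES << ENCR_ALG_SHIFT) |
 * (ENCR_MODE_CBC << ENCR_MODE_SHIFT).
 */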

#ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER
static void qce_xts_swapiv(__be32 *dst, const u8 *src, unsigned int ivsize)
{
	u8 swap[QCE_AES_IV_LENGTH];
	u32 i, j;

	if (ivsize > QCE_AES_IV_LENGTH)
		return;

	memset(swap, 0, QCE_AES_IV_LENGTH);

	for (i = (QCE_AES_IV_LENGTH - ivsize), j = ivsize - 1;
	     i < QCE_AES_IV_LENGTH; i++, j--)
		swap[i] = src[j];

	qce_cpu_to_be32p_array(dst, swap, QCE_AES_IV_LENGTH);
}
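
/*
 * Worked example for the loop above (illustrative only, assuming
 * QCE_AES_IV_LENGTH == 16): with ivsize == 8, bytes src[7]..src[0] are
 * copied, in reverse order, into swap[8]..swap[15] while swap[0]..swap[7]
 * remain zero; the buffer is then converted to big-endian words.
 */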

static void qce_xtskey(struct qce_device *qce, const u8 *enckey,
		       unsigned int enckeylen, unsigned int cryptlen)
{
	u32 xtskey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0};
	unsigned int xtsklen = enckeylen / (2 * sizeof(u32));

	qce_cpu_to_be32p_array((__be32 *)xtskey, enckey + enckeylen / 2,
			       enckeylen / 2);
	qce_write_array(qce, REG_ENCR_XTS_KEY0, xtskey, xtsklen);

	/* Set data unit size to cryptlen. Anything else causes the
	 * crypto engine to return incorrect results.
	 */
	qce_write(qce, REG_ENCR_XTS_DU_SIZE, cryptlen);
}

static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)
{
	struct skcipher_request *req = skcipher_request_cast(async_req);
	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
	struct qce_alg_template *tmpl = to_cipher_tmpl(crypto_skcipher_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	__be32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(__be32)] = {0};
	__be32 enciv[QCE_MAX_IV_SIZE / sizeof(__be32)] = {0};
	unsigned int enckey_words, enciv_words;
	unsigned int keylen;
	u32 encr_cfg = 0, auth_cfg = 0, config;
	unsigned int ivsize = rctx->ivsize;
	unsigned long flags = rctx->flags;

	qce_setup_config(qce);

	if (IS_XTS(flags))
		keylen = ctx->enc_keylen / 2;
	else
		keylen = ctx->enc_keylen;

	qce_cpu_to_be32p_array(enckey, ctx->enc_key, keylen);
	enckey_words = keylen / sizeof(u32);

	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);

	encr_cfg = qce_encr_cfg(flags, keylen);

	if (IS_DES(flags)) {
		enciv_words = 2;
		enckey_words = 2;
	} else if (IS_3DES(flags)) {
		enciv_words = 2;
		enckey_words = 6;
	} else if (IS_AES(flags)) {
		if (IS_XTS(flags))
			qce_xtskey(qce, ctx->enc_key, ctx->enc_keylen,
				   rctx->cryptlen);
		enciv_words = QCE_AES_IV_LENGTH / sizeof(u32);
	} else {
		return -EINVAL;
	}

	qce_write_array(qce, REG_ENCR_KEY0, (u32 *)enckey, enckey_words);

	if (!IS_ECB(flags)) {
		if (IS_XTS(flags))
			qce_xts_swapiv(enciv, rctx->iv, ivsize);
		else
			qce_cpu_to_be32p_array(enciv, rctx->iv, ivsize);

		qce_write_array(qce, REG_CNTR0_IV0, (u32 *)enciv, enciv_words);
	}

	if (IS_ENCRYPT(flags))
		encr_cfg |= BIT(ENCODE_SHIFT);

	qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg);
	qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
	qce_write(qce, REG_ENCR_SEG_START, 0);

	if (IS_CTR(flags)) {
		qce_write(qce, REG_CNTR_MASK, ~0);
		qce_write(qce, REG_CNTR_MASK0, ~0);
		qce_write(qce, REG_CNTR_MASK1, ~0);
		qce_write(qce, REG_CNTR_MASK2, ~0);
	}

	qce_write(qce, REG_SEG_SIZE, rctx->cryptlen);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	qce_crypto_go(qce, true);

	return 0;
}
#endif

#ifdef CONFIG_CRYPTO_DEV_QCE_AEAD
static const u32 std_iv_sha1[SHA256_DIGEST_SIZE / sizeof(u32)] = {
	SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4, 0, 0, 0
};

static const u32 std_iv_sha256[SHA256_DIGEST_SIZE / sizeof(u32)] = {
	SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
	SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7
};

static unsigned int qce_be32_to_cpu_array(u32 *dst, const u8 *src, unsigned int len)
{
	u32 *d = dst;
	const u8 *s = src;
	unsigned int n;

	n = len / sizeof(u32);
	for (; n > 0; n--) {
		*d = be32_to_cpup((const __be32 *)s);
		s += sizeof(u32);
		d++;
	}
	return DIV_ROUND_UP(len, sizeof(u32));
}

static int qce_setup_regs_aead(struct crypto_async_request *async_req)
{
	struct aead_request *req = aead_request_cast(async_req);
	struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
	struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
	struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	u32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0};
	u32 enciv[QCE_MAX_IV_SIZE / sizeof(u32)] = {0};
	u32 authkey[QCE_SHA_HMAC_KEY_SIZE / sizeof(u32)] = {0};
	u32 authiv[SHA256_DIGEST_SIZE / sizeof(u32)] = {0};
	u32 authnonce[QCE_MAX_NONCE / sizeof(u32)] = {0};
	unsigned int enc_keylen = ctx->enc_keylen;
	unsigned int auth_keylen = ctx->auth_keylen;
	unsigned int enc_ivsize = rctx->ivsize;
	unsigned int auth_ivsize = 0;
	unsigned int enckey_words, enciv_words;
	unsigned int authkey_words, authiv_words, authnonce_words;
	unsigned long flags = rctx->flags;
	u32 encr_cfg, auth_cfg, config, totallen;
	u32 iv_last_word;

	qce_setup_config(qce);

	/* Write encryption key */
	enckey_words = qce_be32_to_cpu_array(enckey, ctx->enc_key, enc_keylen);
	qce_write_array(qce, REG_ENCR_KEY0, enckey, enckey_words);

	/* Write encryption iv */
	enciv_words = qce_be32_to_cpu_array(enciv, rctx->iv, enc_ivsize);
	qce_write_array(qce, REG_CNTR0_IV0, enciv, enciv_words);

	if (IS_CCM(rctx->flags)) {
		iv_last_word = enciv[enciv_words - 1];
		qce_write(qce, REG_CNTR3_IV3, iv_last_word + 1);
		qce_write_array(qce, REG_ENCR_CCM_INT_CNTR0, (u32 *)enciv, enciv_words);
		qce_write(qce, REG_CNTR_MASK, ~0);
		qce_write(qce, REG_CNTR_MASK0, ~0);
		qce_write(qce, REG_CNTR_MASK1, ~0);
		qce_write(qce, REG_CNTR_MASK2, ~0);
	}

	/* Clear authentication IV and KEY registers of previous values */
	qce_clear_array(qce, REG_AUTH_IV0, 16);
	qce_clear_array(qce, REG_AUTH_KEY0, 16);

	/* Clear byte count */
	qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);

	/* Write authentication key */
	authkey_words = qce_be32_to_cpu_array(authkey, ctx->auth_key, auth_keylen);
	qce_write_array(qce, REG_AUTH_KEY0, (u32 *)authkey, authkey_words);

	/* Write initial authentication IV only for HMAC algorithms */
	if (IS_SHA_HMAC(rctx->flags)) {
		/* Write default authentication iv */
		if (IS_SHA1_HMAC(rctx->flags)) {
			auth_ivsize = SHA1_DIGEST_SIZE;
			memcpy(authiv, std_iv_sha1, auth_ivsize);
		} else if (IS_SHA256_HMAC(rctx->flags)) {
			auth_ivsize = SHA256_DIGEST_SIZE;
			memcpy(authiv, std_iv_sha256, auth_ivsize);
		}
		authiv_words = auth_ivsize / sizeof(u32);
		qce_write_array(qce, REG_AUTH_IV0, (u32 *)authiv, authiv_words);
	} else if (IS_CCM(rctx->flags)) {
		/* Write nonce for CCM algorithms */
		authnonce_words = qce_be32_to_cpu_array(authnonce, rctx->ccm_nonce, QCE_MAX_NONCE);
		qce_write_array(qce, REG_AUTH_INFO_NONCE0, authnonce, authnonce_words);
	}

	/* Set up ENCR_SEG_CFG */
	encr_cfg = qce_encr_cfg(flags, enc_keylen);
	if (IS_ENCRYPT(flags))
		encr_cfg |= BIT(ENCODE_SHIFT);
	qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg);

	/* Set up AUTH_SEG_CFG */
	auth_cfg = qce_auth_cfg(rctx->flags, auth_keylen, ctx->authsize);
	auth_cfg |= BIT(AUTH_LAST_SHIFT);
	auth_cfg |= BIT(AUTH_FIRST_SHIFT);
	if (IS_ENCRYPT(flags)) {
		if (IS_CCM(rctx->flags))
			auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;
		else
			auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT;
	} else {
		if (IS_CCM(rctx->flags))
			auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT;
		else
			auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;
	}
	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);

	totallen = rctx->cryptlen + rctx->assoclen;

	/* Set the encryption size and start offset */
	if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))
		qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen + ctx->authsize);
	else
		qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
	qce_write(qce, REG_ENCR_SEG_START, rctx->assoclen & 0xffff);

	/* Set the authentication size and start offset */
	qce_write(qce, REG_AUTH_SEG_SIZE, totallen);
	qce_write(qce, REG_AUTH_SEG_START, 0);

	/* Write total length */
	if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))
		qce_write(qce, REG_SEG_SIZE, totallen + ctx->authsize);
	else
		qce_write(qce, REG_SEG_SIZE, totallen);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	/* Start the process */
	qce_crypto_go(qce, !IS_CCM(flags));

	return 0;
}
#endif

int qce_start(struct crypto_async_request *async_req, u32 type)
{
	switch (type) {
#ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return qce_setup_regs_skcipher(async_req);
#endif
#ifdef CONFIG_CRYPTO_DEV_QCE_SHA
	case CRYPTO_ALG_TYPE_AHASH:
		return qce_setup_regs_ahash(async_req);
#endif
#ifdef CONFIG_CRYPTO_DEV_QCE_AEAD
	case CRYPTO_ALG_TYPE_AEAD:
		return qce_setup_regs_aead(async_req);
#endif
	default:
		return -EINVAL;
	}
}

#define STATUS_ERRORS	\
		(BIT(SW_ERR_SHIFT) | BIT(AXI_ERR_SHIFT) | BIT(HSD_ERR_SHIFT))

int qce_check_status(struct qce_device *qce, u32 *status)
{
	int ret = 0;

	*status = qce_read(qce, REG_STATUS);

	/*
	 * Don't use result dump status. The operation may not be complete.
	 * Instead, use the status we just read from the device. If we ever
	 * need result_status from the result dump, it must be byte swapped,
	 * since we set the device to little endian.
	 */
	if (*status & STATUS_ERRORS || !(*status & BIT(OPERATION_DONE_SHIFT)))
		ret = -ENXIO;
	else if (*status & BIT(MAC_FAILED_SHIFT))
		ret = -EBADMSG;

	return ret;
}
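
/*
 * Typical use (illustrative sketch, not taken from this file): a completion
 * handler reads the status once and propagates the result:
 *
 *	u32 status;
 *	int error = qce_check_status(qce, &status);
 *
 * error is 0 on success, -ENXIO when STATUS_ERRORS is set or the operation
 * has not completed, and -EBADMSG when the MAC check failed.
 */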

void qce_get_version(struct qce_device *qce, u32 *major, u32 *minor, u32 *step)
{
	u32 val;

	val = qce_read(qce, REG_VERSION);
	*major = (val & CORE_MAJOR_REV_MASK) >> CORE_MAJOR_REV_SHIFT;
	*minor = (val & CORE_MINOR_REV_MASK) >> CORE_MINOR_REV_SHIFT;
	*step = (val & CORE_STEP_REV_MASK) >> CORE_STEP_REV_SHIFT;
}