// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>

#include "aesp8-ppc.h"

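/*
 * Per-transform context: the expanded AES encryption key for the VMX
 * routines plus a generic "ctr(aes)" fallback used whenever VSX/SIMD
 * is not usable in the current context.
 */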
struct p8_aes_ctr_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
};

static int p8_aes_ctr_init(struct crypto_skcipher *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

	fallback = crypto_alloc_skcipher("ctr(aes)", 0,
					 CRYPTO_ALG_NEED_FALLBACK |
					 CRYPTO_ALG_ASYNC);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate ctr(aes) fallback: %ld\n",
		       PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	/* Reserve room in the request context for a fallback sub-request. */
	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(fallback));

	ctx->fallback = fallback;
	return 0;
}

static void p8_aes_ctr_exit(struct crypto_skcipher *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}

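/*
 * Key expansion runs with the VSX unit enabled, so preemption and page
 * faults must be disabled around enable_kernel_vsx(): the borrowed
 * vector register state must not be switched away mid-operation.
 */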
static int p8_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	/* Keep the fallback tfm keyed in step with the VMX key schedule. */
	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}

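/*
 * Handle the final partial block: encrypt the counter into a keystream
 * buffer, XOR only the remaining bytes into the destination, and bump
 * the counter one last time.
 */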
static void p8_aes_ctr_final(const struct p8_aes_ctr_ctx *ctx,
			     struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	crypto_xor_cpy(dst, keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

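/*
 * In CTR mode encryption and decryption are the same XOR-with-keystream
 * operation, so p8_aes_ctr_crypt() serves as both ->encrypt and
 * ->decrypt below.
 */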
static int p8_aes_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int ret;

	if (!crypto_simd_usable()) {
		/* VSX is off limits here; hand the request to the fallback. */
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx->fallback);
		return crypto_skcipher_encrypt(subreq);
	}

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
					    walk.dst.virt.addr,
					    nbytes / AES_BLOCK_SIZE,
					    &ctx->enc_key, walk.iv);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		/* Advance the counter past the blocks processed above. */
		do {
			crypto_inc(walk.iv, AES_BLOCK_SIZE);
		} while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);

		ret = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		p8_aes_ctr_final(ctx, &walk);
		ret = skcipher_walk_done(&walk, 0);
	}
	return ret;
}

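/*
 * CTR turns AES into a stream cipher, hence cra_blocksize = 1 with the
 * underlying block size exposed via chunksize. The priority of 2000
 * makes this implementation preferred over the generic ctr(aes) when
 * both are registered.
 */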
struct skcipher_alg p8_aes_ctr_alg = {
	.base.cra_name = "ctr(aes)",
	.base.cra_driver_name = "p8_aes_ctr",
	.base.cra_module = THIS_MODULE,
	.base.cra_priority = 2000,
	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize = 1,
	.base.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
	.setkey = p8_aes_ctr_setkey,
	.encrypt = p8_aes_ctr_crypt,
	.decrypt = p8_aes_ctr_crypt,
	.init = p8_aes_ctr_init,
	.exit = p8_aes_ctr_exit,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.chunksize = AES_BLOCK_SIZE,
};