// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * XCTR: XOR Counter mode - Adapted from ctr.c
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 * Copyright 2021 Google LLC
 */

/*
 * XCTR mode is a blockcipher mode of operation used to implement HCTR2. XCTR is
 * closely related to the CTR mode of operation; the main difference is that CTR
 * generates the keystream using E(CTR + IV) whereas XCTR generates the
 * keystream using E(CTR ^ IV). This allows implementations to avoid dealing
 * with multi-limb integers (as is required in CTR mode). XCTR is also specified
 * using little-endian arithmetic, which makes it slightly faster on LE machines.
 *
 * See the HCTR2 paper for more details:
 *	Length-preserving encryption with HCTR2
 *	(https://eprint.iacr.org/2021/1441.pdf)
 */

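/*
 * Illustrative sketch of the keystream derivation (notation ours, not part
 * of the code): for the i-th 16-byte block, counting from 1, XCTR computes
 *
 *	S_i = E_K(IV ^ le32(i))
 *
 * where le32(i) is the 32-bit little-endian encoding of i, XORed into the
 * first four bytes of the IV; the remaining twelve IV bytes pass through
 * unchanged. The output is then C_i = P_i ^ S_i. Decryption is the identical
 * operation, which is why encrypt and decrypt share one handler below.
 */
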
#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

/* For now this implementation is limited to 16-byte blocks for simplicity */
#define XCTR_BLOCKSIZE 16

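/*
 * Handle the final, partial block (0 < walk->nbytes < XCTR_BLOCKSIZE).
 * The counter is XORed into walk->iv in place, the result is encrypted into
 * a stack buffer, and the same counter value is XORed in again afterwards to
 * restore the IV; the full-block helpers below use the same trick.
 * crypto_xor_cpy() writes only nbytes bytes of output, so the excess
 * keystream bytes are simply discarded.
 */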
static void crypto_xctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm, u32 byte_ctr)
{
	u8 keystream[XCTR_BLOCKSIZE];
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;
	__le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);

	crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
	crypto_cipher_encrypt_one(tfm, keystream, walk->iv);
	crypto_xor_cpy(dst, keystream, src, nbytes);
	crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
}

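/*
 * Encrypt/decrypt full blocks when the source and destination buffers
 * differ. The keystream block is generated directly into dst and the input
 * is then XORed in, so no temporary buffer is needed. Returns the number of
 * leftover bytes (< XCTR_BLOCKSIZE) for the caller to carry forward.
 */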
static int crypto_xctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm, u32 byte_ctr)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;
	__le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);

	do {
		/* create keystream */
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
		fn(crypto_cipher_tfm(tfm), dst, walk->iv);
		crypto_xor(dst, src, XCTR_BLOCKSIZE);
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));

		le32_add_cpu(&ctr32, 1);

		src += XCTR_BLOCKSIZE;
		dst += XCTR_BLOCKSIZE;
	} while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE);

	return nbytes;
}

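/*
 * Encrypt/decrypt full blocks when the operation is in place (src == dst).
 * The keystream cannot be generated directly into the data buffer without
 * clobbering the input, so it goes through a stack bounce buffer that is
 * aligned to the underlying cipher's alignment mask via PTR_ALIGN().
 */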
static int crypto_xctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm, u32 byte_ctr)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *data = walk->src.virt.addr;
	u8 tmp[XCTR_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	__le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);

	do {
		/* create keystream */
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
		fn(crypto_cipher_tfm(tfm), keystream, walk->iv);
		crypto_xor(data, keystream, XCTR_BLOCKSIZE);
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));

		le32_add_cpu(&ctr32, 1);

		data += XCTR_BLOCKSIZE;
	} while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE);

	return nbytes;
}

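/*
 * Top-level handler, shared by encryption and decryption since XCTR is a
 * pure keystream XOR. The skcipher walk hands the data back in chunks;
 * byte_ctr tracks how many bytes have been consumed so far, so the per-block
 * counter stays correct across walk steps. Any final partial block is
 * handled separately by crypto_xctr_crypt_final().
 */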
static int crypto_xctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	u32 byte_ctr = 0;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= XCTR_BLOCKSIZE) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_xctr_crypt_inplace(&walk, cipher,
							   byte_ctr);
		else
			nbytes = crypto_xctr_crypt_segment(&walk, cipher,
							   byte_ctr);

		byte_ctr += walk.nbytes - nbytes;
		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_xctr_crypt_final(&walk, cipher, byte_ctr);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

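/*
 * Create an "xctr(cipher)" skcipher instance around a single underlying
 * block cipher. skcipher_alloc_instance_simple() handles the common template
 * boilerplate; the checks below then enforce the 16-byte block size
 * restriction and mark the resulting algorithm as a stream cipher.
 */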
static int crypto_xctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	/* Block size must be 16 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize != XCTR_BLOCKSIZE)
		goto out_free_inst;

	/* XCTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_xctr_crypt;
	inst->alg.decrypt = crypto_xctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

static struct crypto_template crypto_xctr_tmpl = {
	.name = "xctr",
	.create = crypto_xctr_create,
	.module = THIS_MODULE,
};

static int __init crypto_xctr_module_init(void)
{
	return crypto_register_template(&crypto_xctr_tmpl);
}

static void __exit crypto_xctr_module_exit(void)
{
	crypto_unregister_template(&crypto_xctr_tmpl);
}

subsys_initcall(crypto_xctr_module_init);
module_exit(crypto_xctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("xctr");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);

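/*
 * Usage sketch (hypothetical caller, not part of this file), assuming an
 * AES implementation is registered so that "xctr(aes)" can be instantiated:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("xctr(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 *	...
 *	skcipher_request_set_crypt(req, src, dst, len, iv);
 *	err = crypto_skcipher_encrypt(req);
 *
 * Decryption uses crypto_skcipher_decrypt() with the same key and IV; both
 * paths reach crypto_xctr_crypt() above.
 */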