/* Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/pci.h>
#include <linux/pci_ids.h>
#include <linux/crypto.h>
#include <linux/spinlock.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <linux/io.h>
#include <linux/delay.h>

#include "geode-aes.h"
/* Static structures */

static void __iomem *_iobase;
static spinlock_t lock;
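/*
 * There is a single AES engine behind _iobase, and its key, IV, and
 * data registers are globally shared state; "lock" serializes all
 * access to the hardware across transforms.
 */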
/* Write a 128 bit field (either a writable key or IV) */
static inline void
_writefield(u32 offset, const void *value)
{
	int i;

	for (i = 0; i < 4; i++)
		iowrite32(((const u32 *) value)[i],
			  _iobase + offset + (i * 4));
}
/* Read a 128 bit field (either a writable key or IV) */
static inline void
_readfield(u32 offset, void *value)
{
	int i;

	for (i = 0; i < 4; i++)
		((u32 *) value)[i] = ioread32(_iobase + offset + (i * 4));
}
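/*
 * Program the engine with source/destination/length, kick off one
 * operation, and busy-wait for the completion bit.  Buffers are handed
 * to the hardware by physical address (virt_to_phys), so callers must
 * pass linearly mapped memory.  Returns 0 on completion, 1 if the
 * AES_OP_TIMEOUT poll budget expired.
 */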
static int
do_crypt(const void *src, void *dst, u32 len, u32 flags)
{
	u32 status;
	u32 counter = AES_OP_TIMEOUT;

	iowrite32(virt_to_phys((void *)src), _iobase + AES_SOURCEA_REG);
	iowrite32(virt_to_phys(dst), _iobase + AES_DSTA_REG);
	iowrite32(len, _iobase + AES_LENA_REG);

	/* Start the operation */
	iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);

	do {
		status = ioread32(_iobase + AES_INTR_REG);
		cpu_relax();
	} while (!(status & AES_INTRA_PENDING) && --counter);

	/* Clear the event */
	iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);
	return counter ? 0 : 1;
}
static void
geode_aes_crypt(const struct geode_aes_tfm_ctx *tctx, const void *src,
		void *dst, u32 len, u8 *iv, int mode, int dir)
{
	u32 flags = 0;
	unsigned long iflags;
	int ret;

	/* If the source and destination are the same, then
	 * we need to turn on the coherent flags; otherwise
	 * we don't need to worry
	 */

	flags |= (AES_CTRL_DCA | AES_CTRL_SCA);

	if (dir == AES_DIR_ENCRYPT)
		flags |= AES_CTRL_ENCRYPT;

	/* Start the critical section */

	spin_lock_irqsave(&lock, iflags);

	if (mode == AES_MODE_CBC) {
		flags |= AES_CTRL_CBC;
		_writefield(AES_WRITEIV0_REG, iv);
	}

	flags |= AES_CTRL_WRKEY;
	_writefield(AES_WRITEKEY0_REG, tctx->key);

	ret = do_crypt(src, dst, len, flags);
	BUG_ON(ret);

	if (mode == AES_MODE_CBC)
		_readfield(AES_WRITEIV0_REG, iv);

	spin_unlock_irqrestore(&lock, iflags);
}
/* CRYPTO-API Functions */
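/*
 * The engine only implements AES-128.  For 192/256-bit keys, the
 * setkey handlers below hand the transform over to a software
 * fallback, propagating the request flags the crypto core set.
 */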
static int geode_setkey_cip(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int len)
{
	struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	tctx->keylen = len;

	if (len == AES_KEYSIZE_128) {
		memcpy(tctx->key, key, len);
		return 0;
	}

	if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
		/* not supported at all */
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * The requested key size is not supported by HW, do a fallback
	 */
	tctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	tctx->fallback.cip->base.crt_flags |=
		(tfm->crt_flags & CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(tctx->fallback.cip, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (tctx->fallback.cip->base.crt_flags &
				   CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
static int geode_setkey_skcipher(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	unsigned int ret;

	tctx->keylen = len;

	if (len == AES_KEYSIZE_128) {
		memcpy(tctx->key, key, len);
		return 0;
	}

	if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
		/* not supported at all */
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/*
	 * The requested key size is not supported by HW, do a fallback
	 */
	crypto_skcipher_clear_flags(tctx->fallback.skcipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->fallback.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	ret = crypto_skcipher_setkey(tctx->fallback.skcipher, key, len);
	crypto_skcipher_set_flags(tfm,
				  crypto_skcipher_get_flags(tctx->fallback.skcipher) &
				  CRYPTO_TFM_RES_MASK);
	return ret;
}
static void
geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
		crypto_cipher_encrypt_one(tctx->fallback.cip, out, in);
		return;
	}

	geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
			AES_MODE_ECB, AES_DIR_ENCRYPT);
}
static void
geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
		crypto_cipher_decrypt_one(tctx->fallback.cip, out, in);
		return;
	}

	geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
			AES_MODE_ECB, AES_DIR_DECRYPT);
}
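/* tfm init/exit: allocate and free the single-block cipher fallback. */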
static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = crypto_tfm_alg_name(tfm);
	struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	tctx->fallback.cip = crypto_alloc_cipher(name, 0,
						 CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(tctx->fallback.cip)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(tctx->fallback.cip);
	}

	return 0;
}
static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(tctx->fallback.cip);
}
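/*
 * Single-block "aes" cipher registration; the crypto core uses this
 * for raw block operations.
 */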
static struct crypto_alg geode_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "geode-aes",
	.cra_priority		= 300,
	.cra_alignmask		= 15,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct geode_aes_tfm_ctx),
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= geode_setkey_cip,
			.cia_encrypt		= geode_encrypt,
			.cia_decrypt		= geode_decrypt,
		},
	},
};
static int geode_init_skcipher(struct crypto_skcipher *tfm)
{
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	tctx->fallback.skcipher =
		crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK |
				      CRYPTO_ALG_ASYNC);
	if (IS_ERR(tctx->fallback.skcipher)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(tctx->fallback.skcipher);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(tctx->fallback.skcipher));
	return 0;
}
static void geode_exit_skcipher(struct crypto_skcipher *tfm)
{
	struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->fallback.skcipher);
}
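/*
 * Common handler for the four skcipher entry points.  Requests keyed
 * with a non-128-bit key are bounced to the fallback skcipher;
 * otherwise the walk feeds block-aligned, virtually mapped chunks to
 * the engine, with walk.iv carrying the CBC IV between chunks.
 */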
static int geode_skcipher_crypt(struct skcipher_request *req, int mode, int dir)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, tctx->fallback.skcipher);
		if (dir == AES_DIR_DECRYPT)
			return crypto_skcipher_decrypt(subreq);
		else
			return crypto_skcipher_encrypt(subreq);
	}

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) != 0) {
		geode_aes_crypt(tctx, walk.src.virt.addr, walk.dst.virt.addr,
				round_down(nbytes, AES_BLOCK_SIZE),
				walk.iv, mode, dir);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}

	return err;
}
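/* Thin wrappers binding mode and direction for the skcipher algs below. */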
static int geode_cbc_encrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_ENCRYPT);
}

static int geode_cbc_decrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_DECRYPT);
}

static int geode_ecb_encrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_ENCRYPT);
}

static int geode_ecb_decrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_DECRYPT);
}
static struct skcipher_alg geode_skcipher_algs[] = {
	{
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-geode",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
					  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct geode_aes_tfm_ctx),
		.base.cra_alignmask	= 15,
		.base.cra_module	= THIS_MODULE,
		.init			= geode_init_skcipher,
		.exit			= geode_exit_skcipher,
		.setkey			= geode_setkey_skcipher,
		.encrypt		= geode_cbc_encrypt,
		.decrypt		= geode_cbc_decrypt,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
	}, {
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-geode",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
					  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct geode_aes_tfm_ctx),
		.base.cra_alignmask	= 15,
		.base.cra_module	= THIS_MODULE,
		.init			= geode_init_skcipher,
		.exit			= geode_exit_skcipher,
		.setkey			= geode_setkey_skcipher,
		.encrypt		= geode_ecb_encrypt,
		.decrypt		= geode_ecb_decrypt,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
	},
};
static void geode_aes_remove(struct pci_dev *dev)
{
	crypto_unregister_alg(&geode_alg);
	crypto_unregister_skciphers(geode_skcipher_algs,
				    ARRAY_SIZE(geode_skcipher_algs));

	pci_iounmap(dev, _iobase);
	_iobase = NULL;

	pci_release_regions(dev);
	pci_disable_device(dev);
}
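/* Map BAR 0 of the LX security block and register the algorithms. */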
static int geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
{
	int ret;

	ret = pci_enable_device(dev);
	if (ret)
		return ret;

	ret = pci_request_regions(dev, "geode-aes");
	if (ret)
		goto eenable;

	_iobase = pci_iomap(dev, 0, 0);

	if (_iobase == NULL) {
		ret = -ENOMEM;
		goto erequest;
	}

	spin_lock_init(&lock);

	/* Clear any pending activity */
	iowrite32(AES_INTR_PENDING | AES_INTR_MASK, _iobase + AES_INTR_REG);

	ret = crypto_register_alg(&geode_alg);
	if (ret)
		goto eiomap;

	ret = crypto_register_skciphers(geode_skcipher_algs,
					ARRAY_SIZE(geode_skcipher_algs));
	if (ret)
		goto ealg;

	dev_notice(&dev->dev, "GEODE AES engine enabled.\n");
	return 0;

ealg:
	crypto_unregister_alg(&geode_alg);

eiomap:
	pci_iounmap(dev, _iobase);

erequest:
	pci_release_regions(dev);

eenable:
	pci_disable_device(dev);

	dev_err(&dev->dev, "GEODE AES initialization failed.\n");
	return ret;
}
static struct pci_device_id geode_aes_tbl[] = {
	{ PCI_VDEVICE(AMD, PCI_DEVICE_ID_AMD_LX_AES), },
	{ 0, }
};

MODULE_DEVICE_TABLE(pci, geode_aes_tbl);
static struct pci_driver geode_aes_driver = {
	.name = "Geode LX AES",
	.id_table = geode_aes_tbl,
	.probe = geode_aes_probe,
	.remove = geode_aes_remove,
};

module_pci_driver(geode_aes_driver);
MODULE_AUTHOR("Advanced Micro Devices, Inc.");
MODULE_DESCRIPTION("Geode LX Hardware AES driver");
MODULE_LICENSE("GPL");