/* Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
9 #include <linux/module.h>
10 #include <linux/kernel.h>
11 #include <linux/pci.h>
12 #include <linux/pci_ids.h>
13 #include <linux/crypto.h>
14 #include <linux/spinlock.h>
15 #include <crypto/algapi.h>
18 #include <asm/delay.h>
20 #include "geode-aes.h"
22 /* Register definitions */
24 #define AES_CTRLA_REG 0x0000
26 #define AES_CTRL_START 0x01
27 #define AES_CTRL_DECRYPT 0x00
28 #define AES_CTRL_ENCRYPT 0x02
29 #define AES_CTRL_WRKEY 0x04
30 #define AES_CTRL_DCA 0x08
31 #define AES_CTRL_SCA 0x10
32 #define AES_CTRL_CBC 0x20
34 #define AES_INTR_REG 0x0008
36 #define AES_INTRA_PENDING (1 << 16)
37 #define AES_INTRB_PENDING (1 << 17)
39 #define AES_INTR_PENDING (AES_INTRA_PENDING | AES_INTRB_PENDING)
40 #define AES_INTR_MASK 0x07
42 #define AES_SOURCEA_REG 0x0010
43 #define AES_DSTA_REG 0x0014
44 #define AES_LENA_REG 0x0018
45 #define AES_WRITEKEY0_REG 0x0030
46 #define AES_WRITEIV0_REG 0x0040
/* A very large counter that is used to gracefully bail out of an
 * operation in case of trouble
 */
52 #define AES_OP_TIMEOUT 0x50000
54 /* Static structures */
56 static void __iomem * _iobase;
57 static spinlock_t lock;
59 /* Write a 128 bit field (either a writable key or IV) */
61 _writefield(u32 offset, void *value)
64 for(i = 0; i < 4; i++)
65 iowrite32(((u32 *) value)[i], _iobase + offset + (i * 4));
68 /* Read a 128 bit field (either a writable key or IV) */
70 _readfield(u32 offset, void *value)
73 for(i = 0; i < 4; i++)
74 ((u32 *) value)[i] = ioread32(_iobase + offset + (i * 4));
78 do_crypt(void *src, void *dst, int len, u32 flags)
81 u32 counter = AES_OP_TIMEOUT;
83 iowrite32(virt_to_phys(src), _iobase + AES_SOURCEA_REG);
84 iowrite32(virt_to_phys(dst), _iobase + AES_DSTA_REG);
85 iowrite32(len, _iobase + AES_LENA_REG);
87 /* Start the operation */
88 iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);
91 status = ioread32(_iobase + AES_INTR_REG);
92 while(!(status & AES_INTRA_PENDING) && --counter);
95 iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);
96 return counter ? 0 : 1;
100 geode_aes_crypt(struct geode_aes_op *op)
103 unsigned long iflags;
105 if (op->len == 0 || op->src == op->dst)
108 if (op->flags & AES_FLAGS_COHERENT)
109 flags |= (AES_CTRL_DCA | AES_CTRL_SCA);
111 if (op->dir == AES_DIR_ENCRYPT)
112 flags |= AES_CTRL_ENCRYPT;
114 /* Start the critical section */
116 spin_lock_irqsave(&lock, iflags);
118 if (op->mode == AES_MODE_CBC) {
119 flags |= AES_CTRL_CBC;
120 _writefield(AES_WRITEIV0_REG, op->iv);
123 if (op->flags & AES_FLAGS_USRKEY) {
124 flags |= AES_CTRL_WRKEY;
125 _writefield(AES_WRITEKEY0_REG, op->key);
128 do_crypt(op->src, op->dst, op->len, flags);
130 if (op->mode == AES_MODE_CBC)
131 _readfield(AES_WRITEIV0_REG, op->iv);
133 spin_unlock_irqrestore(&lock, iflags);
138 /* CRYPTO-API Functions */
141 geode_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int len)
143 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
145 if (len != AES_KEY_LENGTH) {
146 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
150 memcpy(op->key, key, len);
155 geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
157 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
159 if ((out == NULL) || (in == NULL))
162 op->src = (void *) in;
163 op->dst = (void *) out;
164 op->mode = AES_MODE_ECB;
166 op->len = AES_MIN_BLOCK_SIZE;
167 op->dir = AES_DIR_ENCRYPT;
174 geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
176 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
178 if ((out == NULL) || (in == NULL))
181 op->src = (void *) in;
182 op->dst = (void *) out;
183 op->mode = AES_MODE_ECB;
185 op->len = AES_MIN_BLOCK_SIZE;
186 op->dir = AES_DIR_DECRYPT;
192 static struct crypto_alg geode_alg = {
194 .cra_driver_name = "geode-aes-128",
197 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
198 .cra_blocksize = AES_MIN_BLOCK_SIZE,
199 .cra_ctxsize = sizeof(struct geode_aes_op),
200 .cra_module = THIS_MODULE,
201 .cra_list = LIST_HEAD_INIT(geode_alg.cra_list),
204 .cia_min_keysize = AES_KEY_LENGTH,
205 .cia_max_keysize = AES_KEY_LENGTH,
206 .cia_setkey = geode_setkey,
207 .cia_encrypt = geode_encrypt,
208 .cia_decrypt = geode_decrypt
214 geode_cbc_decrypt(struct blkcipher_desc *desc,
215 struct scatterlist *dst, struct scatterlist *src,
218 struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
219 struct blkcipher_walk walk;
222 blkcipher_walk_init(&walk, dst, src, nbytes);
223 err = blkcipher_walk_virt(desc, &walk);
225 while((nbytes = walk.nbytes)) {
226 op->src = walk.src.virt.addr,
227 op->dst = walk.dst.virt.addr;
228 op->mode = AES_MODE_CBC;
229 op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
230 op->dir = AES_DIR_DECRYPT;
232 memcpy(op->iv, walk.iv, AES_IV_LENGTH);
234 ret = geode_aes_crypt(op);
236 memcpy(walk.iv, op->iv, AES_IV_LENGTH);
239 err = blkcipher_walk_done(desc, &walk, nbytes);
246 geode_cbc_encrypt(struct blkcipher_desc *desc,
247 struct scatterlist *dst, struct scatterlist *src,
250 struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
251 struct blkcipher_walk walk;
254 blkcipher_walk_init(&walk, dst, src, nbytes);
255 err = blkcipher_walk_virt(desc, &walk);
257 while((nbytes = walk.nbytes)) {
258 op->src = walk.src.virt.addr,
259 op->dst = walk.dst.virt.addr;
260 op->mode = AES_MODE_CBC;
261 op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
262 op->dir = AES_DIR_ENCRYPT;
264 memcpy(op->iv, walk.iv, AES_IV_LENGTH);
266 ret = geode_aes_crypt(op);
268 err = blkcipher_walk_done(desc, &walk, nbytes);
274 static struct crypto_alg geode_cbc_alg = {
275 .cra_name = "cbc(aes)",
276 .cra_driver_name = "cbc-aes-geode-128",
278 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
279 .cra_blocksize = AES_MIN_BLOCK_SIZE,
280 .cra_ctxsize = sizeof(struct geode_aes_op),
282 .cra_type = &crypto_blkcipher_type,
283 .cra_module = THIS_MODULE,
284 .cra_list = LIST_HEAD_INIT(geode_cbc_alg.cra_list),
287 .min_keysize = AES_KEY_LENGTH,
288 .max_keysize = AES_KEY_LENGTH,
289 .setkey = geode_setkey,
290 .encrypt = geode_cbc_encrypt,
291 .decrypt = geode_cbc_decrypt,
297 geode_ecb_decrypt(struct blkcipher_desc *desc,
298 struct scatterlist *dst, struct scatterlist *src,
301 struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
302 struct blkcipher_walk walk;
305 blkcipher_walk_init(&walk, dst, src, nbytes);
306 err = blkcipher_walk_virt(desc, &walk);
308 while((nbytes = walk.nbytes)) {
309 op->src = walk.src.virt.addr,
310 op->dst = walk.dst.virt.addr;
311 op->mode = AES_MODE_ECB;
312 op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
313 op->dir = AES_DIR_DECRYPT;
315 ret = geode_aes_crypt(op);
317 err = blkcipher_walk_done(desc, &walk, nbytes);
324 geode_ecb_encrypt(struct blkcipher_desc *desc,
325 struct scatterlist *dst, struct scatterlist *src,
328 struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
329 struct blkcipher_walk walk;
332 blkcipher_walk_init(&walk, dst, src, nbytes);
333 err = blkcipher_walk_virt(desc, &walk);
335 while((nbytes = walk.nbytes)) {
336 op->src = walk.src.virt.addr,
337 op->dst = walk.dst.virt.addr;
338 op->mode = AES_MODE_ECB;
339 op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
340 op->dir = AES_DIR_ENCRYPT;
342 ret = geode_aes_crypt(op);
344 ret = blkcipher_walk_done(desc, &walk, nbytes);
350 static struct crypto_alg geode_ecb_alg = {
351 .cra_name = "ecb(aes)",
352 .cra_driver_name = "ecb-aes-geode-128",
354 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
355 .cra_blocksize = AES_MIN_BLOCK_SIZE,
356 .cra_ctxsize = sizeof(struct geode_aes_op),
358 .cra_type = &crypto_blkcipher_type,
359 .cra_module = THIS_MODULE,
360 .cra_list = LIST_HEAD_INIT(geode_ecb_alg.cra_list),
363 .min_keysize = AES_KEY_LENGTH,
364 .max_keysize = AES_KEY_LENGTH,
365 .setkey = geode_setkey,
366 .encrypt = geode_ecb_encrypt,
367 .decrypt = geode_ecb_decrypt,
373 geode_aes_remove(struct pci_dev *dev)
375 crypto_unregister_alg(&geode_alg);
376 crypto_unregister_alg(&geode_ecb_alg);
377 crypto_unregister_alg(&geode_cbc_alg);
379 pci_iounmap(dev, _iobase);
382 pci_release_regions(dev);
383 pci_disable_device(dev);
388 geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
392 if ((ret = pci_enable_device(dev)))
395 if ((ret = pci_request_regions(dev, "geode-aes-128")))
398 _iobase = pci_iomap(dev, 0, 0);
400 if (_iobase == NULL) {
405 spin_lock_init(&lock);
407 /* Clear any pending activity */
408 iowrite32(AES_INTR_PENDING | AES_INTR_MASK, _iobase + AES_INTR_REG);
410 if ((ret = crypto_register_alg(&geode_alg)))
413 if ((ret = crypto_register_alg(&geode_ecb_alg)))
416 if ((ret = crypto_register_alg(&geode_cbc_alg)))
419 printk(KERN_NOTICE "geode-aes: GEODE AES engine enabled.\n");
423 crypto_unregister_alg(&geode_ecb_alg);
426 crypto_unregister_alg(&geode_alg);
429 pci_iounmap(dev, _iobase);
432 pci_release_regions(dev);
435 pci_disable_device(dev);
437 printk(KERN_ERR "geode-aes: GEODE AES initialization failed.\n");
441 static struct pci_device_id geode_aes_tbl[] = {
442 { PCI_VENDOR_ID_AMD, PCI_DEVICE_ID_AMD_LX_AES, PCI_ANY_ID, PCI_ANY_ID} ,
446 MODULE_DEVICE_TABLE(pci, geode_aes_tbl);
448 static struct pci_driver geode_aes_driver = {
449 .name = "Geode LX AES",
450 .id_table = geode_aes_tbl,
451 .probe = geode_aes_probe,
452 .remove = __devexit_p(geode_aes_remove)
458 return pci_register_driver(&geode_aes_driver);
464 pci_unregister_driver(&geode_aes_driver);
467 MODULE_AUTHOR("Advanced Micro Devices, Inc.");
468 MODULE_DESCRIPTION("Geode LX Hardware AES driver");
469 MODULE_LICENSE("GPL");
471 module_init(geode_aes_init);
472 module_exit(geode_aes_exit);