/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005,2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *            Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128  1
#define AES_KEYLEN_192  2
#define AES_KEYLEN_256  4

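/*
 * Bit mask of the AES key lengths for which the CPU provides a hardware
 * instruction; probed once in aes_init() via crypt_s390_func_available().
 */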
static char keylen_flag = 0;

struct s390_aes_ctx {
        u8 iv[AES_BLOCK_SIZE];
        u8 key[AES_MAX_KEY_SIZE];
        long enc;
        long dec;
        int key_len;
        union {
                struct crypto_blkcipher *blk;
                struct crypto_cipher *cip;
        } fallback;
};

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is supported, a positive number if it is not and the
 * software fallback is required, or a negative number if the key size is
 * not valid.
 */
static int need_fallback(unsigned int key_len)
{
        switch (key_len) {
        case 16:
                if (!(keylen_flag & AES_KEYLEN_128))
                        return 1;
                break;
        case 24:
                if (!(keylen_flag & AES_KEYLEN_192))
                        return 1;
                break;
        case 32:
                if (!(keylen_flag & AES_KEYLEN_256))
                        return 1;
                break;
        default:
                return -1;
        }
        return 0;
}

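/*
 * Mirror the request flags of the wrapping tfm into the software fallback
 * cipher before setting its key, and copy any result flags back so the
 * caller sees the same error reporting as with the hardware path.
 */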
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
                               unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
                                               CRYPTO_TFM_REQ_MASK);

        ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
                                   CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

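/*
 * Set the key for the single-block cipher: reject invalid key lengths,
 * store the key for use as KM parameter block, or hand it to the software
 * fallback if the hardware does not support this key size.
 */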
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int ret;

        ret = need_fallback(key_len);
        if (ret < 0) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        sctx->key_len = key_len;
        if (!ret) {
                memcpy(sctx->key, in_key, key_len);
                return 0;
        }

        return setkey_fallback_cip(tfm, in_key, key_len);
}

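/*
 * Single block en-/decryption: the KM (cipher message) instruction processes
 * one AES block directly with the stored key; key lengths the hardware does
 * not support are routed to the allocated software fallback cipher instead.
 */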
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 24:
                crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 32:
                crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 24:
                crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 32:
                crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        }
}

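/*
 * Allocate the generic implementation registered under the same cra_name as
 * software fallback.  Passing CRYPTO_ALG_NEED_FALLBACK in the mask makes
 * sure the allocator never selects an algorithm that itself needs a
 * fallback, in particular not this driver.
 */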
static int fallback_init_cip(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.cip = crypto_alloc_cipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.cip)) {
                printk(KERN_ERR "Error allocating fallback algo %s\n", name);
                return PTR_ERR(sctx->fallback.cip);
        }

        return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(sctx->fallback.cip);
        sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-s390",
        .cra_priority           = CRYPT_S390_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(aes_alg.cra_list),
        .cra_init               = fallback_init_cip,
        .cra_exit               = fallback_exit_cip,
        .cra_u                  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt,
                }
        }
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
                               unsigned int len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
                                               CRYPTO_TFM_REQ_MASK);

        ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
                                   CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

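/*
 * Run a request through the software fallback blkcipher: seed its IV from
 * desc->info, temporarily point desc->tfm at the fallback tfm for the call,
 * and restore the original tfm afterwards.
 */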
static int fallback_blk_dec(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        int ret;
        struct crypto_blkcipher *tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

        memcpy(crypto_blkcipher_crt(sctx->fallback.blk)->iv, desc->info,
               AES_BLOCK_SIZE);

        tfm = desc->tfm;
        desc->tfm = sctx->fallback.blk;

        ret = crypto_blkcipher_decrypt(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        int ret;
        struct crypto_blkcipher *tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

        memcpy(crypto_blkcipher_crt(sctx->fallback.blk)->iv, desc->info,
               AES_BLOCK_SIZE);

        tfm = desc->tfm;
        desc->tfm = sctx->fallback.blk;

        ret = crypto_blkcipher_encrypt(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

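/*
 * Select the KM function codes used for ECB processing and remember the key
 * length; key sizes the hardware cannot handle are forwarded to the software
 * fallback blkcipher.  The key itself is stored via aes_set_key().
 */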
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = KM_AES_128_ENCRYPT;
                sctx->dec = KM_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KM_AES_192_ENCRYPT;
                sctx->dec = KM_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KM_AES_256_ENCRYPT;
                sctx->dec = KM_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}

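/*
 * Walk the scatterlists in virtually mapped chunks and feed only complete
 * AES blocks to the KM instruction; the remaining bytes of a chunk are
 * handed back to blkcipher_walk_done() and show up again in the next
 * iteration.
 */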
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_km(func, param, out, in, n);
                BUG_ON((ret < 0) || (ret != n));

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.blk)) {
                printk(KERN_ERR "Error allocating fallback algo %s\n", name);
                return PTR_ERR(sctx->fallback.blk);
        }

        return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_blkcipher(sctx->fallback.blk);
        sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ecb_aes_alg.cra_list),
        .cra_init               = fallback_init_blk,
        .cra_exit               = fallback_exit_blk,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = ecb_aes_set_key,
                        .encrypt        = ecb_aes_encrypt,
                        .decrypt        = ecb_aes_decrypt,
                }
        }
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = KMC_AES_128_ENCRYPT;
                sctx->dec = KMC_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KMC_AES_192_ENCRYPT;
                sctx->dec = KMC_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KMC_AES_256_ENCRYPT;
                sctx->dec = KMC_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}

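/*
 * CBC via the KMC (cipher message with chaining) instruction.  The KMC
 * parameter block is the 16 byte chaining value followed by the key, which
 * is why sctx->iv is passed as param and why iv and key sit next to each
 * other in struct s390_aes_ctx.  The updated chaining value is copied back
 * to walk->iv once all blocks have been processed.
 */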
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;

        if (!nbytes)
                goto out;

        memcpy(param, walk->iv, AES_BLOCK_SIZE);
        do {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_kmc(func, param, out, in, n);
                BUG_ON((ret < 0) || (ret != n));

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
        return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-s390",
        .cra_priority           = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct s390_aes_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(cbc_aes_alg.cra_list),
        .cra_init               = fallback_init_blk,
        .cra_exit               = fallback_exit_blk,
        .cra_u                  = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = cbc_aes_set_key,
                        .encrypt        = cbc_aes_encrypt,
                        .decrypt        = cbc_aes_decrypt,
                }
        }
};

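/*
 * Module init: probe which AES key lengths the CPACF KM facility supports,
 * bail out if none are available, and register the cipher, ECB and CBC
 * algorithms, unregistering again in reverse order if a registration fails.
 */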
static int __init aes_init(void)
{
        int ret;

        if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
                keylen_flag |= AES_KEYLEN_128;
        if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
                keylen_flag |= AES_KEYLEN_192;
        if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
                keylen_flag |= AES_KEYLEN_256;

        if (!keylen_flag)
                return -EOPNOTSUPP;

        /* z9 109 and z9 BC/EC only support 128 bit key length */
        if (keylen_flag == AES_KEYLEN_128)
                printk(KERN_INFO
                       "aes_s390: hardware acceleration only available for "
                       "128 bit keys\n");

        ret = crypto_register_alg(&aes_alg);
        if (ret)
                goto aes_err;

        ret = crypto_register_alg(&ecb_aes_alg);
        if (ret)
                goto ecb_aes_err;

        ret = crypto_register_alg(&cbc_aes_alg);
        if (ret)
                goto cbc_aes_err;

out:
        return ret;

cbc_aes_err:
        crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
        crypto_unregister_alg(&aes_alg);
aes_err:
        goto out;
}

static void __exit aes_fini(void)
{
        crypto_unregister_alg(&cbc_aes_alg);
        crypto_unregister_alg(&ecb_aes_alg);
        crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);

MODULE_ALIAS("aes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");