/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005, 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *	      Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

static DEFINE_SPINLOCK(ctrblk_lock);
static char keylen_flag;

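/*
 * Per-transform context data: the raw AES key material handed to the
 * CPACF instructions and the software fallback tfms that are used when
 * the machine does not provide the required function for a key length.
 */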
	u8 key[AES_MAX_KEY_SIZE];
	struct crypto_blkcipher *blk;
	struct crypto_cipher *cip;
	struct crypto_blkcipher *fallback;

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback is
 * required, or a negative number in case the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
	if (!(keylen_flag & AES_KEYLEN_128))
	if (!(keylen_flag & AES_KEYLEN_192))
	if (!(keylen_flag & AES_KEYLEN_256))

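/*
 * Mirror the request flags of the s390 tfm onto the fallback cipher,
 * set the key on the fallback, and copy the result flags back so the
 * caller sees the fallback's error reporting.
 */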
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
			CRYPTO_TFM_RES_MASK);

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	ret = need_fallback(key_len);
	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return setkey_fallback_cip(tfm, in_key, key_len);

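/*
 * Single-block encrypt: use the KM function that matches the configured
 * key length, or the software fallback cipher if the hardware does not
 * support this key size. aes_decrypt() below is the mirror image.
 */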
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
	switch (sctx->key_len) {
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
	switch (sctx->key_len) {
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);

static int fallback_init_cip(struct crypto_tfm *tfm)
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);

static void fallback_exit_cip(struct crypto_tfm *tfm)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;

static struct crypto_alg aes_alg = {
	.cra_driver_name = "aes-s390",
	.cra_priority = CRYPT_S390_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_module = THIS_MODULE,
	.cra_init = fallback_init_cip,
	.cra_exit = fallback_exit_cip,
	.cia_min_keysize = AES_MIN_KEY_SIZE,
	.cia_max_keysize = AES_MAX_KEY_SIZE,
	.cia_setkey = aes_set_key,
	.cia_encrypt = aes_encrypt,
	.cia_decrypt = aes_decrypt,

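/*
 * blkcipher fallback helpers: setkey mirrors the request/result flags
 * between the s390 tfm and the fallback, while the enc/dec helpers
 * temporarily point desc->tfm at the fallback blkcipher for the call.
 */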
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);
	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
			CRYPTO_TFM_RES_MASK);

static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	desc->tfm = sctx->fallback.blk;
	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	desc->tfm = sctx->fallback.blk;
	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	ret = need_fallback(key_len);
	sctx->key_len = key_len;
	return setkey_fallback_blk(tfm, in_key, key_len);
	sctx->enc = KM_AES_128_ENCRYPT;
	sctx->dec = KM_AES_128_DECRYPT;
	sctx->enc = KM_AES_192_ENCRYPT;
	sctx->dec = KM_AES_192_DECRYPT;
	sctx->enc = KM_AES_256_ENCRYPT;
	sctx->dec = KM_AES_256_DECRYPT;
	return aes_set_key(tfm, in_key, key_len);

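/*
 * Walk the scatterlists and feed only complete AES blocks to the KM
 * instruction; any remaining partial block is handed back to the
 * blkcipher walk code.
 */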
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
		struct blkcipher_walk *walk)
	int ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;
		ret = crypt_s390_km(func, param, out, in, n);
		if (ret < 0 || ret != n)
		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);
	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);
	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);

static int fallback_init_blk(struct crypto_tfm *tfm)
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);

static void fallback_exit_blk(struct crypto_tfm *tfm)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;

static struct crypto_alg ecb_aes_alg = {
	.cra_name = "ecb(aes)",
	.cra_driver_name = "ecb-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = fallback_init_blk,
	.cra_exit = fallback_exit_blk,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.setkey = ecb_aes_set_key,
	.encrypt = ecb_aes_encrypt,
	.decrypt = ecb_aes_decrypt,

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	ret = need_fallback(key_len);
	sctx->key_len = key_len;
	return setkey_fallback_blk(tfm, in_key, key_len);
	sctx->enc = KMC_AES_128_ENCRYPT;
	sctx->dec = KMC_AES_128_DECRYPT;
	sctx->enc = KMC_AES_192_ENCRYPT;
	sctx->dec = KMC_AES_192_DECRYPT;
	sctx->enc = KMC_AES_256_ENCRYPT;
	sctx->dec = KMC_AES_256_DECRYPT;
	return aes_set_key(tfm, in_key, key_len);

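/*
 * CBC processing: KMC takes a parameter block holding the chaining
 * value (IV) and the key. The updated IV is copied back to the walk
 * once all complete blocks have been processed.
 */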
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
		struct blkcipher_walk *walk)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;
		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);
	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, &walk);

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);
	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, &walk);

static struct crypto_alg cbc_aes_alg = {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "cbc-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = fallback_init_blk,
	.cra_exit = fallback_exit_blk,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = cbc_aes_set_key,
	.encrypt = cbc_aes_encrypt,
	.decrypt = cbc_aes_decrypt,

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);
	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
			CRYPTO_TFM_RES_MASK);

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	desc->tfm = xts_ctx->fallback;
	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	desc->tfm = xts_ctx->fallback;
	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

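/*
 * XTS keys are twice the AES key size: the first half is the data key,
 * the second half the tweak key used by the PCC instruction. 32 and 64
 * byte keys are handled by the hardware; the 48 byte (AES-192) case is
 * not and goes through the software fallback.
 */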
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	xts_ctx->enc = KM_XTS_128_ENCRYPT;
	xts_ctx->dec = KM_XTS_128_DECRYPT;
	memcpy(xts_ctx->key + 16, in_key, 16);
	memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
	xts_fallback_setkey(tfm, in_key, key_len);
	xts_ctx->enc = KM_XTS_256_ENCRYPT;
	xts_ctx->dec = KM_XTS_256_DECRYPT;
	memcpy(xts_ctx->key, in_key, 32);
	memcpy(xts_ctx->pcc_key, in_key + 32, 32);
	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	xts_ctx->key_len = key_len;

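/*
 * XTS processing: PCC first derives the initial tweak from the IV and
 * the tweak key, then KM processes the complete blocks with the data
 * key and that tweak. The offset selects the 128 or 256 bit variant
 * within the 32 byte key fields.
 */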
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
		struct s390_xts_ctx *xts_ctx,
		struct blkcipher_walk *walk)
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct pcc_param pcc_param;
	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
	ret = crypt_s390_pcc(func, &pcc_param.key[offset]);
	memcpy(xts_param.key, xts_ctx->key, 32);
	memcpy(xts_param.init, pcc_param.xts, 16);
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_km(func, &xts_param.key[offset], out, in, n);
		if (ret < 0 || ret != n)
		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));

static int xts_aes_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);
	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);

static int xts_aes_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);
	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);

static int xts_fallback_init(struct crypto_tfm *tfm)
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);

static void xts_fallback_exit(struct crypto_tfm *tfm)
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;

static struct crypto_alg xts_aes_alg = {
	.cra_name = "xts(aes)",
	.cra_driver_name = "xts-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_xts_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = xts_fallback_init,
	.cra_exit = xts_fallback_exit,
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = xts_aes_set_key,
	.encrypt = xts_aes_encrypt,
	.decrypt = xts_aes_decrypt,

static int xts_aes_alg_reg;

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	sctx->enc = KMCTR_AES_128_ENCRYPT;
	sctx->dec = KMCTR_AES_128_DECRYPT;
	sctx->enc = KMCTR_AES_192_ENCRYPT;
	sctx->dec = KMCTR_AES_192_DECRYPT;
	sctx->enc = KMCTR_AES_256_ENCRYPT;
	sctx->dec = KMCTR_AES_256_DECRYPT;
	return aes_set_key(tfm, in_key, key_len);

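/*
 * Pre-fill the shared counter page with successive counter values so
 * that a single KMCTR call can process up to PAGE_SIZE bytes at once.
 */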
static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
	/* only use complete blocks, max. PAGE_SIZE */
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
		memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
		       AES_BLOCK_SIZE);
		crypto_inc(ctrptr + i, AES_BLOCK_SIZE);

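/*
 * CTR mode: use the shared counter page when ctrblk_lock can be taken,
 * which allows large KMCTR chunks, otherwise fall back to a single
 * on-stack counter block per iteration. A final partial block is
 * encrypted into a temporary buffer and copied out byte-wise.
 */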
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
		struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int n, nbytes;
	u8 buf[AES_BLOCK_SIZE], ctrbuf[AES_BLOCK_SIZE];
	u8 *out, *in, *ctrptr = ctrbuf;
	if (spin_trylock(&ctrblk_lock))
	memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			if (ctrptr == ctrblk)
				n = __ctrblk_init(ctrptr, nbytes);
			ret = crypt_s390_kmctr(func, sctx->key, out, in,
					       n, ctrptr);
			if (ret < 0 || ret != n) {
				if (ctrptr == ctrblk)
					spin_unlock(&ctrblk_lock);
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrptr, ctrptr + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrptr, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, nbytes);
	if (ctrptr == ctrblk) {
		memcpy(ctrbuf, ctrptr, AES_BLOCK_SIZE);
		memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
		spin_unlock(&ctrblk_lock);
	memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
	/* final block may be < AES_BLOCK_SIZE, copy only nbytes */
	out = walk->dst.virt.addr;
	in = walk->src.virt.addr;
	ret = crypt_s390_kmctr(func, sctx->key, buf, in,
			       AES_BLOCK_SIZE, ctrbuf);
	if (ret < 0 || ret != AES_BLOCK_SIZE)
	memcpy(out, buf, nbytes);
	crypto_inc(ctrbuf, AES_BLOCK_SIZE);
	ret = blkcipher_walk_done(desc, walk, 0);
	memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);

static struct crypto_alg ctr_aes_alg = {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "ctr-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = ctr_aes_set_key,
	.encrypt = ctr_aes_encrypt,
	.decrypt = ctr_aes_decrypt,

static int ctr_aes_alg_reg;

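/*
 * Module init: probe which AES functions the MSA facilities provide,
 * register the plain cipher and the ECB/CBC modes, and additionally the
 * XTS and CTR modes when the hardware supports them (CTR also needs a
 * page for the shared counter block).
 */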
static int __init aes_s390_init(void)
	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	ret = crypto_register_alg(&ecb_aes_alg);
	ret = crypto_register_alg(&cbc_aes_alg);

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);

	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		ret = crypto_register_alg(&ctr_aes_alg);
		free_page((unsigned long) ctrblk);

	crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);

static void __exit aes_s390_fini(void)
	if (ctr_aes_alg_reg) {
		crypto_unregister_alg(&ctr_aes_alg);
		free_page((unsigned long) ctrblk);
	crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);

module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");