/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005, 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *            Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128	1
#define AES_KEYLEN_192	2
#define AES_KEYLEN_256	4
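/* bit mask of the AES key lengths the hardware supports, set by aes_s390_init() */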
static char keylen_flag;
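/*
 * Per-tfm context data: the raw key material for the CPACF instructions
 * plus a software fallback tfm that is used whenever the hardware cannot
 * handle the requested key length.
 */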
	u8 key[AES_MAX_KEY_SIZE];

	struct crypto_blkcipher *blk;
	struct crypto_cipher *cip;

	struct crypto_blkcipher *fallback;
/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback is
 * required, or a negative number if the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
		if (!(keylen_flag & AES_KEYLEN_128))
		if (!(keylen_flag & AES_KEYLEN_192))
		if (!(keylen_flag & AES_KEYLEN_256))
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
			CRYPTO_TFM_RES_MASK);

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	ret = need_fallback(key_len);
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;

	sctx->key_len = key_len;
		memcpy(sctx->key, in_key, key_len);

	return setkey_fallback_cip(tfm, in_key, key_len);
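/*
 * Single-block en-/decryption: the CPACF KM (cipher message) instruction
 * works directly on the clear key in sctx->key; key lengths the hardware
 * does not support go through the allocated cipher fallback instead.
 */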
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);

	switch (sctx->key_len) {
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);

	switch (sctx->key_len) {
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
static int fallback_init_cip(struct crypto_tfm *tfm)
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
static void fallback_exit_cip(struct crypto_tfm *tfm)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;

static struct crypto_alg aes_alg = {
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cia_min_keysize	= AES_MIN_KEY_SIZE,
	.cia_max_keysize	= AES_MAX_KEY_SIZE,
	.cia_setkey		= aes_set_key,
	.cia_encrypt		= aes_encrypt,
	.cia_decrypt		= aes_decrypt,
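/*
 * Illustrative sketch only (not part of this driver): once aes_alg is
 * registered, an in-kernel user reaches this implementation through the
 * generic crypto API by algorithm name, for example:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_cipher_setkey(tfm, key, AES_KEYSIZE_128);
 *		crypto_cipher_encrypt_one(tfm, out, in);
 *		crypto_free_cipher(tfm);
 *	}
 *
 * The raised cra_priority lets this driver be preferred over the generic
 * C implementation when both are available.
 */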
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
			CRYPTO_TFM_RES_MASK);

static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	ret = need_fallback(key_len);
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);

		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;

		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;

		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;

	return aes_set_key(tfm, in_key, key_len);
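/*
 * ECB walk loop: each iteration hands the largest available multiple of
 * AES_BLOCK_SIZE to a single KM invocation; any remainder is returned to
 * blkcipher_walk_done() for the next pass.
 */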
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
		struct blkcipher_walk *walk)
	int ret = blkcipher_walk_virt(desc, walk);

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
static int fallback_init_blk(struct crypto_tfm *tfm)
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
static void fallback_exit_blk(struct crypto_tfm *tfm)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;

static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= ecb_aes_set_key,
	.encrypt		= ecb_aes_encrypt,
	.decrypt		= ecb_aes_decrypt,

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	ret = need_fallback(key_len);
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);

		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;

		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;

		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;

	return aes_set_key(tfm, in_key, key_len);
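/*
 * CBC: the KMC (cipher message with chaining) instruction takes a parameter
 * block consisting of the IV followed by the key and maintains the chaining
 * value itself; the final chaining value is copied back to walk->iv.
 */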
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
		struct blkcipher_walk *walk)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];

	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);

		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, &walk);

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, &walk);

static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= cbc_aes_set_key,
	.encrypt		= cbc_aes_encrypt,
	.decrypt		= cbc_aes_decrypt,
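/*
 * Illustrative sketch only (not part of this driver): a hypothetical user
 * of the registered "cbc(aes)" algorithm via the synchronous blkcipher API
 * of this kernel generation might look like:
 *
 *	struct crypto_blkcipher *tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
 *	struct blkcipher_desc desc = { .tfm = tfm };
 *	struct scatterlist sg;
 *
 *	crypto_blkcipher_setkey(tfm, key, AES_KEYSIZE_256);
 *	crypto_blkcipher_set_iv(tfm, iv, AES_BLOCK_SIZE);
 *	sg_init_one(&sg, buf, buflen);	(buflen: a multiple of AES_BLOCK_SIZE)
 *	crypto_blkcipher_encrypt(&desc, &sg, &sg, buflen);
 *	crypto_free_blkcipher(tfm);
 */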
static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
			CRYPTO_TFM_RES_MASK);

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;

	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;

	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

		xts_ctx->enc = KM_XTS_128_ENCRYPT;
		xts_ctx->dec = KM_XTS_128_DECRYPT;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);

		xts_fallback_setkey(tfm, in_key, key_len);

		xts_ctx->enc = KM_XTS_256_ENCRYPT;
		xts_ctx->dec = KM_XTS_256_DECRYPT;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc.key, in_key + 32, 32);

		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;

	xts_ctx->key_len = key_len;
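/*
 * XTS: PCC (perform cryptographic computation) first derives the initial
 * tweak from the second half of the key and walk->iv; the data itself is
 * then processed by KM with the XTS function codes.
 */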
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
		struct s390_xts_ctx *xts_ctx,
		struct blkcipher_walk *walk)
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));
	memset(xts_ctx->pcc.bit, 0, sizeof(xts_ctx->pcc.bit));
	memset(xts_ctx->pcc.xts, 0, sizeof(xts_ctx->pcc.xts));
	memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak));
	param = xts_ctx->pcc.key + offset;
	ret = crypt_s390_pcc(func, param);

	memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16);
	param = xts_ctx->key + offset;

		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON(ret < 0 || ret != n);

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
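/*
 * A 48-byte key selects XTS-AES-192, which the hardware does not provide;
 * such requests are routed to the software fallback allocated at init time.
 */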
static int xts_aes_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);

static int xts_aes_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
static int xts_fallback_init(struct crypto_tfm *tfm)
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
static void xts_fallback_exit(struct crypto_tfm *tfm)
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;

static struct crypto_alg xts_aes_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(xts_aes_alg.cra_list),
	.cra_init		= xts_fallback_init,
	.cra_exit		= xts_fallback_exit,
	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= xts_aes_set_key,
	.encrypt		= xts_aes_encrypt,
	.decrypt		= xts_aes_decrypt,

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

		sctx->enc = KMCTR_AES_128_ENCRYPT;
		sctx->dec = KMCTR_AES_128_DECRYPT;

		sctx->enc = KMCTR_AES_192_ENCRYPT;
		sctx->dec = KMCTR_AES_192_DECRYPT;

		sctx->enc = KMCTR_AES_256_ENCRYPT;
		sctx->dec = KMCTR_AES_256_DECRYPT;

	return aes_set_key(tfm, in_key, key_len);
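/*
 * CTR: up to one page of consecutive counter values is pre-built in ctrblk
 * so that a single KMCTR invocation can process many blocks at once; a
 * final partial block goes through a stack buffer and is copied byte-wise.
 */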
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
		struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int i, n, nbytes;
	u8 buf[AES_BLOCK_SIZE];

	memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			/* only use complete blocks, max. PAGE_SIZE */
			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
					nbytes & ~(AES_BLOCK_SIZE - 1);
			for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
				memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
				crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
			ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
			BUG_ON(ret < 0 || ret != n);
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrblk, AES_BLOCK_SIZE);

		ret = blkcipher_walk_done(desc, walk, nbytes);

	/* final block may be < AES_BLOCK_SIZE, copy only nbytes */
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
				       AES_BLOCK_SIZE, ctrblk);
		BUG_ON(ret < 0 || ret != AES_BLOCK_SIZE);
		memcpy(out, buf, nbytes);
		crypto_inc(ctrblk, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
static int ctr_aes_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);

static struct crypto_alg ctr_aes_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ctr_aes_alg.cra_list),
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= ctr_aes_set_key,
	.encrypt		= ctr_aes_encrypt,
	.decrypt		= ctr_aes_decrypt,
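/*
 * Probe the CPACF facilities at module init: the plain KM functions decide
 * which key lengths get hardware support, while the MSA4 checks gate the
 * registration of the XTS and CTR algorithms.
 */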
static int __init aes_s390_init(void)
	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);

	ret = crypto_register_alg(&ecb_aes_alg);

	ret = crypto_register_alg(&cbc_aes_alg);

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);

	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);

		ret = crypto_register_alg(&ctr_aes_alg);
			free_page((unsigned long) ctrblk);

	crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
static void __exit aes_s390_fini(void)
	crypto_unregister_alg(&ctr_aes_alg);
	free_page((unsigned long) ctrblk);
	crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);

module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");