/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005, 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *            Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>

#define AES_KEYLEN_128 1
#define AES_KEYLEN_192 2
#define AES_KEYLEN_256 4

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);
static char keylen_flag;
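
/*
 * keylen_flag caches which AES key lengths the CPACF instructions of
 * this machine support; ctrblk is a shared, page-sized buffer of
 * precomputed counter blocks for CTR mode, serialized by ctrblk_lock.
 */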

struct s390_aes_ctx {
        u8 key[AES_MAX_KEY_SIZE];
        long enc;
        long dec;
        int key_len;
        union {
                struct crypto_skcipher *blk;
                struct crypto_cipher *cip;
        } fallback;
};

struct s390_xts_ctx {
        u8 key[32];
        u8 pcc_key[32];
        long enc;
        long dec;
        int key_len;
        struct crypto_skcipher *fallback;
};
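
/*
 * Both contexts carry a software fallback tfm: when the CPACF facility
 * does not implement the requested key length (e.g. 192/256-bit keys
 * on a z9), the operation is handed to the generic implementation.
 */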

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback is
 * required or a negative number in case the key size is not valid
 */
static int need_fallback(unsigned int key_len)
{
        switch (key_len) {
        case 16:
                if (!(keylen_flag & AES_KEYLEN_128))
                        return 1;
                break;
        case 24:
                if (!(keylen_flag & AES_KEYLEN_192))
                        return 1;
                break;
        case 32:
                if (!(keylen_flag & AES_KEYLEN_256))
                        return 1;
                break;
        default:
                return -1;
        }
        return 0;
}
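
/*
 * Mirror the request flags into the fallback cipher before setkey and,
 * if the key is rejected, copy the result flags back so the caller can
 * see why.
 */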
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
                               unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
                                               CRYPTO_TFM_REQ_MASK);

        ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
                                   CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int ret;

        ret = need_fallback(key_len);
        if (ret < 0) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        sctx->key_len = key_len;
        if (!ret) {
                memcpy(sctx->key, in_key, key_len);
                return 0;
        }

        return setkey_fallback_cip(tfm, in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                cpacf_km(CPACF_KM_AES_128_ENC, &sctx->key, out, in,
                         AES_BLOCK_SIZE);
                break;
        case 24:
                cpacf_km(CPACF_KM_AES_192_ENC, &sctx->key, out, in,
                         AES_BLOCK_SIZE);
                break;
        case 32:
                cpacf_km(CPACF_KM_AES_256_ENC, &sctx->key, out, in,
                         AES_BLOCK_SIZE);
                break;
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                cpacf_km(CPACF_KM_AES_128_DEC, &sctx->key, out, in,
                         AES_BLOCK_SIZE);
                break;
        case 24:
                cpacf_km(CPACF_KM_AES_192_DEC, &sctx->key, out, in,
                         AES_BLOCK_SIZE);
                break;
        case 32:
                cpacf_km(CPACF_KM_AES_256_DEC, &sctx->key, out, in,
                         AES_BLOCK_SIZE);
                break;
        }
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.cip = crypto_alloc_cipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.cip)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.cip);
        }

        return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(sctx->fallback.cip);
        sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
        .cra_name = "aes",
        .cra_driver_name = "aes-s390",
        .cra_priority = 300,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER |
                     CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_aes_ctx),
        .cra_module = THIS_MODULE,
        .cra_init = fallback_init_cip,
        .cra_exit = fallback_exit_cip,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = AES_MIN_KEY_SIZE,
                        .cia_max_keysize = AES_MAX_KEY_SIZE,
                        .cia_setkey = aes_set_key,
                        .cia_encrypt = aes_encrypt,
                        .cia_decrypt = aes_decrypt,
                }
        }
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
                               unsigned int len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
                                                      CRYPTO_TFM_REQ_MASK);

        ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);

        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
        tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
                          CRYPTO_TFM_RES_MASK;

        return ret;
}
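
/*
 * The ecb/cbc fallback paths drive the skcipher interface directly: a
 * request is built on the stack, executed synchronously (no callback),
 * and wiped afterwards.
 */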
static int fallback_blk_dec(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm = desc->tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
        SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

        skcipher_request_set_tfm(req, sctx->fallback.blk);
        skcipher_request_set_callback(req, desc->flags, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

        ret = crypto_skcipher_decrypt(req);

        skcipher_request_zero(req);
        return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm = desc->tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
        SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

        skcipher_request_set_tfm(req, sctx->fallback.blk);
        skcipher_request_set_callback(req, desc->flags, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

        ret = crypto_skcipher_encrypt(req);

        skcipher_request_zero(req);
        return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = CPACF_KM_AES_128_ENC;
                sctx->dec = CPACF_KM_AES_128_DEC;
                break;
        case 24:
                sctx->enc = CPACF_KM_AES_192_ENC;
                sctx->dec = CPACF_KM_AES_192_DEC;
                break;
        case 32:
                sctx->enc = CPACF_KM_AES_256_ENC;
                sctx->dec = CPACF_KM_AES_256_DEC;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}
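
/*
 * Walk the scatterlists and feed the largest possible multiple of
 * AES_BLOCK_SIZE to the KM instruction per iteration; cpacf_km returns
 * the number of bytes it processed, so any short count is an error.
 */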
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = cpacf_km(func, param, out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
                                                   CRYPTO_ALG_ASYNC |
                                                   CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.blk)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.blk);
        }

        return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_skcipher(sctx->fallback.blk);
}

static struct crypto_alg ecb_aes_alg = {
        .cra_name = "ecb(aes)",
        .cra_driver_name = "ecb-aes-s390",
        .cra_priority = 400,    /* combo: aes + ecb */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_aes_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = fallback_init_blk,
        .cra_exit = fallback_exit_blk,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .setkey = ecb_aes_set_key,
                        .encrypt = ecb_aes_encrypt,
                        .decrypt = ecb_aes_decrypt,
                }
        }
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = CPACF_KMC_AES_128_ENC;
                sctx->dec = CPACF_KMC_AES_128_DEC;
                break;
        case 24:
                sctx->enc = CPACF_KMC_AES_192_ENC;
                sctx->dec = CPACF_KMC_AES_192_DEC;
                break;
        case 32:
                sctx->enc = CPACF_KMC_AES_256_ENC;
                sctx->dec = CPACF_KMC_AES_256_DEC;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}
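
/*
 * KMC takes the IV and the key in one contiguous parameter block and
 * updates the chaining value in place; it is copied back to walk->iv
 * once the walk is finished.
 */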
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct blkcipher_walk *walk)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        struct {
                u8 iv[AES_BLOCK_SIZE];
                u8 key[AES_MAX_KEY_SIZE];
        } param;

        if (!nbytes)
                goto out;

        memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
        memcpy(param.key, sctx->key, sctx->key_len);
        do {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = cpacf_kmc(func, &param, out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
        return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->enc, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->dec, &walk);
}

static struct crypto_alg cbc_aes_alg = {
        .cra_name = "cbc(aes)",
        .cra_driver_name = "cbc-aes-s390",
        .cra_priority = 400,    /* combo: aes + cbc */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_aes_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = fallback_init_blk,
        .cra_exit = fallback_exit_blk,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = cbc_aes_set_key,
                        .encrypt = cbc_aes_encrypt,
                        .decrypt = cbc_aes_decrypt,
                }
        }
};
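
/*
 * Parameter block for the PCC instruction used to compute the initial
 * XTS tweak; the layout (32-byte key followed by 16-byte tweak, block,
 * bit and xts fields) matches what xts_aes_crypt() below reads and
 * writes.
 */
struct pcc_param {
        u8 key[32];
        u8 tweak[16];
        u8 block[16];
        u8 bit[16];
        u8 xts[16];
};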

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
                               unsigned int len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        int ret;

        crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
                                                     CRYPTO_TFM_REQ_MASK);

        ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);

        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
        tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
                          CRYPTO_TFM_RES_MASK;

        return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
                                struct scatterlist *dst, struct scatterlist *src,
                                unsigned int nbytes)
{
        struct crypto_blkcipher *tfm = desc->tfm;
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
        SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
        unsigned int ret;

        skcipher_request_set_tfm(req, xts_ctx->fallback);
        skcipher_request_set_callback(req, desc->flags, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

        ret = crypto_skcipher_decrypt(req);

        skcipher_request_zero(req);
        return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
                                struct scatterlist *dst, struct scatterlist *src,
                                unsigned int nbytes)
{
        struct crypto_blkcipher *tfm = desc->tfm;
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
        SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
        unsigned int ret;

        skcipher_request_set_tfm(req, xts_ctx->fallback);
        skcipher_request_set_callback(req, desc->flags, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

        ret = crypto_skcipher_encrypt(req);

        skcipher_request_zero(req);
        return ret;
}

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int err;

        err = xts_check_key(tfm, in_key, key_len);
        if (err)
                return err;

        switch (key_len) {
        case 32:
                xts_ctx->enc = CPACF_KM_XTS_128_ENC;
                xts_ctx->dec = CPACF_KM_XTS_128_DEC;
                memcpy(xts_ctx->key + 16, in_key, 16);
                memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
                break;
        case 48:
                xts_ctx->enc = 0;
                xts_ctx->dec = 0;
                xts_fallback_setkey(tfm, in_key, key_len);
                break;
        case 64:
                xts_ctx->enc = CPACF_KM_XTS_256_ENC;
                xts_ctx->dec = CPACF_KM_XTS_256_DEC;
                memcpy(xts_ctx->key, in_key, 32);
                memcpy(xts_ctx->pcc_key, in_key + 32, 32);
                break;
        default:
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        xts_ctx->key_len = key_len;
        return 0;
}
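
/*
 * For 32-byte (XTS-128) keys only the upper halves of the 32-byte key
 * fields hold key material, so offset evaluates to 16; for 64-byte
 * (XTS-256) keys it is 0 and the full fields are used.
 */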
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_xts_ctx *xts_ctx,
                         struct blkcipher_walk *walk)
{
        unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        unsigned int n;
        u8 *in, *out;
        struct pcc_param pcc_param;
        struct {
                u8 key[32];
                u8 init[16];
        } xts_param;

        if (!nbytes)
                goto out;

        memset(pcc_param.block, 0, sizeof(pcc_param.block));
        memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
        memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
        memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
        memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
        /* remove decipher modifier bit from 'func' and call PCC */
        ret = cpacf_pcc(func & 0x7f, &pcc_param.key[offset]);
        if (ret < 0)
                return -EIO;

        memcpy(xts_param.key, xts_ctx->key, 32);
        memcpy(xts_param.init, pcc_param.xts, 16);
        do {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;

                ret = cpacf_km(func, &xts_param.key[offset], out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));

out:
        return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(xts_ctx->key_len == 48))
                return xts_fallback_encrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(xts_ctx->key_len == 48))
                return xts_fallback_decrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
                                                  CRYPTO_ALG_ASYNC |
                                                  CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(xts_ctx->fallback)) {
                pr_err("Allocating XTS fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(xts_ctx->fallback);
        }
        return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        crypto_free_skcipher(xts_ctx->fallback);
}

static struct crypto_alg xts_aes_alg = {
        .cra_name = "xts(aes)",
        .cra_driver_name = "xts-aes-s390",
        .cra_priority = 400,    /* combo: aes + xts */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_xts_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = xts_fallback_init,
        .cra_exit = xts_fallback_exit,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize = 2 * AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = xts_aes_set_key,
                        .encrypt = xts_aes_encrypt,
                        .decrypt = xts_aes_decrypt,
                }
        }
};

static int xts_aes_alg_reg;

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        switch (key_len) {
        case 16:
                sctx->enc = CPACF_KMCTR_AES_128_ENC;
                sctx->dec = CPACF_KMCTR_AES_128_DEC;
                break;
        case 24:
                sctx->enc = CPACF_KMCTR_AES_192_ENC;
                sctx->dec = CPACF_KMCTR_AES_192_DEC;
                break;
        case 32:
                sctx->enc = CPACF_KMCTR_AES_256_ENC;
                sctx->dec = CPACF_KMCTR_AES_256_DEC;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}

static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
        unsigned int i, n;

        /* only use complete blocks, max. PAGE_SIZE */
        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
        for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
                memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
                       AES_BLOCK_SIZE);
                crypto_inc(ctrptr + i, AES_BLOCK_SIZE);
        }
        return n;
}
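
/*
 * CTR batching: if the shared page-sized ctrblk buffer can be locked,
 * __ctrblk_init() materializes a run of consecutive counter values so
 * KMCTR can process many blocks per call; otherwise the code falls
 * back to one counter block at a time on the stack.
 */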
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
        unsigned int n, nbytes;
        u8 buf[AES_BLOCK_SIZE], ctrbuf[AES_BLOCK_SIZE];
        u8 *out, *in, *ctrptr = ctrbuf;

        if (!walk->nbytes)
                return ret;

        if (spin_trylock(&ctrblk_lock))
                ctrptr = ctrblk;

        memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE);
        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                while (nbytes >= AES_BLOCK_SIZE) {
                        if (ctrptr == ctrblk)
                                n = __ctrblk_init(ctrptr, nbytes);
                        else
                                n = AES_BLOCK_SIZE;
                        ret = cpacf_kmctr(func, sctx->key, out, in, n, ctrptr);
                        if (ret < 0 || ret != n) {
                                if (ctrptr == ctrblk)
                                        spin_unlock(&ctrblk_lock);
                                return -EIO;
                        }
                        if (n > AES_BLOCK_SIZE)
                                memcpy(ctrptr, ctrptr + n - AES_BLOCK_SIZE,
                                       AES_BLOCK_SIZE);
                        crypto_inc(ctrptr, AES_BLOCK_SIZE);
                        out += n;
                        in += n;
                        nbytes -= n;
                }
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }
        if (ctrptr == ctrblk) {
                if (nbytes)
                        memcpy(ctrbuf, ctrptr, AES_BLOCK_SIZE);
                else
                        memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
                spin_unlock(&ctrblk_lock);
        } else {
                if (!nbytes)
                        memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
        }
        /*
         * final block may be < AES_BLOCK_SIZE, copy only nbytes
         */
        if (nbytes) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                ret = cpacf_kmctr(func, sctx->key, buf, in,
                                  AES_BLOCK_SIZE, ctrbuf);
                if (ret < 0 || ret != AES_BLOCK_SIZE)
                        return -EIO;
                memcpy(out, buf, nbytes);
                crypto_inc(ctrbuf, AES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, 0);
                memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);
        }

        return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
        .cra_name = "ctr(aes)",
        .cra_driver_name = "ctr-aes-s390",
        .cra_priority = 400,    /* combo: aes + ctr */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct s390_aes_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = ctr_aes_set_key,
                        .encrypt = ctr_aes_encrypt,
                        .decrypt = ctr_aes_decrypt,
                }
        }
};

static int ctr_aes_alg_reg;
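
/*
 * Module init: probe the CPACF facility for each AES function and only
 * register the algorithms the hardware can back; XTS and CTR require
 * all of their KM/KMCTR functions to be present.
 */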
static int __init aes_s390_init(void)
{
        int ret;

        if (cpacf_query(CPACF_KM, CPACF_KM_AES_128_ENC))
                keylen_flag |= AES_KEYLEN_128;
        if (cpacf_query(CPACF_KM, CPACF_KM_AES_192_ENC))
                keylen_flag |= AES_KEYLEN_192;
        if (cpacf_query(CPACF_KM, CPACF_KM_AES_256_ENC))
                keylen_flag |= AES_KEYLEN_256;

        if (!keylen_flag)
                return -EOPNOTSUPP;

        /* z9 109 and z9 BC/EC only support 128 bit key length */
        if (keylen_flag == AES_KEYLEN_128)
                pr_info("AES hardware acceleration is only available for"
                        " 128-bit keys\n");

        ret = crypto_register_alg(&aes_alg);
        if (ret)
                goto aes_err;

        ret = crypto_register_alg(&ecb_aes_alg);
        if (ret)
                goto ecb_aes_err;

        ret = crypto_register_alg(&cbc_aes_alg);
        if (ret)
                goto cbc_aes_err;

        if (cpacf_query(CPACF_KM, CPACF_KM_XTS_128_ENC) &&
            cpacf_query(CPACF_KM, CPACF_KM_XTS_256_ENC)) {
                ret = crypto_register_alg(&xts_aes_alg);
                if (ret)
                        goto xts_aes_err;
                xts_aes_alg_reg = 1;
        }

        if (cpacf_query(CPACF_KMCTR, CPACF_KMCTR_AES_128_ENC) &&
            cpacf_query(CPACF_KMCTR, CPACF_KMCTR_AES_192_ENC) &&
            cpacf_query(CPACF_KMCTR, CPACF_KMCTR_AES_256_ENC)) {
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto ctr_aes_err;
                }
                ret = crypto_register_alg(&ctr_aes_alg);
                if (ret) {
                        free_page((unsigned long) ctrblk);
                        goto ctr_aes_err;
                }
                ctr_aes_alg_reg = 1;
        }

out:
        return ret;

ctr_aes_err:
        crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
        crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
        crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
        crypto_unregister_alg(&aes_alg);
aes_err:
        goto out;
}

static void __exit aes_s390_fini(void)
{
        if (ctr_aes_alg_reg) {
                crypto_unregister_alg(&ctr_aes_alg);
                free_page((unsigned long) ctrblk);
        }
        if (xts_aes_alg_reg)
                crypto_unregister_alg(&xts_aes_alg);
        crypto_unregister_alg(&cbc_aes_alg);
        crypto_unregister_alg(&ecb_aes_alg);
        crypto_unregister_alg(&aes_alg);
}

module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");