/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005, 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *	      Sebastian Siewior (sebastian@breakpoint.cc) SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"
#define AES_KEYLEN_128	1
#define AES_KEYLEN_192	2
#define AES_KEYLEN_256	4

static u8 *ctrblk;
static char keylen_flag;

struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

struct pcc_param {
	u8 key[32];
	u8 tweak[16];
	u8 block[16];
	u8 bit[16];
	u8 xts[16];
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	long enc;
	long dec;
	int key_len;
	struct crypto_blkcipher *fallback;
};
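/*
 * Note (added commentary): each context keeps a handle to a software
 * implementation of the same algorithm ("fallback"), used whenever
 * need_fallback() below reports that the machine's CPACF instructions do
 * not cover the selected key length. The union in s390_aes_ctx presumably
 * saves space because a given tfm is only ever a plain cipher or a
 * blkcipher, never both.
 */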
/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback is
 * required or a negative number in case the key size is not valid
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}
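/*
 * Illustrative example (not from the original source): on a machine whose
 * CPACF only implements AES-128, need_fallback(16) returns 0 (use the
 * hardware), need_fallback(24) and need_fallback(32) return 1 (use the
 * software fallback), and any other length, e.g. 20, yields a negative
 * value (invalid key size).
 */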
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
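/*
 * The setkey_fallback_*() helpers above and below all follow the same
 * pattern: forward the tfm's request flags (CRYPTO_TFM_REQ_MASK) to the
 * fallback before setting the key, then copy any result flags
 * (CRYPTO_TFM_RES_MASK) back so the caller sees errors such as
 * CRYPTO_TFM_RES_BAD_KEY_LEN exactly as the fallback reported them.
 */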
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
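/*
 * aes_encrypt() and aes_decrypt() above process exactly one 16-byte block
 * per call: the KM ("cipher message") instruction is invoked with a length
 * of AES_BLOCK_SIZE and the raw key as its parameter block. The return
 * value is not checked here, presumably because KM cannot partially
 * complete a single aligned block.
 */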
static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}
static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}
static struct crypto_alg aes_alg = {
	.cra_name = "aes",
	.cra_driver_name = "aes-s390",
	.cra_priority = CRYPT_S390_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_module = THIS_MODULE,
	.cra_init = fallback_init_cip,
	.cra_exit = fallback_exit_cip,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = aes_encrypt,
			.cia_decrypt = aes_decrypt,
		}
	}
};
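/*
 * Note (added commentary): the CRYPTO_ALG_NEED_FALLBACK flag marks this
 * algorithm as one that depends on a software fallback; correspondingly,
 * the allocation in fallback_init_cip() passes that flag in the mask so
 * the fallback cannot itself be an algorithm that needs a fallback. The
 * priority is set so this driver is preferred over the generic C
 * implementation.
 */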
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
static int fallback_blk_dec(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int fallback_blk_enc(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}
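/*
 * ecb_aes_crypt() relies on the blkcipher walk: each iteration maps the
 * next virtually contiguous chunk, rounds it down to a whole number of
 * AES blocks for the KM instruction, and hands any remainder back to
 * blkcipher_walk_done(), which carries it over into the next iteration.
 */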
static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}
static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}
static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}
static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}
static struct crypto_alg ecb_aes_alg = {
	.cra_name = "ecb(aes)",
	.cra_driver_name = "ecb-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = fallback_init_blk,
	.cra_exit = fallback_exit_blk,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = ecb_aes_set_key,
			.encrypt = ecb_aes_encrypt,
			.decrypt = ecb_aes_decrypt,
		}
	}
};
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	if (!nbytes)
		goto out;

	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
	return ret;
}
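/*
 * For CBC the KMC instruction takes a parameter block containing the
 * chaining value (IV) followed by the key; the hardware updates the
 * chaining value in place, which is why param.iv is copied back into
 * walk->iv after the loop so that chaining works across requests.
 */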
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, &walk);
}
static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, &walk);
}
static struct crypto_alg cbc_aes_alg = {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "cbc-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = fallback_init_blk,
	.cra_exit = fallback_exit_blk,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = cbc_aes_set_key,
			.encrypt = cbc_aes_encrypt,
			.decrypt = cbc_aes_decrypt,
		}
	}
};
static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
static int xts_fallback_decrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int xts_fallback_encrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case 32:
		xts_ctx->enc = KM_XTS_128_ENCRYPT;
		xts_ctx->dec = KM_XTS_128_DECRYPT;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
		break;
	case 48:
		xts_ctx->enc = 0;
		xts_ctx->dec = 0;
		xts_fallback_setkey(tfm, in_key, key_len);
		break;
	case 64:
		xts_ctx->enc = KM_XTS_256_ENCRYPT;
		xts_ctx->dec = KM_XTS_256_DECRYPT;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc_key, in_key + 32, 32);
		break;
	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	xts_ctx->key_len = key_len;
	return 0;
}
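/*
 * XTS keys are twice the AES key size: the first half is the data
 * encryption key and the second half the tweak key, which is stored in
 * pcc_key for the PCC instruction. A 48-byte key (XTS with AES-192) has
 * no CPACF function code, so it is routed entirely through the software
 * fallback (see the key_len == 48 tests in xts_aes_encrypt/decrypt).
 */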
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	unsigned int n;
	u8 *in, *out;
	struct pcc_param pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	if (!nbytes)
		goto out;

	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
	ret = crypt_s390_pcc(func, &pcc_param.key[offset]);
	if (ret < 0)
		return -EIO;

	memcpy(xts_param.key, xts_ctx->key, 32);
	memcpy(xts_param.init, pcc_param.xts, 16);
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = crypt_s390_km(func, &xts_param.key[offset], out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
out:
	return ret;
}
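/*
 * xts_aes_crypt() first runs PCC ("perform cryptographic computation")
 * to derive the initial XTS tweak from walk->iv and the tweak key, then
 * feeds the result in as the "init" value of the KM parameter block. The
 * "offset" computation selects the 128- or 256-bit parameter layout: for
 * a 32-byte key (XTS-128) the 16-byte offset skips the unused upper half
 * of the 32-byte key fields.
 */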
static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}
static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}
static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}

	return 0;
}
static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;
}
static struct crypto_alg xts_aes_alg = {
	.cra_name = "xts(aes)",
	.cra_driver_name = "xts-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_xts_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = xts_fallback_init,
	.cra_exit = xts_fallback_exit,
	.cra_u = {
		.blkcipher = {
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = xts_aes_set_key,
			.encrypt = xts_aes_encrypt,
			.decrypt = xts_aes_decrypt,
		}
	}
};

static int xts_aes_alg_reg;
static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMCTR_AES_128_ENCRYPT;
		sctx->dec = KMCTR_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMCTR_AES_192_ENCRYPT;
		sctx->dec = KMCTR_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMCTR_AES_256_ENCRYPT;
		sctx->dec = KMCTR_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int i, n, nbytes;
	u8 buf[AES_BLOCK_SIZE];
	u8 *out, *in;

	if (!walk->nbytes)
		return ret;

	memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			/* only use complete blocks, max. PAGE_SIZE */
			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
						   nbytes & ~(AES_BLOCK_SIZE - 1);
			for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
				memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
				crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
			}
			ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
			if (ret < 0 || ret != n)
				return -EIO;
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrblk, AES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
				       AES_BLOCK_SIZE, ctrblk);
		if (ret < 0 || ret != AES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrblk, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}
	memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
	return ret;
}
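/*
 * ctr_aes_crypt() pre-fills the static ctrblk page with consecutive
 * counter values (each block is the previous one incremented via
 * crypto_inc()) so that a single KMCTR invocation can process up to
 * PAGE_SIZE bytes per iteration. The final partial block is handled by
 * encrypting into a stack buffer and copying only nbytes to the
 * destination.
 */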
static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}
static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}
static struct crypto_alg ctr_aes_alg = {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "ctr-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ctr_aes_set_key,
			.encrypt = ctr_aes_encrypt,
			.decrypt = ctr_aes_decrypt,
		}
	}
};

static int ctr_aes_alg_reg;
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
		xts_aes_alg_reg = 1;
	}

	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_aes_err;
		}
		ret = crypto_register_alg(&ctr_aes_alg);
		if (ret) {
			free_page((unsigned long) ctrblk);
			goto ctr_aes_err;
		}
		ctr_aes_alg_reg = 1;
	}

out:
	return ret;

ctr_aes_err:
	crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}
static void __exit aes_s390_fini(void)
{
	if (ctr_aes_alg_reg) {
		crypto_unregister_alg(&ctr_aes_alg);
		free_page((unsigned long) ctrblk);
	}
	if (xts_aes_alg_reg)
		crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}
module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");