#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <linux/module.h>

#define AES_KEYLEN_128 1
#define AES_KEYLEN_192 2
#define AES_KEYLEN_256 4

static char keylen_flag;
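/*
 * keylen_flag collects the AES key lengths the CPACF hardware supports;
 * AES_KEYLEN_128/192/256 are single bits so the supported sizes can be
 * OR-ed together when the module probes the hardware.  need_fallback()
 * below is expected to consult this flag and report key lengths that must
 * be handled by the software fallback transforms instead.
 */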
static int need_fallback(unsigned int key_len)
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
                               unsigned int key_len)

        ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
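/*
 * aes_set_key() is the setkey handler of the single-block AES cipher
 * ("aes-s390"): when need_fallback() signals an unsupported key length it
 * hands the key to the software cipher kept in sctx->fallback.cip via
 * setkey_fallback_cip(); otherwise the key is used by the hardware paths.
 */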
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)

        ret = need_fallback(key_len);

        return setkey_fallback_cip(tfm, in_key, key_len);

        crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)

        crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
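/*
 * fallback_init_cip()/fallback_exit_cip() run at tfm init/exit time and
 * allocate and free, respectively, the software cipher behind
 * sctx->fallback.cip that the setkey/encrypt/decrypt fragments above
 * fall back to.
 */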
static int fallback_init_cip(struct crypto_tfm *tfm)

        pr_err("Allocating AES fallback algorithm %s failed\n", name);
static void fallback_exit_cip(struct crypto_tfm *tfm)

        .cra_driver_name = "aes-s390",
        .cra_init        = fallback_init_cip,
        .cra_exit        = fallback_exit_cip,
        .cia_setkey      = aes_set_key,
        .cia_encrypt     = aes_encrypt,
        .cia_decrypt     = aes_decrypt,
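/*
 * The blkcipher modes below (ECB, CBC) keep their own software fallback in
 * sctx->fallback.blk; setkey_fallback_blk() and the fallback_blk_enc()/
 * fallback_blk_dec() helpers simply forward the request to that transform.
 */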
        ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);

        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)

        ret = need_fallback(key_len);

        return setkey_fallback_blk(tfm, in_key, key_len);

        return aes_set_key(tfm, in_key, key_len);

        while ((nbytes = walk->nbytes)) {

                ret = crypt_s390_km(func, param, out, in, n);
                BUG_ON((ret < 0) || (ret != n));
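                /*
                 * crypt_s390_km() wraps the CPACF KM ("cipher message")
                 * instruction; the BUG_ON() above insists that the call
                 * neither failed nor processed fewer than the n bytes
                 * handed to it.
                 */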
        return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);

        return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
static int fallback_init_blk(struct crypto_tfm *tfm)

        const char *name = tfm->__crt_alg->cra_name;
        sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
        pr_err("Allocating AES fallback algorithm %s failed\n", name);
static void fallback_exit_blk(struct crypto_tfm *tfm)

        .cra_name        = "ecb(aes)",
        .cra_driver_name = "ecb-aes-s390",
        .cra_init        = fallback_init_blk,
        .cra_exit        = fallback_exit_blk,
        .setkey          = ecb_aes_set_key,
        .encrypt         = ecb_aes_encrypt,
        .decrypt         = ecb_aes_decrypt,
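/*
 * CBC reuses the ECB fallback plumbing but drives the hardware through
 * crypt_s390_kmc(), the wrapper for the KMC ("cipher message with
 * chaining") instruction; the param block handed down (sctx->iv in the
 * callers below) carries the CBC chaining value across the request.
 */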
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)

        ret = need_fallback(key_len);

        return setkey_fallback_blk(tfm, in_key, key_len);

        return aes_set_key(tfm, in_key, key_len);
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)

        unsigned int nbytes = walk->nbytes;

                ret = crypt_s390_kmc(func, param, out, in, n);
                BUG_ON((ret < 0) || (ret != n));

        } while ((nbytes = walk->nbytes));
        return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);

        return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);

        .cra_name        = "cbc(aes)",
        .cra_driver_name = "cbc-aes-s390",
        .cra_init        = fallback_init_blk,
        .cra_exit        = fallback_exit_blk,
        .setkey          = cbc_aes_set_key,
        .encrypt         = cbc_aes_encrypt,
        .decrypt         = cbc_aes_decrypt,
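/*
 * XTS: judging by the set_key fragments below, key lengths with a hardware
 * XTS function get their two key halves copied into xts_ctx->pcc.key (16 or
 * 32 bytes each), while other lengths are routed to the per-tfm software
 * fallback through xts_fallback_setkey().
 */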
static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
                               unsigned int len)

        ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)

        memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);

        xts_fallback_setkey(tfm, in_key, key_len);

        memcpy(xts_ctx->pcc.key, in_key + 32, 32);
        unsigned int nbytes = walk->nbytes;

        memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));

        ret = crypt_s390_pcc(func, param);

                ret = crypt_s390_km(func, param, out, in, n);
                BUG_ON(ret < 0 || ret != n);

        } while ((nbytes = walk->nbytes));
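/*
 * In xts_aes_crypt() above, crypt_s390_pcc() ("perform cryptographic
 * computation") appears to be invoked once per request to derive the XTS
 * tweak material from the zeroed pcc block before crypt_s390_km() processes
 * the data; as in the other modes, the BUG_ON() checks that exactly n bytes
 * were handled.
 */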
        return xts_fallback_encrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);

        return xts_fallback_decrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);

static int xts_fallback_init(struct crypto_tfm *tfm)

        const char *name = tfm->__crt_alg->cra_name;
        xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
        pr_err("Allocating XTS fallback algorithm %s failed\n", name);
static void xts_fallback_exit(struct crypto_tfm *tfm)

        crypto_free_blkcipher(xts_ctx->fallback);

        .cra_name        = "xts(aes)",
        .cra_driver_name = "xts-aes-s390",
        .cra_init        = xts_fallback_init,
        .cra_exit        = xts_fallback_exit,
        .setkey          = xts_aes_set_key,
        .encrypt         = xts_aes_encrypt,
        .decrypt         = xts_aes_decrypt,
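/*
 * CTR mode drives the KMCTR ("cipher message with counter") instruction via
 * crypt_s390_kmctr(); ctrblk supplies the counter blocks, and the second
 * kmctr call with the local buf appears to handle a trailing partial block
 * whose ciphertext is then copied out.
 */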
static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)

        return aes_set_key(tfm, in_key, key_len);

                ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
                BUG_ON(ret < 0 || ret != n);
                ret = crypt_s390_kmctr(func, sctx->key, buf, in,
                                       AES_BLOCK_SIZE, ctrblk);
        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);

        .cra_name        = "ctr(aes)",
        .cra_driver_name = "ctr-aes-s390",
        .setkey          = ctr_aes_set_key,
        .encrypt         = ctr_aes_encrypt,
        .decrypt         = ctr_aes_decrypt,
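/*
 * aes_s390_init() probes which AES functions the CPACF facility provides
 * (hence the notice below that only 128-bit keys may be accelerated) and
 * registers the algorithms above; aes_s390_fini() is presumably the
 * matching unregistration path on module unload.
 */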
static int __init aes_s390_init(void)
        pr_info("AES hardware acceleration is only available for"
                " 128-bit keys\n");
static void __exit aes_s390_fini(void)