#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
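/* Block of zeroes used to pad partial 16-byte blocks fed into GHASH. */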
static void *gcm_zeroes;
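/*
 * The per-request private context sits right behind the aead_request;
 * round the pointer up to the alignment the underlying transforms need.
 */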
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
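/*
 * crypto_gcm_setkey(): key the CTR transform, then derive the GHASH
 * subkey H by encrypting an all-zero block with it (waiting for the
 * encryption to complete if it runs asynchronously) and use the result
 * to key the ghash transform.
 */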
	crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
	err = crypto_ablkcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
	data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
	init_completion(&data->result.completion);
	ablkcipher_request_set_tfm(&data->req, ctr);
					crypto_gcm_setkey_done,
	ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
	err = crypto_ablkcipher_encrypt(&data->req);
			&data->result.completion);
		err = data->result.err;
	crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) &
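/* Only the tag lengths GCM permits: 4, 8 and 12..16 bytes. */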
static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
				  unsigned int cryptlen)
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	scatterwalk_sg_chain(pctx->src, 2, req->src);
	if (req->src != req->dst) {
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
	ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
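/* Zero padding needed to fill the current 16-byte GHASH block. */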
static inline unsigned int gcm_remain(unsigned int len)
	return len ? 16 - len : 0;
	ahash_request_set_callback(ahreq, aead_request_flags(req),
	ahash_request_set_crypt(ahreq, src, NULL, len);
	return crypto_ahash_update(ahreq);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
	ahash_request_set_crypt(ahreq, pctx->src, NULL, remain);
	return crypto_ahash_update(ahreq);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   gcm_hash_len_done, req);
	ahash_request_set_crypt(ahreq, pctx->src,
				NULL, sizeof(lengths));
	return crypto_ahash_update(ahreq);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   gcm_hash_final_done, req);
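/*
 * GHASH may complete asynchronously.  The *_done callbacks below chain
 * the hashing steps: associated data, zero padding, ciphertext, zero
 * padding, then the length block, finishing with the final digest.
 */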
static void __gcm_hash_final_done(struct aead_request *req, int err)
	__gcm_hash_final_done(req, err);

static void __gcm_hash_len_done(struct aead_request *req, int err)
		err = gcm_hash_final(req, pctx);
	__gcm_hash_final_done(req, err);
	__gcm_hash_len_done(req, err);

static void __gcm_hash_crypt_remain_done(struct aead_request *req, int err)
		err = gcm_hash_len(req, pctx);
	__gcm_hash_len_done(req, err);
	__gcm_hash_crypt_remain_done(req, err);

static void __gcm_hash_crypt_done(struct aead_request *req, int err)
	remain = gcm_remain(gctx->cryptlen);
	err = gcm_hash_remain(req, pctx, remain,
			      gcm_hash_crypt_remain_done);
	__gcm_hash_crypt_remain_done(req, err);
	__gcm_hash_crypt_done(req, err);

static void __gcm_hash_assoc_remain_done(struct aead_request *req, int err)
	unsigned int remain = 0;
		remain = gcm_remain(gctx->cryptlen);
		complete = remain ? gcm_hash_crypt_done :
			   gcm_hash_crypt_remain_done;
		err = gcm_hash_update(req, pctx, complete,
		__gcm_hash_crypt_done(req, err);
		__gcm_hash_crypt_remain_done(req, err);
	__gcm_hash_assoc_remain_done(req, err);

static void __gcm_hash_assoc_done(struct aead_request *req, int err)
	err = gcm_hash_remain(req, pctx, remain,
			      gcm_hash_assoc_remain_done);
	__gcm_hash_assoc_remain_done(req, err);
	__gcm_hash_assoc_done(req, err);

static void __gcm_hash_init_done(struct aead_request *req, int err)
	unsigned int remain = 0;
		complete = remain ? gcm_hash_assoc_done :
			   gcm_hash_assoc_remain_done;
		err = gcm_hash_update(req, pctx, complete,
		__gcm_hash_assoc_done(req, err);
		__gcm_hash_assoc_remain_done(req, err);
	__gcm_hash_init_done(req, err);
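/*
 * gcm_hash() drives the same sequence synchronously; if any step goes
 * asynchronous it returns early and the callback chain above resumes
 * the remaining steps.
 */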
	ahash_request_set_tfm(ahreq, ctx->ghash);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   gcm_hash_init_done, req);
	err = crypto_ahash_init(ahreq);
	complete = remain ? gcm_hash_assoc_done : gcm_hash_assoc_remain_done;
	err = gcm_hash_update(req, pctx, complete, req->assoc, req->assoclen);
		err = gcm_hash_remain(req, pctx, remain,
				      gcm_hash_assoc_remain_done);
	remain = gcm_remain(gctx->cryptlen);
	complete = remain ? gcm_hash_crypt_done : gcm_hash_crypt_remain_done;
	err = gcm_hash_update(req, pctx, complete, gctx->src, gctx->cryptlen);
		err = gcm_hash_remain(req, pctx, remain,
				      gcm_hash_crypt_remain_done);
	err = gcm_hash_len(req, pctx);
	err = gcm_hash_final(req, pctx);
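/* On encryption the computed tag is appended to the ciphertext in req->dst. */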
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
				 crypto_aead_authsize(aead), 1);

static void gcm_enc_hash_done(struct aead_request *req, int err)
		gcm_enc_copy_hash(req, pctx);
	aead_request_complete(req, err);
		err = gcm_hash(req, pctx);
			gcm_enc_copy_hash(req, pctx);
	aead_request_complete(req, err);
	crypto_gcm_init_crypt(abreq, req, req->cryptlen);
	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
					gcm_encrypt_done, req);
	err = crypto_ablkcipher_encrypt(abreq);
	err = gcm_hash(req, pctx);
	gcm_enc_copy_hash(req, pctx);
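/*
 * crypto_gcm_verify(): recompute the tag and compare it with the
 * trailing authsize bytes of the input; a mismatch yields -EBADMSG.
 */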
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
		err = crypto_gcm_verify(req, pctx);
	aead_request_complete(req, err);

static void gcm_dec_hash_done(struct aead_request *req, int err)
		ablkcipher_request_set_callback(abreq, aead_request_flags(req),
						gcm_decrypt_done, req);
		crypto_gcm_init_crypt(abreq, req, gctx->cryptlen);
		err = crypto_ablkcipher_decrypt(abreq);
			err = crypto_gcm_verify(req, pctx);
	aead_request_complete(req, err);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	if (cryptlen < authsize)
	cryptlen -= authsize;
	err = gcm_hash(req, pctx);
	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
					gcm_decrypt_done, req);
	crypto_gcm_init_crypt(abreq, req, cryptlen);
	err = crypto_ablkcipher_decrypt(abreq);
	return crypto_gcm_verify(req, pctx);
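/*
 * Instantiate the ghash and ctr sub-transforms and size the request
 * context so it can hold either sub-request plus the private state.
 */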
static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
	ghash = crypto_spawn_ahash(&ictx->ghash);
		return PTR_ERR(ghash);
	ctr = crypto_spawn_skcipher(&ictx->ctr);
	align = crypto_tfm_alg_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = align +
		    crypto_ablkcipher_reqsize(ctr),
		    crypto_ahash_reqsize(ghash));
	crypto_free_ahash(ghash);

static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
	crypto_free_ahash(ctx->ghash);
	crypto_free_ablkcipher(ctx->ctr);
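/*
 * Instance constructor shared by the "gcm" and "gcm_base" templates:
 * grab the named ghash and ctr implementations and fill in the AEAD.
 */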
						       const char *full_name,
						       const char *ctr_name,
						       const char *ghash_name)
	err = PTR_ERR(ghash_alg);
	if (IS_ERR(ghash_alg))
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	ctx = crypto_instance_ctx(inst);
	crypto_set_skcipher_spawn(&ctx->ctr, inst);
				   crypto_requires_sync(algt->type,
	ctr = crypto_skcipher_spawn_alg(&ctx->ctr);
	if (ctr->cra_ablkcipher.ivsize != 16)
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_aead.ivsize = 16;
	inst->alg.cra_aead.maxauthsize = 16;
	inst->alg.cra_init = crypto_gcm_init_tfm;
	inst->alg.cra_exit = crypto_gcm_exit_tfm;
	inst->alg.cra_aead.setkey = crypto_gcm_setkey;
	inst->alg.cra_aead.setauthsize = crypto_gcm_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
	inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
	crypto_drop_skcipher(&ctx->ctr);
	crypto_drop_ahash(&ctx->ghash);
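/* "gcm(cipher)" builds the "ctr(cipher)" and "ghash" names itself. */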
	const char *cipher_name;
	err = PTR_ERR(cipher_name);
	if (IS_ERR(cipher_name))
	return crypto_gcm_alloc_common(tb, full_name, ctr_name, "ghash");
	crypto_drop_skcipher(&ctx->ctr);
	crypto_drop_ahash(&ctx->ghash);
	.alloc = crypto_gcm_alloc,
	.free = crypto_gcm_free,
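/* "gcm_base(ctr,ghash)" names the ctr and ghash implementations explicitly. */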
	const char *ctr_name;
	const char *ghash_name;
	err = PTR_ERR(ctr_name);
	if (IS_ERR(ctr_name))
	err = PTR_ERR(ghash_name);
	if (IS_ERR(ghash_name))
	return crypto_gcm_alloc_common(tb, full_name, ctr_name, ghash_name);
	.alloc = crypto_gcm_base_alloc,
	.free = crypto_gcm_free,
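/*
 * rfc4106 wrapper (IPsec ESP): per RFC 4106 the last four key bytes are
 * the nonce salt; the remaining bytes key the underlying gcm.
 */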
static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &

static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
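/*
 * Build the child request: the gcm nonce is the 4-byte salt stored at
 * setkey time followed by the 8-byte per-request IV; src, dst and
 * cryptlen are passed through unchanged.
 */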
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
			       crypto_aead_alignmask(child) + 1);
	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);

static int crypto_rfc4106_encrypt(struct aead_request *req)
	req = crypto_rfc4106_crypt(req);
	return crypto_aead_encrypt(req);

static int crypto_rfc4106_decrypt(struct aead_request *req)
	req = crypto_rfc4106_crypt(req);
	return crypto_aead_decrypt(req);

static int crypto_rfc4106_init_tfm(struct crypto_tfm *tfm)
	aead = crypto_spawn_aead(spawn);
		return PTR_ERR(aead);
	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
				ALIGN(crypto_aead_reqsize(aead),
				      crypto_tfm_ctx_alignment()) +

static void crypto_rfc4106_exit_tfm(struct crypto_tfm *tfm)
	crypto_free_aead(ctx->child);
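/*
 * rfc4106 template constructor: wraps an existing gcm implementation
 * (which must use a 16-byte IV) behind the 8-byte IV interface.
 */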
	const char *ccm_name;
	err = PTR_ERR(ccm_name);
	if (IS_ERR(ccm_name))
	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	spawn = crypto_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, inst);
			       crypto_requires_sync(algt->type, algt->mask));
	alg = crypto_aead_spawn_alg(spawn);
	if (alg->cra_aead.ivsize != 16)
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_aead.ivsize = 8;
	inst->alg.cra_aead.maxauthsize = 16;
	inst->alg.cra_init = crypto_rfc4106_init_tfm;
	inst->alg.cra_exit = crypto_rfc4106_exit_tfm;
	inst->alg.cra_aead.setkey = crypto_rfc4106_setkey;
	inst->alg.cra_aead.setauthsize = crypto_rfc4106_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_rfc4106_encrypt;
	inst->alg.cra_aead.decrypt = crypto_rfc4106_decrypt;
	inst->alg.cra_aead.geniv = "seqiv";
	crypto_drop_aead(spawn);
	inst = ERR_PTR(err);
	.alloc = crypto_rfc4106_alloc,
	.free = crypto_rfc4106_free,
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
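/*
 * rfc4543 (GMAC) wrapper: same key layout as rfc4106 (4-byte salt plus
 * the key for the underlying gcm), but the payload is only authenticated,
 * never encrypted.
 */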
static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &

static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
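/*
 * The IV and the payload are chained into the associated data of the
 * inner gcm request, so the child's cryptlen is 0 on encryption (only
 * the tag is produced) and authsize on decryption (the trailing tag is
 * consumed and verified).
 */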
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int assoclen = req->assoclen;
			    crypto_aead_alignmask(ctx->child) + 1);
	dstp = sg_page(dst);
	sg_set_buf(payload, req->iv, 8);
	scatterwalk_crypto_chain(payload, dst, vdst == req->iv + 8, 2);
	assoclen += 8 + req->cryptlen - (enc ? 0 : authsize);
	sg_set_page(assoc, sg_page(req->assoc), req->assoc->length,
		    req->assoc->offset);
	scatterwalk_crypto_chain(assoc, payload, 0, 2);
	aead_request_set_tfm(subreq, ctx->child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
	aead_request_set_crypt(subreq, cipher, cipher, enc ? 0 : authsize, iv);
	aead_request_set_assoc(subreq, assoc, assoclen);

static int crypto_rfc4543_encrypt(struct aead_request *req)
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	subreq = crypto_rfc4543_crypt(req, 1);
	err = crypto_aead_encrypt(subreq);
				 crypto_aead_authsize(aead), 1);

static int crypto_rfc4543_decrypt(struct aead_request *req)
	req = crypto_rfc4543_crypt(req, 0);
	return crypto_aead_decrypt(req);

static int crypto_rfc4543_init_tfm(struct crypto_tfm *tfm)
	unsigned long align;
	aead = crypto_spawn_aead(spawn);
		return PTR_ERR(aead);
	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
				ALIGN(crypto_aead_reqsize(aead),
				      crypto_tfm_ctx_alignment()) +

static void crypto_rfc4543_exit_tfm(struct crypto_tfm *tfm)
	crypto_free_aead(ctx->child);
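/*
 * rfc4543 template constructor, mirroring rfc4106: the wrapped gcm must
 * use a 16-byte IV and is exposed with an 8-byte IV and "seqiv" geniv.
 */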
	const char *ccm_name;
	err = PTR_ERR(algt);
		return ERR_PTR(err);
	err = PTR_ERR(ccm_name);
	if (IS_ERR(ccm_name))
		return ERR_PTR(err);
	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	spawn = crypto_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, inst);
			       crypto_requires_sync(algt->type, algt->mask));
	alg = crypto_aead_spawn_alg(spawn);
	if (alg->cra_aead.ivsize != 16)
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_aead.ivsize = 8;
	inst->alg.cra_aead.maxauthsize = 16;
	inst->alg.cra_init = crypto_rfc4543_init_tfm;
	inst->alg.cra_exit = crypto_rfc4543_exit_tfm;
	inst->alg.cra_aead.setkey = crypto_rfc4543_setkey;
	inst->alg.cra_aead.setauthsize = crypto_rfc4543_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_rfc4543_encrypt;
	inst->alg.cra_aead.decrypt = crypto_rfc4543_decrypt;
	inst->alg.cra_aead.geniv = "seqiv";
	crypto_drop_aead(spawn);
	inst = ERR_PTR(err);
	.alloc = crypto_rfc4543_alloc,
	.free = crypto_rfc4543_free,
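/*
 * Module init registers the gcm, gcm_base, rfc4106 and rfc4543 templates,
 * unwinding earlier registrations on failure; module exit unregisters them.
 */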
static int __init crypto_gcm_module_init(void)
		goto out_undo_rfc4106;

static void __exit crypto_gcm_module_exit(void)