18 #include <linux/kernel.h>
19 #include <linux/module.h>
20 #include <linux/rtnetlink.h>
21 #include <linux/sched.h>
22 #include <linux/slab.h>
/*
 * Fragment of setkey_unaligned() (resembles Linux crypto/aead.c).
 * NOTE(review): the function signature, buffer allocation and return
 * path are missing from this chunk; comments below are hedged.
 */
32 struct aead_alg *aead = crypto_aead_alg(tfm);
33 unsigned long alignmask = crypto_aead_alignmask(tfm);
/* Buffer must hold the key plus worst-case alignment slack. */
38 absize = keylen + alignmask;
/* Round the (presumably heap-allocated) buffer up to the next
 * alignmask+1 boundary before copying the key into it. */
43 alignbuffer = (
u8 *)
ALIGN((
unsigned long)
buffer, alignmask + 1);
44 memcpy(alignbuffer, key, keylen);
/* Hand the now-aligned key to the algorithm's setkey hook. */
45 ret = aead->
setkey(tfm, alignbuffer, keylen);
/* Scrub the temporary key copy. NOTE(review): a plain memset before a
 * free can be optimized away; full source should be checked for
 * memzero_explicit or equivalent. */
46 memset(alignbuffer, 0, keylen);
/* Fragment of the AEAD setkey entry point — signature missing from this
 * chunk (resembles crypto_aead_setkey in Linux crypto/aead.c). */
53 struct aead_alg *aead = crypto_aead_alg(tfm);
54 unsigned long alignmask = crypto_aead_alignmask(tfm);
/* Misaligned caller-supplied key: take the slow copy-and-align path. */
56 if ((
unsigned long)key & alignmask)
57 return setkey_unaligned(tfm, key, keylen);
/* Aligned key: forward it straight to the algorithm's setkey hook. */
59 return aead->
setkey(tfm, key, keylen);
/* Fragment of crypto_aead_setauthsize() — validates the requested
 * authentication-tag size and invokes the optional per-algorithm
 * setauthsize hook; surrounding lines are missing from this chunk. */
64 struct aead_tfm *crt = crypto_aead_crt(tfm);
/* Reject tag sizes above the algorithm's declared maximum. */
67 if (authsize > crypto_aead_alg(tfm)->maxauthsize)
/* Only call the hook when the algorithm provides one. */
70 if (crypto_aead_alg(tfm)->setauthsize) {
71 err = crypto_aead_alg(tfm)->setauthsize(crt->
base, authsize);
/* Fragment of crypto_init_aead_ops() — wires the tfm's crt_aead state;
 * most of the body (ops assignments) is missing from this chunk. */
96 struct aead_tfm *crt = &tfm->crt_aead;
/* Record the typed AEAD handle backing this transform. */
107 crt->
base = __crypto_aead_cast(tfm);
/* Fragment of crypto_aead_report() — fills a crypto_report structure
 * for the NETLINK_CRYPTO interface; struct setup and the nla_put()
 * call itself are missing from this chunk. */
118 struct aead_alg *aead = &alg->cra_aead;
/* Report the geniv name, or "<built-in>" when none is configured
 * (gcc ?: extension keeps the non-NULL left operand). */
122 aead->
geniv ?:
"<built-in>");
126 raead.ivsize = aead->
ivsize;
/* Jump to the shared failure label when the netlink buffer is full. */
130 goto nla_put_failure;
/* Fragment of crypto_aead_show() plus the crypto_aead_type ops table;
 * only scattered lines of each are visible in this chunk. */
147 struct aead_alg *aead = &alg->cra_aead;
/* crypto_type callbacks for conventional (non-geniv) AEAD algorithms;
 * /proc show hook is compiled in only with CONFIG_PROC_FS. */
159 .ctxsize = crypto_aead_ctxsize,
160 .init = crypto_init_aead_ops,
161 #ifdef CONFIG_PROC_FS
162 .show = crypto_aead_show,
164 .report = crypto_aead_report,
/* Fragments of the no-op given-IV wrappers: each simply forwards the
 * embedded plain AEAD request to the ordinary encrypt/decrypt path
 * (function signatures missing from this chunk). */
170 return crypto_aead_encrypt(&req->
areq);
175 return crypto_aead_decrypt(&req->
areq);
/* crypto_init_nivaead_ops() — initialise transform ops for an AEAD
 * algorithm that carries its own IV generator.  Only the signature and
 * two body lines survive in this chunk; the ops assignments between
 * them are missing. */
178 static int crypto_init_nivaead_ops(
struct crypto_tfm *tfm,
u32 type,
u32 mask)
181 struct aead_tfm *crt = &tfm->crt_aead;
/* Record the typed AEAD handle backing this transform. */
193 crt->
base = __crypto_aead_cast(tfm);
/* Fragments of crypto_nivaead_report()/_show() and the
 * crypto_nivaead_type ops table for IV-generating AEAD algorithms;
 * most lines of each function are missing from this chunk. */
204 struct aead_alg *aead = &alg->cra_aead;
211 raead.ivsize = aead->
ivsize;
/* Netlink buffer overflow path. */
215 goto nla_put_failure;
233 struct aead_alg *aead = &alg->cra_aead;
/* crypto_type callbacks; /proc show only with CONFIG_PROC_FS. */
245 .ctxsize = crypto_aead_ctxsize,
246 .init = crypto_init_nivaead_ops,
247 #ifdef CONFIG_PROC_FS
248 .show = crypto_nivaead_show,
250 .report = crypto_nivaead_report,
/* Fragments of aead_geniv_alloc() — builds a template instance that
 * wraps a nivaead algorithm behind an IV generator.  Large parts of
 * the body (error checks, name setup) are missing; comments hedged. */
298 inst = kzalloc(
sizeof(*inst) +
sizeof(*spawn),
GFP_KERNEL);
302 spawn = crypto_instance_ctx(inst);
/* Fold the caller's sync requirement into the algorithm-grab mask. */
305 mask |= crypto_requires_sync(algt->
type, algt->
mask);
307 crypto_set_aead_spawn(spawn, inst);
308 err = crypto_grab_nivaead(spawn, name, type, mask);
312 alg = crypto_aead_spawn_alg(spawn);
/* A geniv wrapper only makes sense for algorithms that take an IV. */
315 if (!alg->cra_aead.ivsize)
/* Mirror the wrapped algorithm's AEAD parameters and callbacks onto
 * the new instance. */
349 inst->
alg.cra_aead.ivsize = alg->cra_aead.ivsize;
350 inst->
alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
351 inst->
alg.cra_aead.geniv = alg->cra_aead.geniv;
353 inst->
alg.cra_aead.setkey = alg->cra_aead.setkey;
354 inst->
alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
355 inst->
alg.cra_aead.encrypt = alg->cra_aead.encrypt;
356 inst->
alg.cra_aead.decrypt = alg->cra_aead.decrypt;
/* Error path: release the grabbed spawn.  The line below (orig 372)
 * presumably belongs to aead_geniv_free() — context missing. */
362 crypto_drop_aead(spawn);
372 crypto_drop_aead(crypto_instance_ctx(inst));
/* Fragments of aead_geniv_init()/aead_geniv_exit(): instantiate the
 * inner AEAD from the instance's spawn on init, free it on exit. */
382 aead = crypto_spawn_aead(crypto_instance_ctx(inst));
/* Propagate spawn failure (aead holds an ERR_PTR here). */
384 return PTR_ERR(aead);
386 tfm->crt_aead.base = aead;
/* Reserve request space for the inner transform on top of our own. */
387 tfm->crt_aead.reqsize += crypto_aead_reqsize(aead);
395 crypto_free_aead(tfm->crt_aead.base);
/* crypto_nivaead_default() — look up, or instantiate via its geniv
 * template, the default IV-generator wrapper for an AEAD algorithm.
 * Most of the body (larval registration, template lookup, error
 * handling) is missing from this chunk; comments hedged. */
399 static int crypto_nivaead_default(
struct crypto_alg *alg,
u32 type,
u32 mask)
419 err = PTR_ERR(larval);
/* Stop waiting once the larval has resolved to a real algorithm. */
424 if (!crypto_is_larval(larval))
/* Build the rtattr parameter list handed to the geniv template. */
427 ptype.attr.rta_len =
sizeof(ptype);
434 palg.attr.
rta_len =
sizeof(palg);
/* Use the algorithm's preferred geniv name (any fallback selection is
 * not visible in this chunk). */
442 geniv = alg->cra_aead.geniv;
/* Instantiate the template with the assembled attribute list. */
449 inst = tmpl->
alloc(tb);
463 crypto_tmpl_put(tmpl);
/* Fragments of crypto_lookup_aead(): conventional AEAD algorithms pass
 * through unchanged; nivaead algorithms get a default IV generator
 * built via crypto_nivaead_default().  Signature and intermediate
 * lines are missing from this chunk. */
481 if (alg->
cra_type == &crypto_aead_type)
/* IV-less algorithms never need a geniv wrapper. */
484 if (!alg->cra_aead.ivsize)
493 if (alg->
cra_type == &crypto_aead_type) {
/* Reaching here with no IV size would be a logic error upstream. */
501 BUG_ON(!alg->cra_aead.ivsize);
503 return ERR_PTR(crypto_nivaead_default(alg, type, mask));
/* Tail of crypto_alloc_aead(): cast the raw crypto_tfm to the typed
 * crypto_aead handle returned to the caller. */
549 return __crypto_aead_cast(tfm);