25 #include <linux/module.h>
26 #include <linux/slab.h>
/* Module-wide sysfs kset; per-instance kobjects are parented under it
 * (see the pcrypt_sysfs_add fragment below, which assigns it). */
60 static struct kset *pcrypt_kset;
/*
 * pcrypt_do_parallel - hand a padata job to a parallel instance.
 * @padata: padata control structure embedded in the request
 * @cb_cpu: in/out; caller's preferred callback CPU index, resolved here to
 *          a concrete CPU taken from the instance's callback cpumask
 *
 * Reduces the requested index modulo the mask's population count and walks
 * the mask to the corresponding set bit, so the serialization callback runs
 * on a configured CPU.  (Several interior lines are elided in this excerpt;
 * the final submission call is not visible -- verify against full source.)
 */
72 static int pcrypt_do_parallel(
struct padata_priv *padata,
unsigned int *cb_cpu,
/* no callback CPUs configured: nothing to dispatch onto */
85 if (!cpumask_weight(cpumask->mask))
/* fold the requested index into the number of usable CPUs */
88 cpu_index = cpu % cpumask_weight(cpumask->mask);
90 cpu = cpumask_first(cpumask->mask);
/* advance to the cpu_index-th set bit of the mask */
92 cpu = cpumask_next(cpu, cpumask->mask);
/*
 * pcrypt_aead_setkey - forward a key change to the wrapped (child) aead.
 * pcrypt holds no key material itself; it delegates to ctx->child.
 */
101 static int pcrypt_aead_setkey(
struct crypto_aead *parent,
102 const u8 *
key,
unsigned int keylen)
106 return crypto_aead_setkey(ctx->
child, key, keylen);
/*
 * pcrypt_aead_setauthsize - forward the authsize change to the child tfm.
 * (Body elided in this excerpt; presumably delegates like setkey above --
 * TODO confirm against the full source.)
 */
109 static int pcrypt_aead_setauthsize(
struct crypto_aead *parent,
110 unsigned int authsize)
/*
 * pcrypt_aead_serial - serialization callback run after the parallel step.
 * Completes the original aead request, reporting padata->info (the return
 * code stored by the parallel worker) as the completion status.
 */
117 static void pcrypt_aead_serial(
struct padata_priv *padata)
122 aead_request_complete(req->
base.data, padata->
info);
/*
 * pcrypt_aead_giv_serial - serialization callback for givencrypt requests.
 * Same contract as pcrypt_aead_serial, but the completion data lives one
 * level deeper (req->areq.base.data) inside the givcrypt request.
 */
125 static void pcrypt_aead_giv_serial(
struct padata_priv *padata)
130 aead_request_complete(req->
areq.base.data, padata->
info);
/* Interior fragment of the child-request completion path (enclosing
 * function elided): fetch the padata priv embedded in the pcrypt request. */
137 struct padata_priv *padata = pcrypt_request_padata(preq);
/*
 * pcrypt_aead_enc - parallel worker: run the child encrypt and stash its
 * return code in padata->info for the serial callback to report.
 */
145 static void pcrypt_aead_enc(
struct padata_priv *padata)
150 padata->
info = crypto_aead_encrypt(req);
/*
 * pcrypt_aead_encrypt - asynchronous encrypt entry point.
 * Builds a request for the wrapped (child) aead with the caller's src/dst
 * and flags, installs pcrypt_aead_serial as the serialization callback,
 * and queues the work on the pencrypt padata instance.
 */
158 static int pcrypt_aead_encrypt(
struct aead_request *req)
163 struct padata_priv *padata = pcrypt_request_padata(preq);
164 struct crypto_aead *aead = crypto_aead_reqtfm(req);
166 u32 flags = aead_request_flags(req);
/* completion is reported through the common serial path */
171 padata->
serial = pcrypt_aead_serial;
173 aead_request_set_tfm(creq, ctx->
child);
175 pcrypt_aead_done, req);
176 aead_request_set_crypt(creq, req->
src, req->
dst,
/* dispatch on the encryption padata instance */
180 err = pcrypt_do_parallel(padata, &ctx->
cb_cpu, &pencrypt);
/*
 * pcrypt_aead_dec - parallel worker: run the child decrypt and stash its
 * return code in padata->info for the serial callback to report.
 */
187 static void pcrypt_aead_dec(
struct padata_priv *padata)
192 padata->
info = crypto_aead_decrypt(req);
/*
 * pcrypt_aead_decrypt - asynchronous decrypt entry point.
 * Mirrors pcrypt_aead_encrypt exactly, but queues the work on the
 * pdecrypt padata instance instead of pencrypt.
 */
200 static int pcrypt_aead_decrypt(
struct aead_request *req)
205 struct padata_priv *padata = pcrypt_request_padata(preq);
206 struct crypto_aead *aead = crypto_aead_reqtfm(req);
208 u32 flags = aead_request_flags(req);
/* completion is reported through the common serial path */
213 padata->
serial = pcrypt_aead_serial;
215 aead_request_set_tfm(creq, ctx->
child);
217 pcrypt_aead_done, req);
218 aead_request_set_crypt(creq, req->
src, req->
dst,
/* dispatch on the decryption padata instance */
222 err = pcrypt_do_parallel(padata, &ctx->
cb_cpu, &pdecrypt);
/*
 * pcrypt_aead_givenc - parallel worker for IV-generating encryption;
 * stores the child givencrypt return code in padata->info.
 */
229 static void pcrypt_aead_givenc(
struct padata_priv *padata)
234 padata->
info = crypto_aead_givencrypt(req);
/*
 * Fragment of pcrypt_aead_givencrypt (function header elided in this
 * excerpt): the IV-generating analogue of pcrypt_aead_encrypt.  Sets both
 * the parallel worker and the giv-specific serial callback, copies
 * src/dst/giv/seq to the child givcrypt request, then dispatches on the
 * pencrypt padata instance.
 */
248 struct padata_priv *padata = pcrypt_request_padata(preq);
249 struct crypto_aead *aead = aead_givcrypt_reqtfm(req);
251 u32 flags = aead_request_flags(areq);
/* givencrypt needs its own worker and completion callbacks */
255 padata->
parallel = pcrypt_aead_givenc;
256 padata->
serial = pcrypt_aead_giv_serial;
258 aead_givcrypt_set_tfm(creq, ctx->
child);
260 pcrypt_aead_done, areq);
261 aead_givcrypt_set_crypt(creq, areq->
src, areq->
dst,
/* pass through the IV destination buffer and sequence number */
264 aead_givcrypt_set_giv(creq, req->
giv, req->
seq);
266 err = pcrypt_do_parallel(padata, &ctx->
cb_cpu, &pencrypt);
/*
 * pcrypt_aead_init_tfm - per-transform setup.
 * Round-robins the callback CPU over the online CPUs using the instance's
 * tfm_count, instantiates the wrapped (child) aead from the spawn, and
 * sizes the request area to hold the child's request as well.
 */
273 static int pcrypt_aead_init_tfm(
struct crypto_tfm *tfm)
/* spread callback CPUs evenly across the online set */
283 cpu_index = ictx->
tfm_count % cpumask_weight(cpu_online_mask);
285 ctx->
cb_cpu = cpumask_first(cpu_online_mask);
/* walk forward to the cpu_index-th online CPU */
287 ctx->
cb_cpu = cpumask_next(ctx->
cb_cpu, cpu_online_mask);
289 cipher = crypto_spawn_aead(crypto_instance_ctx(inst));
292 return PTR_ERR(cipher);
/* reserve room for the child's request after our own state */
297 + crypto_aead_reqsize(cipher);
/* pcrypt_aead_exit_tfm - release the child aead acquired in init_tfm. */
302 static void pcrypt_aead_exit_tfm(
struct crypto_tfm *tfm)
306 crypto_free_aead(ctx->
child);
/*
 * Fragment of the instance-allocation helper (header elided): allocate the
 * crypto instance and its trailing context in one zeroed block, then grab
 * the context pointer out of the instance.
 */
315 inst = kzalloc(
sizeof(*inst) +
sizeof(*ctx),
GFP_KERNEL);
328 ctx = crypto_instance_ctx(inst);
/*
 * Fragment of pcrypt_alloc_aead (header elided): build a pcrypt instance
 * around the looked-up underlying aead algorithm, inherit the child's aead
 * geometry, and wire the pcrypt entry points onto the new instance.
 */
355 return ERR_CAST(alg);
357 inst = pcrypt_alloc_instance(alg);
/* inherit the wrapped algorithm's aead parameters unchanged */
364 inst->
alg.cra_aead.ivsize = alg->cra_aead.ivsize;
365 inst->
alg.cra_aead.geniv = alg->cra_aead.geniv;
366 inst->
alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
/* transform lifecycle hooks */
370 inst->
alg.cra_init = pcrypt_aead_init_tfm;
371 inst->
alg.cra_exit = pcrypt_aead_exit_tfm;
/* pcrypt-wrapped aead operations */
373 inst->
alg.cra_aead.setkey = pcrypt_aead_setkey;
374 inst->
alg.cra_aead.setauthsize = pcrypt_aead_setauthsize;
375 inst->
alg.cra_aead.encrypt = pcrypt_aead_encrypt;
376 inst->
alg.cra_aead.decrypt = pcrypt_aead_decrypt;
377 inst->
alg.cra_aead.givencrypt = pcrypt_aead_givencrypt;
/*
 * Fragment of the template .alloc dispatcher (header elided): after
 * parsing the template attributes, forward the requested type/mask to
 * the aead-specific allocator.
 */
390 return ERR_CAST(algt);
394 return pcrypt_alloc_aead(tb, algt->
type, algt->
mask);
/*
 * pcrypt_cpumask_change_notify - notifier run when the padata cpumask
 * changes.  Allocates a replacement mask, copies the new callback cpumask
 * (cbcpu) into it, and frees the mask being superseded.  (Publication of
 * the new mask to readers is in elided lines -- verify full source.)
 */
408 static int pcrypt_cpumask_change_notify(
struct notifier_block *
self,
412 struct pcrypt_cpumask *new_mask, *old_mask;
422 if (!alloc_cpumask_var(&new_mask->mask,
GFP_KERNEL)) {
429 cpumask_copy(new_mask->mask, cpumask->
cbcpu);
433 free_cpumask_var(old_mask->mask);
/* Fragment of pcrypt_sysfs_add (header elided): parent the padata
 * instance's kobject under the module-wide pcrypt kset. */
442 pinst->
kobj.kset = pcrypt_kset;
/*
 * Fragment of pcrypt_init_padata (header elided): allocate the callback
 * cpumask, seed it with the currently-online CPUs, register the cpumask
 * change notifier and the sysfs entry, unwinding each acquisition on
 * failure through the goto-cleanup labels below.
 */
454 struct pcrypt_cpumask *
mask;
465 goto err_destroy_workqueue;
469 goto err_free_padata;
470 if (!alloc_cpumask_var(&mask->mask,
GFP_KERNEL)) {
472 goto err_free_padata;
/* initial callback set: every possible CPU that is online right now */
475 cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask);
478 pcrypt->
nblock.notifier_call = pcrypt_cpumask_change_notify;
481 goto err_free_cpumask;
483 ret = pcrypt_sysfs_add(pcrypt->
pinst, name);
485 goto err_unregister_notifier;
/* error unwind: release resources in reverse order of acquisition */
491 err_unregister_notifier:
494 free_cpumask_var(mask->mask);
498 err_destroy_workqueue:
/* Template registration fragment: instance creation routes to pcrypt_alloc. */
519 .alloc = pcrypt_alloc,
/*
 * pcrypt_init - module init: bring up the pencrypt and pdecrypt padata
 * instances, tearing pencrypt back down if pdecrypt setup fails.
 */
524 static int __init pcrypt_init(
void)
532 err = pcrypt_init_padata(&pencrypt,
"pencrypt");
536 err = pcrypt_init_padata(&pdecrypt,
"pdecrypt");
538 goto err_deinit_pencrypt;
/* unwind the already-initialized encryption instance */
546 pcrypt_fini_padata(&pencrypt);
/* pcrypt_exit - module teardown: release both padata instances. */
553 static void __exit pcrypt_exit(
void)
555 pcrypt_fini_padata(&pencrypt);
556 pcrypt_fini_padata(&pdecrypt);