#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/errno.h>
#include <linux/hardirq.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
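
/*
 * Per-step state flags for the scatterlist walker, plus the kmap helpers
 * used by the fast path below.
 */
enum {
	BLKCIPHER_WALK_PHYS = 1 << 0,
	BLKCIPHER_WALK_SLOW = 1 << 1,
	BLKCIPHER_WALK_COPY = 1 << 2,
	BLKCIPHER_WALK_DIFF = 1 << 3,
};

static int blkcipher_walk_next(struct blkcipher_desc *desc,
			       struct blkcipher_walk *walk);
static int blkcipher_walk_first(struct blkcipher_desc *desc,
				struct blkcipher_walk *walk);

static inline void blkcipher_map_src(struct blkcipher_walk *walk)
{
	walk->src.virt.addr = scatterwalk_map(&walk->in);
}

static inline void blkcipher_map_dst(struct blkcipher_walk *walk)
{
	walk->dst.virt.addr = scatterwalk_map(&walk->out);
}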
static inline void blkcipher_unmap_src(struct blkcipher_walk *walk)
{
	scatterwalk_unmap(walk->src.virt.addr);
}
static inline void blkcipher_unmap_dst(struct blkcipher_walk *walk)
{
	scatterwalk_unmap(walk->dst.virt.addr);
}
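
/* Get a spot of the specified length that does not straddle a page.
 * The caller needs to ensure that there is enough space for this operation.
 */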
static inline u8 *blkcipher_get_spot(u8 *start, unsigned int len)
{
	u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK);

	return max(start, end_page);
}
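
/*
 * Slow-path completion: the block was processed in an aligned bounce
 * buffer, so copy the result back out to the destination scatterlist.
 */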
static inline unsigned int blkcipher_done_slow(struct crypto_blkcipher *tfm,
					       struct blkcipher_walk *walk,
					       unsigned int bsize)
{
	u8 *addr;
	unsigned int alignmask = crypto_blkcipher_alignmask(tfm);

	addr = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1);
	addr = blkcipher_get_spot(addr, bsize);
	scatterwalk_copychunks(addr, &walk->out, bsize, 1);
	return bsize;
}
static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
					       unsigned int n)
{
	if (walk->flags & BLKCIPHER_WALK_COPY) {
		blkcipher_map_dst(walk);
		memcpy(walk->dst.virt.addr, walk->page, n);
		blkcipher_unmap_dst(walk);
	} else if (!(walk->flags & BLKCIPHER_WALK_PHYS)) {
		if (walk->flags & BLKCIPHER_WALK_DIFF)
			blkcipher_unmap_dst(walk);
		blkcipher_unmap_src(walk);
	}

	scatterwalk_advance(&walk->in, n);
	scatterwalk_advance(&walk->out, n);

	return n;
}
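
/*
 * Finish the current step of the walk: unmap or copy back the data just
 * processed, advance both scatterlists, and either schedule the next step
 * or copy the final IV back to the caller.
 */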
int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	unsigned int nbytes = 0;

	if (likely(err >= 0)) {
		unsigned int n = walk->nbytes - err;

		if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW)))
			n = blkcipher_done_fast(walk, n);
		else if (WARN_ON(err)) {
			err = -EINVAL;
			goto err;
		} else
			n = blkcipher_done_slow(tfm, walk, n);

		nbytes = walk->total - n;
		err = 0;
	}

	scatterwalk_done(&walk->in, 0, nbytes);
	scatterwalk_done(&walk->out, 1, nbytes);

err:
	walk->total = nbytes;
	walk->nbytes = nbytes;

	if (nbytes) {
		crypto_yield(desc->flags);
		return blkcipher_walk_next(desc, walk);
	}

	if (walk->iv != desc->info)
		memcpy(desc->info, walk->iv, crypto_blkcipher_ivsize(tfm));
	if (walk->buffer != walk->page)
		kfree(walk->buffer);
	if (walk->page)
		free_page((unsigned long)walk->page);

	return err;
}
EXPORT_SYMBOL_GPL(blkcipher_walk_done);
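
/*
 * Slow path: the next block straddles a page or is misaligned, so process
 * it through a bounce buffer.  The buffer is sized to hold an aligned
 * source and destination copy of one block, each placed so that it does
 * not cross a page boundary.
 */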
static inline int blkcipher_next_slow(struct blkcipher_desc *desc,
				      struct blkcipher_walk *walk,
				      unsigned int bsize,
				      unsigned int alignmask)
{
	unsigned int n;
	unsigned aligned_bsize = ALIGN(bsize, alignmask + 1);

	if (walk->buffer)
		goto ok;

	walk->buffer = walk->page;
	if (walk->buffer)
		goto ok;

	n = aligned_bsize * 3 - (alignmask + 1) +
	    (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	walk->buffer = kmalloc(n, GFP_ATOMIC);
	if (!walk->buffer)
		return blkcipher_walk_done(desc, walk, -ENOMEM);

ok:
	walk->dst.virt.addr = (u8 *)ALIGN((unsigned long)walk->buffer,
					  alignmask + 1);
	walk->dst.virt.addr = blkcipher_get_spot(walk->dst.virt.addr, bsize);
	walk->src.virt.addr = blkcipher_get_spot(walk->dst.virt.addr +
						 aligned_bsize, bsize);

	scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);

	walk->nbytes = bsize;
	walk->flags |= BLKCIPHER_WALK_SLOW;

	return 0;
}
static inline int blkcipher_next_copy(struct blkcipher_walk *walk)
{
	u8 *tmp = walk->page;

	blkcipher_map_src(walk);
	memcpy(tmp, walk->src.virt.addr, walk->nbytes);
	blkcipher_unmap_src(walk);

	walk->src.virt.addr = tmp;
	walk->dst.virt.addr = tmp;

	return 0;
}
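
/*
 * Fast path: source and destination are suitably aligned, so map them and
 * let the cipher work on the scatterlist pages in place.
 */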
static inline int blkcipher_next_fast(struct blkcipher_desc *desc,
				      struct blkcipher_walk *walk)
{
	unsigned long diff;

	walk->src.phys.page = scatterwalk_page(&walk->in);
	walk->src.phys.offset = offset_in_page(walk->in.offset);
	walk->dst.phys.page = scatterwalk_page(&walk->out);
	walk->dst.phys.offset = offset_in_page(walk->out.offset);

	if (walk->flags & BLKCIPHER_WALK_PHYS)
		return 0;

	diff = walk->src.phys.offset - walk->dst.phys.offset;
	diff |= walk->src.virt.page - walk->dst.virt.page;

	blkcipher_map_src(walk);
	walk->dst.virt.addr = walk->src.virt.addr;

	if (diff) {
		walk->flags |= BLKCIPHER_WALK_DIFF;
		blkcipher_map_dst(walk);
	}

	return 0;
}
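
/*
 * Set up the next step of the walk, choosing between the fast in-place
 * path, a whole-page copy, and the per-block slow path.
 */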
static int blkcipher_walk_next(struct blkcipher_desc *desc,
			       struct blkcipher_walk *walk)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
	unsigned int bsize;
	unsigned int n;
	int err;

	n = walk->total;
	if (unlikely(n < crypto_blkcipher_blocksize(tfm))) {
		desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
		return blkcipher_walk_done(desc, walk, -EINVAL);
	}

	walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY |
			 BLKCIPHER_WALK_DIFF);
	if (!scatterwalk_aligned(&walk->in, alignmask) ||
	    !scatterwalk_aligned(&walk->out, alignmask)) {
		walk->flags |= BLKCIPHER_WALK_COPY;
		if (!walk->page) {
			walk->page = (void *)__get_free_page(GFP_ATOMIC);
			if (!walk->page)
				n = 0;
		}
	}

	bsize = min(walk->blocksize, n);
	n = scatterwalk_clamp(&walk->in, n);
	n = scatterwalk_clamp(&walk->out, n);

	if (unlikely(n < bsize)) {
		err = blkcipher_next_slow(desc, walk, bsize, alignmask);
		goto set_phys_lowmem;
	}

	walk->nbytes = n;
	if (walk->flags & BLKCIPHER_WALK_COPY) {
		err = blkcipher_next_copy(walk);
		goto set_phys_lowmem;
	}

	return blkcipher_next_fast(desc, walk);

set_phys_lowmem:
	if (walk->flags & BLKCIPHER_WALK_PHYS) {
		walk->src.phys.page = virt_to_page(walk->src.virt.addr);
		walk->dst.phys.page = virt_to_page(walk->dst.virt.addr);
		walk->src.phys.offset &= PAGE_SIZE - 1;
		walk->dst.phys.offset &= PAGE_SIZE - 1;
	}

	return err;
}
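
/*
 * The caller's IV is not sufficiently aligned for the cipher, so make an
 * aligned copy that also avoids straddling a page boundary.
 */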
static inline int blkcipher_copy_iv(struct blkcipher_walk *walk,
				    struct crypto_blkcipher *tfm,
				    unsigned int alignmask)
{
	unsigned bs = walk->blocksize;
	unsigned int ivsize = crypto_blkcipher_ivsize(tfm);
	unsigned aligned_bs = ALIGN(bs, alignmask + 1);
	unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) -
			    (alignmask + 1);
	u8 *iv;

	size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	walk->buffer = kmalloc(size, GFP_ATOMIC);
	if (!walk->buffer)
		return -ENOMEM;

	iv = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1);
	iv = blkcipher_get_spot(iv, bs) + aligned_bs;
	iv = blkcipher_get_spot(iv, bs) + aligned_bs;
	iv = blkcipher_get_spot(iv, ivsize);

	walk->iv = memcpy(iv, walk->iv, ivsize);
	return 0;
}
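
/* Public entry points: walk the data with virtual or physical addresses. */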
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk)
{
	walk->flags &= ~BLKCIPHER_WALK_PHYS;
	walk->blocksize = crypto_blkcipher_blocksize(desc->tfm);
	return blkcipher_walk_first(desc, walk);
}
EXPORT_SYMBOL_GPL(blkcipher_walk_virt);

int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk)
{
	walk->flags |= BLKCIPHER_WALK_PHYS;
	walk->blocksize = crypto_blkcipher_blocksize(desc->tfm);
	return blkcipher_walk_first(desc, walk);
}
EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
static int blkcipher_walk_first(struct blkcipher_desc *desc,
				struct blkcipher_walk *walk)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	unsigned int alignmask = crypto_blkcipher_alignmask(tfm);

	if (WARN_ON_ONCE(in_irq()))
		return -EDEADLK;

	walk->iv = desc->info;
	walk->nbytes = walk->total;
	if (unlikely(!walk->total))
		return 0;

	walk->buffer = NULL;
	if (unlikely(((unsigned long)walk->iv & alignmask))) {
		int err = blkcipher_copy_iv(walk, tfm, alignmask);
		if (err)
			return err;
	}

	scatterwalk_start(&walk->in, walk->in.sg);
	scatterwalk_start(&walk->out, walk->out.sg);
	walk->page = NULL;

	return blkcipher_walk_next(desc, walk);
}
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize)
{
	walk->flags &= ~BLKCIPHER_WALK_PHYS;
	walk->blocksize = blocksize;
	return blkcipher_walk_first(desc, walk);
}
EXPORT_SYMBOL_GPL(blkcipher_walk_virt_block);
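
/*
 * Key material that is not aligned to the algorithm's alignment mask is
 * bounced through a temporary aligned buffer, which is wiped afterwards.
 */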
static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher;
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cipher->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
	struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher;
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);

	if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if ((unsigned long)key & alignmask)
		return setkey_unaligned(tfm, key, keylen);

	return cipher->setkey(tfm, key, keylen);
}
static int async_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	return setkey(crypto_ablkcipher_tfm(tfm), key, keylen);
}
static int async_encrypt(struct ablkcipher_request *req)
{
	struct crypto_tfm *tfm = req->base.tfm;
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
	struct blkcipher_desc desc = {
		.tfm = __crypto_blkcipher_cast(tfm),
		.info = req->info,
		.flags = req->base.flags,
	};

	return alg->encrypt(&desc, req->dst, req->src, req->nbytes);
}
static int async_decrypt(struct ablkcipher_request *req)
{
	struct crypto_tfm *tfm = req->base.tfm;
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
	struct blkcipher_desc desc = {
		.tfm = __crypto_blkcipher_cast(tfm),
		.info = req->info,
		.flags = req->base.flags,
	};

	return alg->decrypt(&desc, req->dst, req->src, req->nbytes);
}
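
/*
 * Instantiate the transform ops: a blkcipher can be used through either
 * the async (ablkcipher) or the sync interface, chosen by the type mask.
 */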
static int crypto_init_blkcipher_ops_async(struct crypto_tfm *tfm)
{
	struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher;
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;

	crt->setkey = async_setkey;
	crt->encrypt = async_encrypt;
	crt->decrypt = async_decrypt;
	crt->base = __crypto_ablkcipher_cast(tfm);
	crt->ivsize = alg->ivsize;

	return 0;
}
static int crypto_init_blkcipher_ops_sync(struct crypto_tfm *tfm)
{
	struct blkcipher_tfm *crt = &tfm->crt_blkcipher;
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
	unsigned long align = crypto_tfm_alg_alignmask(tfm) + 1;
	unsigned long addr;

	crt->setkey = setkey;
	crt->encrypt = alg->encrypt;
	crt->decrypt = alg->decrypt;

	addr = (unsigned long)crypto_tfm_ctx(tfm);
	addr = ALIGN(addr, align);
	addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
	crt->iv = (void *)addr;

	return 0;
}
static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;

	if (alg->ivsize > PAGE_SIZE / 8)
		return -EINVAL;

	if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK)
		return crypto_init_blkcipher_ops_sync(tfm);
	else
		return crypto_init_blkcipher_ops_async(tfm);
}
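
/*
 * Context size for the transform: a synchronous blkcipher keeps its IV in
 * the tail of the context, aligned to the algorithm's alignment mask.
 * Referenced by the crypto_blkcipher_type definition below.
 */
static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg, u32 type,
					     u32 mask)
{
	struct blkcipher_alg *cipher = &alg->cra_blkcipher;
	unsigned int len = alg->cra_ctxsize;

	if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK &&
	    cipher->ivsize) {
		len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
		len += cipher->ivsize;
	}

	return len;
}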
static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_blkcipher rblkcipher;

	snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "blkcipher");
	snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s",
		 alg->cra_blkcipher.geniv ?: "<default>");

	rblkcipher.blocksize = alg->cra_blocksize;
	rblkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	rblkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
	rblkcipher.ivsize = alg->cra_blkcipher.ivsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
		    sizeof(struct crypto_report_blkcipher), &rblkcipher))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : blkcipher\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "min keysize  : %u\n", alg->cra_blkcipher.min_keysize);
	seq_printf(m, "max keysize  : %u\n", alg->cra_blkcipher.max_keysize);
	seq_printf(m, "ivsize       : %u\n", alg->cra_blkcipher.ivsize);
	seq_printf(m, "geniv        : %s\n", alg->cra_blkcipher.geniv ?:
					     "<default>");
}
const struct crypto_type crypto_blkcipher_type = {
	.ctxsize = crypto_blkcipher_ctxsize,
	.init = crypto_init_blkcipher_ops,
#ifdef CONFIG_PROC_FS
	.show = crypto_blkcipher_show,
#endif
	.report = crypto_blkcipher_report,
};
EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
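
/*
 * IV generator template support: grab the underlying cipher and build an
 * ablkcipher instance around it.
 */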
static int crypto_grab_nivcipher(struct crypto_skcipher_spawn *spawn,
				 const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err;

	type = crypto_skcipher_type(type);
	mask = crypto_skcipher_mask(mask) | CRYPTO_ALG_GENIV;

	alg = crypto_alg_mod_lookup(name, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
	crypto_mod_put(alg);
	return err;
}
struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
					     struct rtattr **tb, u32 type,
					     u32 mask)
{
	struct {
		int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
			      unsigned int keylen);
		int (*encrypt)(struct ablkcipher_request *req);
		int (*decrypt)(struct ablkcipher_request *req);

		unsigned int min_keysize;
		unsigned int max_keysize;
		unsigned int ivsize;

		const char *geniv;
	} balg;
	const char *name;
	struct crypto_skcipher_spawn *spawn;
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	if ((algt->type ^ (type | CRYPTO_ALG_GENIV)) & algt->mask)
		return ERR_PTR(-EINVAL);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return ERR_CAST(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = crypto_instance_ctx(inst);

	/* Ignore async algorithms if necessary. */
	mask |= crypto_requires_sync(algt->type, algt->mask);

	crypto_set_skcipher_spawn(spawn, inst);
	err = crypto_grab_nivcipher(spawn, name, type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_skcipher_spawn_alg(spawn);
	if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER) {
		balg.ivsize = alg->cra_blkcipher.ivsize;
		balg.min_keysize = alg->cra_blkcipher.min_keysize;
		balg.max_keysize = alg->cra_blkcipher.max_keysize;

		balg.setkey = async_setkey;
		balg.encrypt = async_encrypt;
		balg.decrypt = async_decrypt;

		balg.geniv = alg->cra_blkcipher.geniv;
	} else {
		balg.ivsize = alg->cra_ablkcipher.ivsize;
		balg.min_keysize = alg->cra_ablkcipher.min_keysize;
		balg.max_keysize = alg->cra_ablkcipher.max_keysize;

		balg.setkey = alg->cra_ablkcipher.setkey;
		balg.encrypt = alg->cra_ablkcipher.encrypt;
		balg.decrypt = alg->cra_ablkcipher.decrypt;

		balg.geniv = alg->cra_ablkcipher.geniv;
	}

	err = -EINVAL;
	if (!balg.ivsize)
		goto err_drop_alg;
	inst->alg.cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV;
	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_givcipher_type;

	inst->alg.cra_ablkcipher.ivsize = balg.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = balg.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = balg.max_keysize;
	inst->alg.cra_ablkcipher.geniv = balg.geniv;

	inst->alg.cra_ablkcipher.setkey = balg.setkey;
	inst->alg.cra_ablkcipher.encrypt = balg.encrypt;
	inst->alg.cra_ablkcipher.decrypt = balg.decrypt;

out:
	return inst;

err_drop_alg:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(skcipher_geniv_alloc);
void skcipher_geniv_free(struct crypto_instance *inst)
{
	crypto_drop_skcipher(crypto_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(skcipher_geniv_free);
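
/* Bind the geniv instance to its underlying skcipher at tfm init time. */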
int skcipher_geniv_init(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_ablkcipher *cipher;

	cipher = crypto_spawn_skcipher(crypto_instance_ctx(inst));
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	tfm->crt_ablkcipher.base = cipher;
	tfm->crt_ablkcipher.reqsize += crypto_ablkcipher_reqsize(cipher);

	return 0;
}
EXPORT_SYMBOL_GPL(skcipher_geniv_init);
void skcipher_geniv_exit(struct crypto_tfm *tfm)
{
	crypto_free_ablkcipher(tfm->crt_ablkcipher.base);
}
EXPORT_SYMBOL_GPL(skcipher_geniv_exit);