20 #include <linux/kernel.h>
21 #include <linux/module.h>
22 #include <linux/rtnetlink.h>
23 #include <linux/sched.h>
24 #include <linux/slab.h>
/*
 * NOTE(review): extraction-damaged fragment.  This is a single orphaned
 * statement (orig. line 56); the enclosing function -- presumably the
 * ablkcipher walk-completion path that flushes queued buffer writes --
 * is missing.  Restore from the upstream file before editing.
 */
56 ablkcipher_buffer_write(p);
73 static inline u8 *ablkcipher_get_spot(
u8 *
start,
unsigned int len)
75 u8 *end_page = (
u8 *)(((
unsigned long)(start + len - 1)) &
PAGE_MASK);
76 return max(start, end_page);
/*
 * NOTE(review): extraction-damaged fragment of what appears to be
 * ablkcipher_done_slow() (orig. lines 79-97).  The second parameter,
 * the loop/brace structure, the scatterwalk page handling and the
 * return statement are all missing; only scattered statements survive,
 * with original line numbers fused into the text.  Do not edit in
 * place -- restore the full function from the upstream source.
 */
79 static inline unsigned int ablkcipher_done_slow(
struct ablkcipher_walk *walk,
82 unsigned int n = bsize;
/* visible remnants: clamp advance to the current page, then advance the output walk */
85 unsigned int len_this_page = scatterwalk_pagelen(&walk->
out);
87 if (len_this_page > n)
89 scatterwalk_advance(&walk->
out, n);
90 if (n == len_this_page)
/*
 * NOTE(review): extraction-damaged fragment of what appears to be
 * ablkcipher_done_fast() (orig. lines 99-104).  The second parameter
 * and the return statement are missing.  The surviving statements
 * advance both the input and output scatterwalks by n bytes.
 * Restore the full function from upstream before editing.
 */
99 static inline unsigned int ablkcipher_done_fast(
struct ablkcipher_walk *walk,
102 scatterwalk_advance(&walk->
in, n);
103 scatterwalk_advance(&walk->
out, n);
/*
 * NOTE(review): extraction-damaged fragment, presumably from
 * ablkcipher_walk_done() (orig. lines ~106-147).  The function header,
 * error handling and most of the control flow are missing.  Visible
 * remnants: dispatch to the fast/slow completion helpers, a
 * crypto_yield() before continuing the walk, and the final copy of the
 * walk IV back into the request when they differ.  Restore from
 * upstream before editing.
 */
121 n = ablkcipher_done_fast(walk, n);
126 n = ablkcipher_done_slow(walk, n);
140 crypto_yield(req->
base.flags);
141 return ablkcipher_walk_next(req, walk);
144 if (walk->
iv != req->
info)
145 memcpy(req->
info, walk->
iv, tfm->crt_ablkcipher.ivsize);
/*
 * NOTE(review): extraction-damaged fragment, presumably of
 * ablkcipher_next_slow() (orig. lines ~149-190).  The function name
 * line, allocation, error paths and return are missing.  Visible
 * remnants: size computation for an aligned bounce buffer, aligning
 * the dst pointer via ALIGN()/ablkcipher_get_spot(), and queueing the
 * buffer write.  Restore the full function from upstream.
 */
155 unsigned int alignmask,
156 void **src_p,
void **dst_p)
158 unsigned aligned_bsize =
ALIGN(bsize, alignmask + 1);
164 n += (aligned_bsize * 3 - (alignmask + 1) +
165 (alignmask & ~(crypto_tfm_ctx_alignment() - 1)));
173 dst = (
u8 *)
ALIGN((
unsigned long)base, alignmask + 1);
174 src = dst = ablkcipher_get_spot(dst, bsize);
181 ablkcipher_queue_write(walk, p);
/*
 * NOTE(review): extraction-damaged fragment, presumably of
 * ablkcipher_copy_iv() (orig. lines ~192-218).  Function name,
 * allocation, the memcpy of the IV and the return are missing.
 * Visible remnants: sizing an aligned scratch area from the block
 * size and ivsize, then stepping an iv pointer through it with
 * ablkcipher_get_spot().  Restore from upstream before editing.
 */
194 unsigned int alignmask)
197 unsigned int ivsize = tfm->crt_ablkcipher.ivsize;
198 unsigned aligned_bs =
ALIGN(bs, alignmask + 1);
199 unsigned int size = aligned_bs * 2 + ivsize +
max(aligned_bs, ivsize) -
203 size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);
209 iv = ablkcipher_get_spot(iv, bs) + aligned_bs;
210 iv = ablkcipher_get_spot(iv, bs) + aligned_bs;
211 iv = ablkcipher_get_spot(iv, ivsize);
/*
 * NOTE(review): extraction-damaged fragment, presumably of
 * ablkcipher_next_fast() (orig. lines ~220-226).  Only the two
 * assignments mapping the src/dst walk pages from the in/out
 * scatterwalks survive; the header, offsets and return are missing.
 * Restore from upstream before editing.
 */
220 walk->
src.page = scatterwalk_page(&walk->
in);
222 walk->
dst.page = scatterwalk_page(&walk->
out);
/*
 * NOTE(review): extraction-damaged fragment, presumably of
 * ablkcipher_walk_next() (orig. lines ~228-267).  Missing: function
 * header, the small-remainder error path body, and the slow-path
 * argument list.  Visible remnants: clamping the walk length to the
 * current in/out pages, and choosing the slow (bounce-buffer) path
 * when either scatterwalk is misaligned, otherwise the fast path.
 * Restore the full function from upstream before editing.
 */
232 unsigned int alignmask, bsize,
n;
236 alignmask = crypto_tfm_alg_alignmask(tfm);
238 if (
unlikely(n < crypto_tfm_alg_blocksize(tfm))) {
247 n = scatterwalk_clamp(&walk->
in, n);
248 n = scatterwalk_clamp(&walk->
out, n);
251 !scatterwalk_aligned(&walk->
in, alignmask) ||
252 !scatterwalk_aligned(&walk->
out, alignmask)) {
253 err = ablkcipher_next_slow(req, walk, bsize, alignmask,
255 goto set_phys_lowmem;
260 return ablkcipher_next_fast(req, walk);
/*
 * NOTE(review): extraction-damaged fragments, presumably of
 * ablkcipher_walk_first() (orig. lines ~270-299) plus the tail of
 * ablkcipher_walk_phys() (orig. line 305).  Visible remnants: copy
 * the IV to an aligned buffer when the caller's IV pointer is
 * misaligned, then start the walk.  Restore both functions from
 * upstream before editing.
 */
277 unsigned int alignmask;
279 alignmask = crypto_tfm_alg_alignmask(tfm);
289 if (
unlikely(((
unsigned long)walk->
iv & alignmask))) {
290 int err = ablkcipher_copy_iv(walk, tfm, alignmask);
298 return ablkcipher_walk_next(req, walk);
305 return ablkcipher_walk_first(req, walk);
/*
 * NOTE(review): extraction-damaged fragment, presumably of
 * setkey_unaligned() (orig. lines ~308-331).  Missing: function
 * header, the kmalloc of `buffer`, and the kfree/return tail.
 * Visible remnants: align a temporary buffer, copy the key into it,
 * call the cipher's setkey, then wipe the copy with memset.
 * (NOTE: plain memset of key material can be optimized away in
 * general C; the kernel context here may differ -- verify upstream.)
 * Restore the full function from upstream before editing.
 */
313 unsigned long alignmask = crypto_ablkcipher_alignmask(tfm);
316 unsigned long absize;
318 absize = keylen + alignmask;
323 alignbuffer = (
u8 *)
ALIGN((
unsigned long)
buffer, alignmask + 1);
324 memcpy(alignbuffer, key, keylen);
325 ret = cipher->
setkey(tfm, alignbuffer, keylen);
326 memset(alignbuffer, 0, keylen);
/*
 * NOTE(review): extraction-damaged fragment, presumably of setkey()
 * (orig. lines ~333-346).  The header and the key-length bounds check
 * are missing.  Visible remnants: route misaligned keys through
 * setkey_unaligned(), otherwise call the cipher's setkey directly.
 * Restore from upstream before editing.
 */
335 unsigned long alignmask = crypto_ablkcipher_alignmask(tfm);
342 if ((
unsigned long)key & alignmask)
343 return setkey_unaligned(tfm, key, keylen);
345 return cipher->
setkey(tfm, key, keylen);
/*
 * NOTE(review): extraction-damaged fragments of two thin wrappers
 * (orig. lines 356 and 361) that forward a givcrypt request's
 * embedded ablkcipher request (&req->creq) to the plain
 * encrypt/decrypt entry points.  The function headers are missing.
 * Restore from upstream before editing.
 */
356 return crypto_ablkcipher_encrypt(&req->
creq);
361 return crypto_ablkcipher_decrypt(&req->
creq);
/*
 * NOTE(review): extraction-damaged fragments of three upstream units:
 * crypto_init_ablkcipher_ops() (orig. ~380), the netlink report
 * function crypto_ablkcipher_report() (orig. ~393-402, filling a
 * crypto_report_blkcipher with the alg's key sizes and ivsize), and
 * the crypto_ablkcipher_type ops table (orig. ~432-437).  Headers,
 * struct braces and the nla_put call are missing.  Restore all three
 * from upstream before editing.
 */
380 crt->
base = __crypto_ablkcipher_cast(tfm);
393 alg->cra_ablkcipher.geniv ?:
"<default>");
396 rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
397 rblkcipher.max_keysize = alg->cra_ablkcipher.max_keysize;
398 rblkcipher.ivsize = alg->cra_ablkcipher.ivsize;
402 goto nla_put_failure;
432 .ctxsize = crypto_ablkcipher_ctxsize,
433 .init = crypto_init_ablkcipher_ops,
434 #ifdef CONFIG_PROC_FS
435 .show = crypto_ablkcipher_show,
437 .report = crypto_ablkcipher_report,
/*
 * NOTE(review): extraction-damaged fragments of the givcipher
 * counterparts of the previous section: crypto_init_givcipher_ops()
 * (orig. ~446-461), crypto_givcipher_report() (orig. ~474-483), and
 * the crypto_givcipher_type ops table (orig. ~513-518).  Parameter
 * lists, bodies, struct braces and the nla_put call are missing.
 * Restore all three from upstream before editing.
 */
446 static int crypto_init_givcipher_ops(
struct crypto_tfm *tfm,
u32 type,
461 crt->
base = __crypto_ablkcipher_cast(tfm);
474 alg->cra_ablkcipher.geniv ?:
"<built-in>");
477 rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
478 rblkcipher.max_keysize = alg->cra_ablkcipher.max_keysize;
479 rblkcipher.ivsize = alg->cra_ablkcipher.ivsize;
483 goto nla_put_failure;
513 .ctxsize = crypto_ablkcipher_ctxsize,
514 .init = crypto_init_givcipher_ops,
515 #ifdef CONFIG_PROC_FS
516 .show = crypto_givcipher_show,
518 .report = crypto_givcipher_report,
/*
 * NOTE(review): extraction-damaged fragments spanning what appear to
 * be crypto_default_geniv() (orig. ~526-531, selecting "eseqiv" vs the
 * default geniv), crypto_givcipher_default() (orig. ~534-606, which
 * registers a geniv template instance: larval handling, rtattr
 * parameter setup, template alloc, crypto_tmpl_put cleanup), and the
 * tail of crypto_lookup_skcipher() (orig. ~630-652).  The control
 * flow, locking and error paths are almost entirely missing -- these
 * lines are not usable as-is.  Restore from upstream before editing.
 */
526 alg->cra_ablkcipher.ivsize) !=
531 "eseqiv" : skcipher_default_geniv;
534 static int crypto_givcipher_default(
struct crypto_alg *alg,
u32 type,
u32 mask)
555 err = PTR_ERR(larval);
560 if (!crypto_is_larval(larval))
563 ptype.attr.rta_len =
sizeof(ptype);
570 palg.attr.
rta_len =
sizeof(palg);
580 geniv = alg->cra_blkcipher.geniv;
582 geniv = alg->cra_ablkcipher.geniv;
592 inst = tmpl->
alloc(tb);
606 crypto_tmpl_put(tmpl);
630 alg->cra_ablkcipher.ivsize))
650 alg->cra_ablkcipher.ivsize));
652 return ERR_PTR(crypto_givcipher_default(alg, type, mask));
/*
 * NOTE(review): extraction-damaged fragments of the allocation path
 * (orig. ~662-695: two callers normalizing type/mask via
 * crypto_skcipher_type()/crypto_skcipher_mask() before casting the
 * resulting tfm) and of skcipher_module_init()/skcipher_module_exit()
 * (orig. ~713-720; init appears to pick "eseqiv" vs "chainiv" as the
 * default geniv).  Bodies are almost entirely missing.  Restore from
 * upstream before editing.
 */
662 type = crypto_skcipher_type(type);
663 mask = crypto_skcipher_mask(mask);
681 type = crypto_skcipher_type(type);
682 mask = crypto_skcipher_mask(mask);
695 return __crypto_ablkcipher_cast(tfm);
713 static int __init skcipher_module_init(
void)
716 "eseqiv" :
"chainiv";
720 static void skcipher_module_exit(
void)