Linux Kernel 3.7.1
camellia_glue.c
/* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
 *
 * Copyright (C) 2012 David S. Miller <[email protected]>
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

#define CAMELLIA_MIN_KEY_SIZE		16
#define CAMELLIA_MAX_KEY_SIZE		32
#define CAMELLIA_BLOCK_SIZE		16
#define CAMELLIA_TABLE_BYTE_LEN		272

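/* Per-tfm context: the expanded encryption and decryption key schedules
 * produced by camellia_sparc64_key_expand(), plus the original key length
 * (16, 24 or 32 bytes), which selects the 3- or 4-grand-rounds routines.
 */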
struct camellia_sparc64_ctx {
	u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	int key_len;
};

extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
					unsigned int key_len, u64 *decrypt_key);

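/* ->setkey() handler: reject unsupported key lengths with -EINVAL and
 * CRYPTO_TFM_RES_BAD_KEY_LEN, then expand the user key into both key
 * schedules via the sparc64 assembler helper.
 */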
static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
			    unsigned int key_len)
{
	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
	const u32 *in_key = (const u32 *) _in_key;
	u32 *flags = &tfm->crt_flags;

	if (key_len != 16 && key_len != 24 && key_len != 32) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	ctx->key_len = key_len;

	camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
				    key_len, &ctx->decrypt_key[0]);
	return 0;
}

extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
				   u32 *output, unsigned int key_len);

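/* Single-block encrypt/decrypt handlers for the bare "camellia" cipher:
 * each call runs one 16-byte block through the assembler routine using the
 * appropriate key schedule.
 */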
static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

	camellia_sparc64_crypt(&ctx->encrypt_key[0],
			       (const u32 *) src,
			       (u32 *) dst, ctx->key_len);
}

static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

	camellia_sparc64_crypt(&ctx->decrypt_key[0],
			       (const u32 *) src,
			       (u32 *) dst, ctx->key_len);
}

extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);

typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
			  const u64 *key);

extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;

#define CAMELLIA_BLOCK_MASK	(~(CAMELLIA_BLOCK_SIZE - 1))

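/* Shared ECB path: pick the 3-grand-rounds routine for 128-bit keys or the
 * 4-grand-rounds routine for 192/256-bit keys, load the key schedule into
 * the FPU registers once, then process each chunk from the blkcipher walk a
 * whole number of blocks at a time.  fprs_write(0) clears FPRS when done.
 */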
static int __ecb_crypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes, bool encrypt)
{
	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	ecb_crypt_op *op;
	const u64 *key;
	int err;

	op = camellia_sparc64_ecb_crypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_ecb_crypt_4_grand_rounds;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	if (encrypt)
		key = &ctx->encrypt_key[0];
	else
		key = &ctx->decrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

		if (likely(block_len)) {
			const u64 *src64;
			u64 *dst64;

			src64 = (const u64 *)walk.src.virt.addr;
			dst64 = (u64 *) walk.dst.virt.addr;
			op(src64, dst64, block_len, key);
		}
		nbytes &= CAMELLIA_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	return __ecb_crypt(desc, dst, src, nbytes, true);
}

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	return __ecb_crypt(desc, dst, src, nbytes, false);
}

typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
			  const u64 *key, u64 *iv);

extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;

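/* CBC chaining is handled inside the assembler routines, which read and
 * update the IV (walk.iv) in place, so each contiguous chunk from the walk
 * is again handled with a single call.
 */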
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	cbc_crypt_op *op;
	const u64 *key;
	int err;

	op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_cbc_encrypt_4_grand_rounds;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	key = &ctx->encrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

		if (likely(block_len)) {
			const u64 *src64;
			u64 *dst64;

			src64 = (const u64 *)walk.src.virt.addr;
			dst64 = (u64 *) walk.dst.virt.addr;
			op(src64, dst64, block_len, key,
			   (u64 *) walk.iv);
		}
		nbytes &= CAMELLIA_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	cbc_crypt_op *op;
	const u64 *key;
	int err;

	op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_cbc_decrypt_4_grand_rounds;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	key = &ctx->decrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

		if (likely(block_len)) {
			const u64 *src64;
			u64 *dst64;

			src64 = (const u64 *)walk.src.virt.addr;
			dst64 = (u64 *) walk.dst.virt.addr;
			op(src64, dst64, block_len, key,
			   (u64 *) walk.iv);
		}
		nbytes &= CAMELLIA_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

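/* Three algorithm instances are registered: the bare "camellia" cipher and
 * blkcipher implementations of "ecb(camellia)" and "cbc(camellia)".  Their
 * priority (from opcodes.h) ranks them above the generic C implementations.
 */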
static struct crypto_alg algs[] = { {
	.cra_name = "camellia",
	.cra_driver_name = "camellia-sparc64",
	.cra_priority = SPARC_CR_OPCODE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_sparc64_ctx),
	.cra_alignmask = 3,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.cia_max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.cia_setkey = camellia_set_key,
			.cia_encrypt = camellia_encrypt,
			.cia_decrypt = camellia_decrypt
		}
	}
}, {
	.cra_name = "ecb(camellia)",
	.cra_driver_name = "ecb-camellia-sparc64",
	.cra_priority = SPARC_CR_OPCODE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_sparc64_ctx),
	.cra_alignmask = 7,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.setkey = camellia_set_key,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
}, {
	.cra_name = "cbc(camellia)",
	.cra_driver_name = "cbc-camellia-sparc64",
	.cra_priority = SPARC_CR_OPCODE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_sparc64_ctx),
	.cra_alignmask = 7,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.setkey = camellia_set_key,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
}
};

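/* The CAMELLIA opcodes are advertised via the HWCAP_SPARC_CRYPTO hardware
 * capability and the CFR_CAMELLIA bit of the Configuration Feature Register
 * (%asr26); both must be present before the algorithms are registered.
 */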
static bool __init sparc64_has_camellia_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_CAMELLIA))
		return false;

	return true;
}

static int __init camellia_sparc64_mod_init(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(algs); i++)
		INIT_LIST_HEAD(&algs[i].cra_list);

	if (sparc64_has_camellia_opcode()) {
		pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
		return crypto_register_algs(algs, ARRAY_SIZE(algs));
	}
	pr_info("sparc64 camellia opcodes not available.\n");
	return -ENODEV;
}

static void __exit camellia_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(camellia_sparc64_mod_init);
module_exit(camellia_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");

MODULE_ALIAS("camellia");

#include "crop_devid.c"
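
A minimal sketch (not part of the file above) of how a separate kernel module could exercise the registered single-block "camellia" cipher through the generic crypto API of this kernel generation. The module name, function names, and the key/plaintext bytes are made up for illustration; only crypto_alloc_cipher(), crypto_cipher_setkey(), crypto_cipher_encrypt_one(), and crypto_free_cipher() from <linux/crypto.h> are relied on.

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/printk.h>

static int __init camellia_demo_init(void)
{
	/* Made-up 128-bit key and one 16-byte block of plaintext. */
	static const u8 key[16] = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
				    0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f };
	static const u8 plain[16] = "sixteen byte in";
	u8 cipher[16];
	struct crypto_cipher *tfm;
	int err;

	/* Picks the highest-priority "camellia" provider, e.g. camellia-sparc64
	 * when the opcodes are available, otherwise the generic C version.
	 */
	tfm = crypto_alloc_cipher("camellia", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!err) {
		crypto_cipher_encrypt_one(tfm, cipher, plain);
		print_hex_dump_bytes("camellia ct: ", DUMP_PREFIX_NONE,
				     cipher, sizeof(cipher));
	}

	crypto_free_cipher(tfm);
	return err;
}

static void __exit camellia_demo_exit(void)
{
}

module_init(camellia_demo_init);
module_exit(camellia_demo_exit);
MODULE_LICENSE("GPL");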