aes_glue.c (Linux kernel 3.7.1)

/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <[email protected]>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <[email protected]>
 *             Gabriele Paoloni <[email protected]>
 *             Tadeusz Struk ([email protected])
 *             Aidan O'Mahony ([email protected])
 * Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};
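
/*
 * One aes_ops table per key size (aes128_ops/aes192_ops/aes256_ops
 * below) is bound to ctx->ops once at setkey time, so none of the
 * per-request paths have to branch on the key length.
 */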

extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

struct aes_ops aes128_ops = {
	.encrypt = aes_sparc64_encrypt_128,
	.decrypt = aes_sparc64_decrypt_128,
	.load_encrypt_keys = aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys = aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt = aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt = aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt = aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt = aes_sparc64_cbc_decrypt_128,
	.ctr_crypt = aes_sparc64_ctr_crypt_128,
};

struct aes_ops aes192_ops = {
	.encrypt = aes_sparc64_encrypt_192,
	.decrypt = aes_sparc64_decrypt_192,
	.load_encrypt_keys = aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys = aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt = aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt = aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt = aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt = aes_sparc64_cbc_decrypt_192,
	.ctr_crypt = aes_sparc64_ctr_crypt_192,
};

struct aes_ops aes256_ops = {
	.encrypt = aes_sparc64_encrypt_256,
	.decrypt = aes_sparc64_decrypt_256,
	.load_encrypt_keys = aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys = aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt = aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt = aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt = aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt = aes_sparc64_cbc_decrypt_256,
	.ctr_crypt = aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}
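
/*
 * The expanded_key_length values above are the standard AES key
 * schedule sizes: (rounds + 1) round keys of 16 bytes each, i.e.
 *
 *	AES-128: (10 + 1) * 16 = 176 = 0xb0 bytes
 *	AES-192: (12 + 1) * 16 = 208 = 0xd0 bytes
 *	AES-256: (14 + 1) * 16 = 240 = 0xf0 bytes
 *
 * The largest of these matches AES_MAX_KEYLENGTH from <crypto/aes.h>,
 * so the expanded schedule always fits in ctx->key.
 */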

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}
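
/*
 * These two hooks implement the bare "aes" single-block cipher. A
 * minimal caller-side sketch, assuming the classic synchronous cipher
 * API of this kernel generation (key/in/out are caller-supplied
 * buffers; error handling elided):
 *
 *	struct crypto_cipher *tfm;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	crypto_cipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_cipher_encrypt_one(tfm, out, in);   (one 16-byte block)
 *	crypto_free_cipher(tfm);
 */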

#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))
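
/*
 * AES_BLOCK_SIZE is 16, so AES_BLOCK_MASK is ~15: in the walk loops
 * below, "nbytes & AES_BLOCK_MASK" rounds a chunk down to a whole
 * number of blocks for the assembler, while
 * "nbytes & (AES_BLOCK_SIZE - 1)" is the sub-block remainder handed
 * back to blkcipher_walk_done().
 */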

static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}
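
/*
 * The fprs_write(0) above (and at the end of every walk loop below)
 * marks the floating-point register file clean again: the sparc64 AES
 * opcodes work in the FP/VIS registers, and clearing FPRS after use
 * spares the kernel from preserving that state for this task.
 */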

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}
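
/*
 * Note that the decrypt paths hand the assembler key_end, a pointer
 * just past the expanded key, rather than its start, presumably
 * because the inverse cipher consumes the round keys in reverse order
 * and indexing backwards from the end is the natural layout for it.
 */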

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}

static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ctr_crypt(&ctx->key[0],
					    (const u64 *)walk.src.virt.addr,
					    (u64 *) walk.dst.virt.addr,
					    block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}
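
/*
 * CTR mode only ever runs the block cipher forward (the keystream is
 * the encryption of successive counter values, XORed with the data),
 * so ctr_crypt() loads the encryption key schedule and serves as both
 * the .encrypt and .decrypt handler of "ctr(aes)" below.
 */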

static struct crypto_alg algs[] = { {
	.cra_name = "aes",
	.cra_driver_name = "aes-sparc64",
	.cra_priority = SPARC_CR_OPCODE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask = 3,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = aes_encrypt,
			.cia_decrypt = aes_decrypt
		}
	}
}, {
	.cra_name = "ecb(aes)",
	.cra_driver_name = "ecb-aes-sparc64",
	.cra_priority = SPARC_CR_OPCODE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask = 7,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_set_key,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
}, {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "cbc-aes-sparc64",
	.cra_priority = SPARC_CR_OPCODE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask = 7,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_set_key,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
}, {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "ctr-aes-sparc64",
	.cra_priority = SPARC_CR_OPCODE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask = 7,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_set_key,
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
} };
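
/*
 * A minimal usage sketch of one of these algorithms from elsewhere in
 * the kernel, assuming the synchronous blkcipher API of this kernel
 * generation (key/iv/buf are caller-supplied, buf_len must be a
 * multiple of AES_BLOCK_SIZE for CBC, and error handling is elided):
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
 *	crypto_blkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_blkcipher_set_iv(tfm, iv, AES_BLOCK_SIZE);
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	sg_init_one(&sg, buf, buf_len);
 *	crypto_blkcipher_encrypt(&desc, &sg, &sg, buf_len);
 *	crypto_free_blkcipher(tfm);
 *
 * With this module loaded on a CPU that advertises the AES opcodes,
 * the high cra_priority above makes "cbc-aes-sparc64" the
 * implementation selected for the generic "cbc(aes)" name.
 */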

static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}
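
/*
 * %asr26 is the Configuration Feature Register of the crypto-capable
 * sun4v CPUs, and CFR_AES (<asm/pstate.h>) is its AES-opcode bit. The
 * sparc64_elf_hwcap test must come first: the register is only present
 * when the hardware advertises the crypto unit, so an unconditional
 * read could trap on older CPUs.
 */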

static int __init aes_sparc64_mod_init(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(algs); i++)
		INIT_LIST_HEAD(&algs[i].cra_list);

	if (sparc64_has_aes_opcode()) {
		pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
		return crypto_register_algs(algs, ARRAY_SIZE(algs));
	}
	pr_info("sparc64 aes opcodes not available.\n");
	return -ENODEV;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS("aes");

#include "crop_devid.c"