Linux Kernel 3.7.1
des_s390.c

/*
 * Cryptographic API.
 *
 * s390 implementation of the DES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2003, 2011
 * Author(s): Thomas Spatzier
 *            Jan Glauber ([email protected])
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/des.h>

#include "crypt_s390.h"

#define DES3_KEY_SIZE   (3 * DES_KEY_SIZE)

static u8 *ctrblk;

struct s390_des_ctx {
        u8 iv[DES_BLOCK_SIZE];
        u8 key[DES3_KEY_SIZE];
};

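/*
 * des_ekey() from the generic DES code is used here only as a weak-key
 * check (it returns 0 for the known weak keys); the expanded key in tmp
 * is not used any further, since the CPACF instructions below operate on
 * the raw key.  Weak keys are rejected only when the caller asked for it
 * by setting CRYPTO_TFM_REQ_WEAK_KEY.
 */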
static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int key_len)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        u32 tmp[DES_EXPKEY_WORDS];

        /* check for weak keys */
        if (!des_ekey(tmp, key) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                return -EINVAL;
        }

        memcpy(ctx->key, key, key_len);
        return 0;
}

static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        crypt_s390_km(KM_DEA_ENCRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}

static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        crypt_s390_km(KM_DEA_DECRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}

static struct crypto_alg des_alg = {
        .cra_name = "des",
        .cra_driver_name = "des-s390",
        .cra_priority = CRYPT_S390_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_module = THIS_MODULE,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = DES_KEY_SIZE,
                        .cia_max_keysize = DES_KEY_SIZE,
                        .cia_setkey = des_setkey,
                        .cia_encrypt = des_encrypt,
                        .cia_decrypt = des_decrypt,
                }
        }
};

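/*
 * Common ECB helper for DES and 3DES.  The blkcipher walk maps the
 * scatterlist data into the kernel address space chunk by chunk, and each
 * chunk of whole blocks is handed to the CPACF KM (cipher message)
 * instruction in one call; func selects the DEA or TDEA-192 function code
 * and the direction.
 */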
static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
                            u8 *key, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_km(func, key, out, in, n);
                BUG_ON((ret < 0) || (ret != n));

                nbytes &= DES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        return ret;
}

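/*
 * Common CBC helper.  The chaining value is copied into the per-tfm iv
 * buffer so that the CPACF KMC (cipher message with chaining) instruction
 * can update it in place across chunks; the final chaining value is
 * written back to walk->iv once the walk completes.
 */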
static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
                            u8 *iv, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;

        if (!nbytes)
                goto out;

        memcpy(iv, walk->iv, DES_BLOCK_SIZE);
        do {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_kmc(func, iv, out, in, n);
                BUG_ON((ret < 0) || (ret != n));

                nbytes &= DES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        memcpy(walk->iv, iv, DES_BLOCK_SIZE);

out:
        return ret;
}

static int ecb_des_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, ctx->key, &walk);
}

static int ecb_des_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, KM_DEA_DECRYPT, ctx->key, &walk);
}

static struct crypto_alg ecb_des_alg = {
        .cra_name = "ecb(des)",
        .cra_driver_name = "ecb-des-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .setkey = des_setkey,
                        .encrypt = ecb_des_encrypt,
                        .decrypt = ecb_des_decrypt,
                }
        }
};

static int cbc_des_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, ctx->iv, &walk);
}

static int cbc_des_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, ctx->iv, &walk);
}

static struct crypto_alg cbc_des_alg = {
        .cra_name = "cbc(des)",
        .cra_driver_name = "cbc-des-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des_setkey,
                        .encrypt = cbc_des_encrypt,
                        .decrypt = cbc_des_decrypt,
                }
        }
};

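/*
 * Illustrative usage sketch (not part of this driver): once cbc_des_alg is
 * registered, in-kernel users reach this implementation through the generic
 * blkcipher API.  The names key, iv, data and len are placeholders for this
 * example only, and len must be a multiple of DES_BLOCK_SIZE.
 *
 *      struct crypto_blkcipher *tfm = crypto_alloc_blkcipher("cbc(des)", 0, 0);
 *      struct blkcipher_desc desc = { .tfm = tfm };
 *      struct scatterlist sg;
 *
 *      crypto_blkcipher_setkey(tfm, key, DES_KEY_SIZE);
 *      crypto_blkcipher_set_iv(tfm, iv, DES_BLOCK_SIZE);
 *      sg_init_one(&sg, data, len);
 *      crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
 *      crypto_free_blkcipher(tfm);
 */
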
/*
 * RFC2451:
 *
 * For DES-EDE3, there is no known need to reject weak or
 * complementation keys.  Any weakness is obviated by the use of
 * multiple keys.
 *
 * However, if the first two or last two independent 64-bit keys are
 * equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
 * same as DES.  Implementers MUST reject keys that exhibit this
 * property.
 *
 */
static int des3_setkey(struct crypto_tfm *tfm, const u8 *key,
                       unsigned int key_len)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;

        if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
            memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
                   DES_KEY_SIZE)) &&
            (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                return -EINVAL;
        }
        memcpy(ctx->key, key, key_len);
        return 0;
}

static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        crypt_s390_km(KM_TDEA_192_ENCRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        crypt_s390_km(KM_TDEA_192_DECRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static struct crypto_alg des3_alg = {
        .cra_name = "des3_ede",
        .cra_driver_name = "des3_ede-s390",
        .cra_priority = CRYPT_S390_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_module = THIS_MODULE,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = DES3_KEY_SIZE,
                        .cia_max_keysize = DES3_KEY_SIZE,
                        .cia_setkey = des3_setkey,
                        .cia_encrypt = des3_encrypt,
                        .cia_decrypt = des3_decrypt,
                }
        }
};

static int ecb_des3_encrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, ctx->key, &walk);
}

static int ecb_des3_decrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, ctx->key, &walk);
}

static struct crypto_alg ecb_des3_alg = {
        .cra_name = "ecb(des3_ede)",
        .cra_driver_name = "ecb-des3_ede-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES3_KEY_SIZE,
                        .max_keysize = DES3_KEY_SIZE,
                        .setkey = des3_setkey,
                        .encrypt = ecb_des3_encrypt,
                        .decrypt = ecb_des3_decrypt,
                }
        }
};

static int cbc_des3_encrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, ctx->iv, &walk);
}

static int cbc_des3_decrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, ctx->iv, &walk);
}

static struct crypto_alg cbc_des3_alg = {
        .cra_name = "cbc(des3_ede)",
        .cra_driver_name = "cbc-des3_ede-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES3_KEY_SIZE,
                        .max_keysize = DES3_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des3_setkey,
                        .encrypt = cbc_des3_encrypt,
                        .decrypt = cbc_des3_decrypt,
                }
        }
};

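/*
 * Common CTR helper.  A whole page (ctrblk, allocated in des_s390_init) is
 * filled with consecutive counter values so that a single CPACF KMCTR call
 * can process up to PAGE_SIZE of data; a trailing partial block is
 * encrypted into a stack buffer and only the requested bytes are copied to
 * the destination.
 */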
static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
                            struct s390_des_ctx *ctx, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
        unsigned int i, n, nbytes;
        u8 buf[DES_BLOCK_SIZE];
        u8 *out, *in;

        memcpy(ctrblk, walk->iv, DES_BLOCK_SIZE);
        while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                while (nbytes >= DES_BLOCK_SIZE) {
                        /* align to block size, max. PAGE_SIZE */
                        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
                                nbytes & ~(DES_BLOCK_SIZE - 1);
                        for (i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) {
                                memcpy(ctrblk + i, ctrblk + i - DES_BLOCK_SIZE,
                                       DES_BLOCK_SIZE);
                                crypto_inc(ctrblk + i, DES_BLOCK_SIZE);
                        }
                        ret = crypt_s390_kmctr(func, ctx->key, out, in, n, ctrblk);
                        BUG_ON((ret < 0) || (ret != n));
                        if (n > DES_BLOCK_SIZE)
                                memcpy(ctrblk, ctrblk + n - DES_BLOCK_SIZE,
                                       DES_BLOCK_SIZE);
                        crypto_inc(ctrblk, DES_BLOCK_SIZE);
                        out += n;
                        in += n;
                        nbytes -= n;
                }
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        /* final block may be < DES_BLOCK_SIZE, copy only nbytes */
        if (nbytes) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                ret = crypt_s390_kmctr(func, ctx->key, buf, in,
                                       DES_BLOCK_SIZE, ctrblk);
                BUG_ON(ret < 0 || ret != DES_BLOCK_SIZE);
                memcpy(out, buf, nbytes);
                crypto_inc(ctrblk, DES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, 0);
        }
        memcpy(walk->iv, ctrblk, DES_BLOCK_SIZE);
        return ret;
}

static int ctr_des_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk);
}

static int ctr_des_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk);
}

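/*
 * cra_blocksize is 1 for the CTR algorithms: counter mode turns DES into a
 * stream cipher, so requests of any byte length are valid and the final
 * partial block is handled in ctr_desall_crypt() above.
 */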
static struct crypto_alg ctr_des_alg = {
        .cra_name = "ctr(des)",
        .cra_driver_name = "ctr-des-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des_setkey,
                        .encrypt = ctr_des_encrypt,
                        .decrypt = ctr_des_decrypt,
                }
        }
};

static int ctr_des3_encrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk);
}

static int ctr_des3_decrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk);
}

static struct crypto_alg ctr_des3_alg = {
        .cra_name = "ctr(des3_ede)",
        .cra_driver_name = "ctr-des3_ede-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES3_KEY_SIZE,
                        .max_keysize = DES3_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des3_setkey,
                        .encrypt = ctr_des3_encrypt,
                        .decrypt = ctr_des3_decrypt,
                }
        }
};

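/*
 * The KM/KMC function codes for DEA and TDEA-192 belong to the base
 * message-security assist (MSA), so the ECB and CBC algorithms are
 * registered whenever MSA is present.  KMCTR was added with MSA extension
 * 4, so the CTR algorithms and the ctrblk counter page are only set up
 * when that facility is available.
 */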
static int __init des_s390_init(void)
{
        int ret;

        if (!crypt_s390_func_available(KM_DEA_ENCRYPT, CRYPT_S390_MSA) ||
            !crypt_s390_func_available(KM_TDEA_192_ENCRYPT, CRYPT_S390_MSA))
                return -EOPNOTSUPP;

        ret = crypto_register_alg(&des_alg);
        if (ret)
                goto des_err;
        ret = crypto_register_alg(&ecb_des_alg);
        if (ret)
                goto ecb_des_err;
        ret = crypto_register_alg(&cbc_des_alg);
        if (ret)
                goto cbc_des_err;
        ret = crypto_register_alg(&des3_alg);
        if (ret)
                goto des3_err;
        ret = crypto_register_alg(&ecb_des3_alg);
        if (ret)
                goto ecb_des3_err;
        ret = crypto_register_alg(&cbc_des3_alg);
        if (ret)
                goto cbc_des3_err;

        if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT,
                                      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
                                      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
                ret = crypto_register_alg(&ctr_des_alg);
                if (ret)
                        goto ctr_des_err;
                ret = crypto_register_alg(&ctr_des3_alg);
                if (ret)
                        goto ctr_des3_err;
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto ctr_mem_err;
                }
        }
out:
        return ret;

ctr_mem_err:
        crypto_unregister_alg(&ctr_des3_alg);
ctr_des3_err:
        crypto_unregister_alg(&ctr_des_alg);
ctr_des_err:
        crypto_unregister_alg(&cbc_des3_alg);
cbc_des3_err:
        crypto_unregister_alg(&ecb_des3_alg);
ecb_des3_err:
        crypto_unregister_alg(&des3_alg);
des3_err:
        crypto_unregister_alg(&cbc_des_alg);
cbc_des_err:
        crypto_unregister_alg(&ecb_des_alg);
ecb_des_err:
        crypto_unregister_alg(&des_alg);
des_err:
        goto out;
}

static void __exit des_s390_exit(void)
{
        if (ctrblk) {
                crypto_unregister_alg(&ctr_des_alg);
                crypto_unregister_alg(&ctr_des3_alg);
                free_page((unsigned long) ctrblk);
        }
        crypto_unregister_alg(&cbc_des3_alg);
        crypto_unregister_alg(&ecb_des3_alg);
        crypto_unregister_alg(&des3_alg);
        crypto_unregister_alg(&cbc_des_alg);
        crypto_unregister_alg(&ecb_des_alg);
        crypto_unregister_alg(&des_alg);
}

module_init(des_s390_init);
module_exit(des_s390_exit);

MODULE_ALIAS("des");
MODULE_ALIAS("des3_ede");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms");