#include <linux/kernel.h>
#include <linux/rtnetlink.h>
#include <linux/module.h>
#include <crypto/aes.h>
#include <crypto/sha.h>
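/*
 * Per-SA context kept in NPE memory; 80 bytes appears to cover the
 * worst case (hash: cfgword plus two digest-sized chaining variables;
 * crypt: key material plus cfgword).
 */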
#define NPE_CTX_LEN 80
#define AES_BLOCK128 16
#define NPE_OP_HASH_VERIFY   0x01
#define NPE_OP_CCM_ENABLE    0x04
#define NPE_OP_CRYPT_ENABLE  0x08
#define NPE_OP_HASH_ENABLE   0x10
#define NPE_OP_NOT_IN_PLACE  0x20
#define NPE_OP_HMAC_DISABLE  0x40
#define NPE_OP_CRYPT_ENCRYPT 0x80

#define NPE_OP_CCM_GEN_MIC   0xcc
#define NPE_OP_HASH_GEN_ICV  0x50
#define NPE_OP_ENC_GEN_KEY   0xc9
#define MOD_ECB     0x0000
#define MOD_CTR     0x1000
#define MOD_CBC_ENC 0x2000
#define MOD_CBC_DEC 0x3000
#define MOD_CCM_ENC 0x4000
#define MOD_CCM_DEC 0x5000
#define CIPH_DECR   0x0000
#define CIPH_ENCR   0x0400
#define MOD_DES     0x0000
#define MOD_TDEA2   0x0100
#define MOD_3DES    0x0200
#define MOD_AES     0x0800
#define MOD_AES128  (0x0800 | KEYLEN_128)
#define MOD_AES192  (0x0900 | KEYLEN_192)
#define MOD_AES256  (0x0a00 | KEYLEN_256)
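/*
 * setup_cipher() below builds a complete NPE configuration word by
 * OR-ing one CIPH_* direction bit, one MOD_* chaining mode, and a
 * cipher selector plus key-length code, e.g.
 *	CIPH_ENCR | MOD_CBC_ENC | MOD_AES
 * with keylen_cfg added for AES-CBC encryption.
 */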
#define NPE_QLEN_TOTAL 64
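/*
 * The descriptor pool is larger than what the normal allocation path
 * hands out; the remainder is held back for get_crypt_desc_emerg()
 * (below), so internal key-setup requests cannot be starved by
 * in-flight user requests.
 */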
#define CTL_FLAG_UNUSED       0x0000
#define CTL_FLAG_USED         0x1000
#define CTL_FLAG_PERFORM_ABLK 0x0001
#define CTL_FLAG_GEN_ICV      0x0002
#define CTL_FLAG_GEN_REVAES   0x0004
#define CTL_FLAG_PERFORM_AEAD 0x0008
#define CTL_FLAG_MASK         0x000f
#define HMAC_IPAD_VALUE   0x36
#define HMAC_OPAD_VALUE   0x5C
#define HMAC_PAD_BLOCKLEN SHA1_BLOCK_SIZE

#define MD5_DIGEST_SIZE   16
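/*
 * HMAC (RFC 2104) derives its inner/outer pads by XOR-ing the key,
 * zero-padded to a full hash block, with 0x36 or 0x5C.  A minimal
 * sketch of that step (illustrative helper, not part of this driver;
 * the real code performs the equivalent XOR when writing the pads
 * into NPE context memory):
 */
static void hmac_pad_key(u8 *pad, const u8 *key, int key_len, u8 xor_val)
{
	int i;

	memcpy(pad, key, key_len);
	memset(pad + key_len, 0, HMAC_PAD_BLOCKLEN - key_len);
	for (i = 0; i < HMAC_PAD_BLOCKLEN; i++)
		pad[i] ^= xor_val;	/* HMAC_IPAD_VALUE or HMAC_OPAD_VALUE */
}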
.cfgword = 0xAA010004,
.icv = "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
       "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
.cfgword = 0x00000005,
.icv = "\x67\x45\x23\x01\xEF\xCD\xAB\x89\x98\xBA"
       "\xDC\xFE\x10\x32\x54\x76\xC3\xD2\xE1\xF0",
static struct npe *npe_c;
static int support_aes = 1;
static void dev_release(struct device *dev)
#define DRIVER_NAME "ixp4xx_crypto"
.release = dev_release,
return crypt_phys + (virt - crypt_virt) * sizeof(struct crypt_ctl);
return crypt_virt + (phys - crypt_phys) / sizeof(struct crypt_ctl);
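/*
 * The crypt_ctl descriptors live in one DMA-coherent array, so
 * virtual and bus addresses convert by plain index arithmetic:
 * crypt_phys2virt(crypt_virt2phys(&crypt_virt[n])) == &crypt_virt[n].
 */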
static int setup_crypt_desc(void)
static struct crypt_ctl *get_crypt_desc(void)
spin_unlock_irqrestore(&desc_lock, flags);
spin_unlock_irqrestore(&desc_lock, flags);
return crypt_virt + i;
spin_unlock_irqrestore(&desc_lock, flags);
static struct crypt_ctl *get_crypt_desc_emerg(void)
desc = get_crypt_desc();
spin_unlock_irqrestore(&emerg_lock, flags);
return crypt_virt + i;
spin_unlock_irqrestore(&emerg_lock, flags);
static void finish_scattered_hmac(struct crypt_ctl *crypt)
struct aead_ctx *req_ctx = aead_request_ctx(req);
int authsize = crypto_aead_authsize(tfm);
int decryptlen = req->cryptlen - authsize;
req->src, decryptlen, authsize, 1);
failed = phys & 0x1 ? -EBADMSG : 0;
crypt = crypt_phys2virt(phys);
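/*
 * The NPE reports hash-verify failure in the low bit of the returned
 * descriptor address, hence the -EBADMSG above; the address must be
 * masked back to its aligned value before crypt_phys2virt().
 */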
struct aead_ctx *req_ctx = aead_request_ctx(req);
finish_scattered_hmac(crypt);
req->base.complete(&req->base, failed);
struct ablk_ctx *req_ctx = ablkcipher_request_ctx(req);
free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
free_buf_chain(dev, req_ctx->src, crypt->src_buf);
req->base.complete(&req->base, failed);
ctx = crypto_tfm_ctx(crypt->data.tfm);
ctx = crypto_tfm_ctx(crypt->data.tfm);
static void irqhandler(void *_unused)
tasklet_schedule(&crypto_done_tasklet);
static void crypto_done_action(unsigned long arg)
tasklet_schedule(&crypto_done_tasklet);
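/*
 * Completions are handled in two stages: the queue interrupt only
 * schedules the tasklet, and crypto_done_action() re-schedules itself
 * (above) instead of looping, so a full receive queue cannot
 * monopolize softirq time.
 */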
static int init_ixp_crypto(void)
switch ((msg[1] >> 16) & 0xff) {
495 "ixp_crypto:out",
NULL);
499 "ixp_crypto:in",
NULL);
505 tasklet_init(&crypto_done_tasklet, crypto_done_action, 0);
static void release_ixp_crypto(void)
crypt_virt, crypt_phys);
static void reset_sa_dir(struct ix_sa_dir *dir)
static int init_sa_dir(struct ix_sa_dir *dir)
static void free_sa_dir(struct ix_sa_dir *dir)
struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
ret = init_sa_dir(&ctx->encrypt);
ret = init_sa_dir(&ctx->decrypt);
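/*
 * Each tfm keeps two independent NPE contexts, ctx->encrypt and
 * ctx->decrypt, since the two directions use different config words
 * (CIPH_ENCR/CIPH_DECR) and, for AES, different key schedules.
 */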
static int init_tfm_ablk(struct crypto_tfm *tfm)
tfm->crt_ablkcipher.reqsize = sizeof(struct ablk_ctx);
return init_tfm(tfm);
static int init_tfm_aead(struct crypto_tfm *tfm)
tfm->crt_aead.reqsize = sizeof(struct aead_ctx);
return init_tfm(tfm);
struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
u32 pad_phys, buf_phys;
crypt = get_crypt_desc_emerg();
memcpy(pad, key, key_len);
qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
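/* Work is submitted by pushing the descriptor's bus address onto the
 * NPE's send queue; completion comes back on the receive queue. */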
u32 itarget, otarget, npe_ctx_addr;
unsigned char *cinfo;
int init_len, ret = 0;
struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
cfgword = algo->cfgword | (authsize << 6);
cfgword ^= 0xAA000000;
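/*
 * The XOR with 0xAA000000 flips the byte-swap control bits in the
 * hash config word, which appears to be needed when the core runs
 * little-endian.
 */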
init_len, npe_ctx_addr, key, key_len);
init_len, npe_ctx_addr, key, key_len);
static int gen_rev_aes_key(struct crypto_tfm *tfm)
struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
crypt = get_crypt_desc_emerg();
qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
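/*
 * AES decryption needs the inverse key schedule.  Rather than compute
 * it on the CPU, the driver queues an NPE_OP_ENC_GEN_KEY operation
 * (CTL_FLAG_GEN_REVAES) and lets the NPE derive and store the reverse
 * key in the decrypt-direction context.
 */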
static int setup_cipher(struct crypto_tfm *tfm, int encrypt,
		const u8 *key, int key_len)
struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
cipher_cfg = cipher_cfg_enc(tfm);
cipher_cfg = cipher_cfg_dec(tfm);
cipher_cfg |= keylen_cfg;
const u32 *K = (const u32 *)key;
if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
	     !((K[2] ^ K[4]) | (K[3] ^ K[5]))))
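/*
 * Degenerate 3DES key check: if K1 == K2 or K2 == K3 (each half
 * compared as two 32-bit words), 3DES collapses to single DES, so
 * such keys are rejected as weak.
 */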
cinfo += sizeof(cipher_cfg);
memcpy(cinfo, key, key_len);
if ((cipher_cfg & MOD_AES) && !encrypt) {
	return gen_rev_aes_key(tfm);
for (; nbytes > 0; sg = scatterwalk_sg_next(sg)) {
unsigned int key_len)
struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
u32 *flags = &tfm->base.crt_flags;
ret = setup_cipher(&tfm->base, 0, key, key_len);
ret = setup_cipher(&tfm->base, 1, key, key_len);
*flags &= ~CRYPTO_TFM_RES_WEAK_KEY;
unsigned int key_len)
struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
return ablk_setkey(tfm, key, key_len);
struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
unsigned ivsize = crypto_ablkcipher_ivsize(tfm);
unsigned int nbytes = req->nbytes;
struct ablk_ctx *req_ctx = ablkcipher_request_ctx(req);
crypt = get_crypt_desc();
if (req->src != req->dst) {
if (!chainup_buffers(dev, req->dst, nbytes, &dst_hook,
req_ctx->dst = dst_hook.next;
crypt->dst_buf = dst_hook.phys_next;
if (!chainup_buffers(dev, req->src, nbytes, &src_hook,
		     flags, src_direction))
req_ctx->src = src_hook.next;
crypt->src_buf = src_hook.phys_next;
qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
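/*
 * chainup_buffers() walks the scatterlist and emits a linked chain of
 * NPE buffer descriptors; the hook entries above only exist to seed
 * the chain, so the real head is hook.next / hook.phys_next.
 */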
free_buf_chain(dev, req_ctx->src, crypt->src_buf);
if (req->src != req->dst) {
	free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
return ablk_perform(req, 1);
return ablk_perform(req, 0);
struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ret = ablk_perform(req, 1);
if (start < offset + sg->length)
sg = scatterwalk_sg_next(sg);
return (start + nbytes > offset + sg->length);
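/*
 * hmac_inconsistent() returns true when the region holding the ICV
 * straddles a scatterlist element; in that case the ICV is bounced
 * through a linear buffer (cf. finish_scattered_hmac() above).
 */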
static int aead_perform(struct aead_request *req, int encrypt,
		int cryptoffset, int eff_cryptlen, u8 *iv)
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
unsigned ivsize = crypto_aead_ivsize(tfm);
unsigned authsize = crypto_aead_authsize(tfm);
unsigned int cryptlen;
struct aead_ctx *req_ctx = aead_request_ctx(req);
cryptlen = req->cryptlen - authsize;
eff_cryptlen -= authsize;
crypt = get_crypt_desc();
if (req->src != req->dst) {
buf = chainup_buffers(dev, req->assoc, req->assoclen, &src_hook,
sg_set_buf(&req_ctx->ivlist, iv, ivsize);
buf = chainup_buffers(dev, &req_ctx->ivlist, ivsize, buf, flags,
if (unlikely(hmac_inconsistent(req->src, cryptlen, authsize))) {
req->src, cryptlen, authsize, 0);
buf = chainup_buffers(dev, req->src, cryptlen + authsize, buf, flags,
	goto free_hmac_virt;
qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
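/*
 * The AEAD source chain is built in NPE processing order: associated
 * data, then the IV (via the one-entry ivlist scatterlist), then
 * cryptlen + authsize bytes of payload.
 */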
static int aead_setup(struct crypto_aead *tfm, unsigned int authsize)
struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
u32 *flags = &tfm->base.crt_flags;
unsigned digest_len = crypto_aead_alg(tfm)->maxauthsize;
ret = setup_auth(&tfm->base, 0, authsize, ctx->authkey,
ret = setup_auth(&tfm->base, 1, authsize, ctx->authkey,
if (*flags & CRYPTO_TFM_RES_WEAK_KEY) {
*flags &= ~CRYPTO_TFM_RES_WEAK_KEY;
static int aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
int max = crypto_aead_alg(tfm)->maxauthsize >> 2;
if ((authsize >> 2) < 1 || (authsize >> 2) > max || (authsize & 3))
return aead_setup(tfm, authsize);
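/*
 * The NPE encodes the ICV length in units of 4 bytes (authsize << 6
 * in setup_auth() equals (authsize / 4) << 8), so only multiples of 4
 * between 4 and maxauthsize are accepted here.
 */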
static int aead_setkey(struct crypto_aead *tfm, const u8 *key,
		unsigned int keylen)
struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
if (!RTA_OK(rta, keylen))
if (keylen < ctx->enckey_len)
return aead_setup(tfm, crypto_aead_authsize(tfm));
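/*
 * authenc() keys arrive as an rtattr-framed blob: a
 * crypto_authenc_key_param header giving enckeylen, followed by the
 * authentication key and then the encryption key, hence the RTA_OK
 * validation above.
 */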
unsigned ivsize = crypto_aead_ivsize(crypto_aead_reqtfm(req));
return aead_perform(req, 1, req->assoclen + ivsize,
unsigned ivsize = crypto_aead_ivsize(crypto_aead_reqtfm(req));
return aead_perform(req, 0, req->assoclen + ivsize,
struct crypto_aead *tfm = aead_givcrypt_reqtfm(req);
struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
unsigned len, ivsize = crypto_aead_ivsize(tfm);
if (ivsize > sizeof(u64)) {
memcpy(req->giv + ivsize - len, &seq, len);
return aead_perform(&req->areq, 1, req->areq.assoclen,
		req->areq.cryptlen + ivsize, req->giv);
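/*
 * The generated IV is the big-endian request sequence number,
 * zero-padded at the front when ivsize exceeds sizeof(u64); it is
 * handed to aead_perform() as part of the text to process, which is
 * why the length above is cryptlen + ivsize.
 */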
static struct ixp_alg ixp4xx_algos[] = {
.cra_name = "cbc(des)",
.cra_u = { .ablkcipher = {
.cra_name = "ecb(des)",
.cra_u = { .ablkcipher = {
.cra_name = "cbc(des3_ede)",
.cra_u = { .ablkcipher = {
.cra_name = "ecb(des3_ede)",
.cra_u = { .ablkcipher = {
.cra_name = "cbc(aes)",
.cra_u = { .ablkcipher = {
.cra_name = "ecb(aes)",
.cra_u = { .ablkcipher = {
.cra_name = "ctr(aes)",
.cra_u = { .ablkcipher = {
.cra_name = "rfc3686(ctr(aes))",
.cra_u = { .ablkcipher = {
	.setkey = ablk_rfc3686_setkey,
	.encrypt = ablk_rfc3686_crypt,
	.decrypt = ablk_rfc3686_crypt }
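/*
 * rfc3686(ctr(aes)) needs its own handlers: the last four key bytes
 * are the per-SA nonce rather than AES key material, and each request
 * builds a counter block of nonce || IV || 1 (RFC 3686) before
 * falling through to the plain CTR path.
 */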
.cra_name = "authenc(hmac(md5),cbc(des))",
.cra_u = { .aead = {
.hash = &hash_alg_md5,
.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
.cra_u = { .aead = {
.hash = &hash_alg_md5,
.cra_name = "authenc(hmac(sha1),cbc(des))",
.cra_u = { .aead = {
.hash = &hash_alg_sha1,
.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
.cra_u = { .aead = {
.hash = &hash_alg_sha1,
.cra_name = "authenc(hmac(md5),cbc(aes))",
.cra_u = { .aead = {
.hash = &hash_alg_md5,
.cra_name = "authenc(hmac(sha1),cbc(aes))",
.cra_u = { .aead = {
.hash = &hash_alg_sha1,
#define IXP_POSTFIX "-ixp4xx"
static int __init ixp_module_init(void)
err = init_ixp_crypto();
for (i = 0; i < num; i++) {
if (!support_aes && (ixp4xx_algos[i].cfg_enc & MOD_AES)) {
if (!ixp4xx_algos[i].hash) {
if (!cra->cra_ablkcipher.setkey)
	cra->cra_ablkcipher.setkey = ablk_setkey;
if (!cra->cra_ablkcipher.encrypt)
if (!cra->cra_ablkcipher.decrypt)
cra->cra_aead.setkey = aead_setkey;
cra->cra_aead.setauthsize = aead_setauthsize;
cra->cra_aead.encrypt = aead_encrypt;
cra->cra_aead.decrypt = aead_decrypt;
cra->cra_aead.givencrypt = aead_givencrypt;
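/*
 * Registration fills in defaults only where the table entry left a
 * callback NULL, so entries like rfc3686(ctr(aes)) keep their
 * specialized setkey/encrypt/decrypt handlers.
 */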
static void __exit ixp_module_exit(void)
for (i = 0; i < num; i++) {
	if (ixp4xx_algos[i].registered)
release_ixp_crypto();