/* Registration priority for these algs; high value so they outrank generic
 * software implementations in the crypto API (NOTE(review): presumed intent,
 * consistent with typical hardware-driver priorities — confirm). */
60 #define CAAM_CRA_PRIORITY 3000
/* Upper bound on key material stored per context: an AES max-size cipher key
 * plus two SHA-512-digest-sized halves (presumably the split HMAC key — TODO
 * confirm against gen_split_aead_key). */
62 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
63 SHA512_DIGEST_SIZE * 2)
/* 16 bytes — matches the AES block size, the largest IV used here
 * (NOTE(review): assumption; the CBC templates below use AES/DES/3DES). */
65 #define CAAM_MAX_IV_LENGTH 16
/* Job-descriptor overhead: 5 command words + 3 pointer words. */
68 #define DESC_JOB_IO_LEN (CAAM_CMD_SZ * 5 + CAAM_PTR_SZ * 3)
/* Shared-descriptor length budgets (in CAAM command words) for each AEAD
 * operation type; GIVENC extends ENC by the extra IV-generation commands. */
70 #define DESC_AEAD_BASE (4 * CAAM_CMD_SZ)
71 #define DESC_AEAD_ENC_LEN (DESC_AEAD_BASE + 16 * CAAM_CMD_SZ)
72 #define DESC_AEAD_DEC_LEN (DESC_AEAD_BASE + 21 * CAAM_CMD_SZ)
73 #define DESC_AEAD_GIVENC_LEN (DESC_AEAD_ENC_LEN + 7 * CAAM_CMD_SZ)
/* Same budgets for the ablkcipher (plain block-cipher) descriptors. */
75 #define DESC_ABLKCIPHER_BASE (3 * CAAM_CMD_SZ)
76 #define DESC_ABLKCIPHER_ENC_LEN (DESC_ABLKCIPHER_BASE + \
78 #define DESC_ABLKCIPHER_DEC_LEN (DESC_ABLKCIPHER_BASE + \
81 #define DESC_MAX_USED_BYTES (DESC_AEAD_GIVENC_LEN + \
83 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
/* Two-level stringification so macro arguments (e.g. __LINE__) are expanded
 * before being stringized; str() is defined outside this view. */
87 #define xstr(s) str(s)
/* debug() maps to printk when debugging is on, and to nothing otherwise.
 * NOTE(review): the two definitions below are presumably selected by an
 * #ifdef DEBUG / #else not visible in this chunk — confirm. */
89 #define debug(format, arg...) printk(format, arg)
91 #define debug(format, arg...)
97 u32 *jump_cmd, *uncond_jump_cmd;
103 set_jump_tgt_here(desc, jump_cmd);
106 set_jump_tgt_here(desc, uncond_jump_cmd);
113 static inline void append_dec_shr_done(
u32 *
desc)
118 set_jump_tgt_here(desc, jump_cmd);
136 static inline void aead_append_ld_iv(
u32 *
desc,
int ivsize)
147 static inline void ablkcipher_append_src_dst(
u32 *
desc)
/* Bit flags kept in the givencrypt 'contig' mask (cleared via
 * contig &= ~GIV_*_CONTIG below) recording whether the source and
 * destination data are DMA-contiguous. */
160 #define GIV_SRC_CONTIG 1
161 #define GIV_DST_CONTIG (1 << 1)
188 if (keys_fit_inline) {
192 append_key_as_imm(desc, (
void *)ctx->
key +
214 append_key_aead(desc, ctx, keys_fit_inline);
216 set_jump_tgt_here(desc, key_jump_cmd);
222 static int aead_set_sh_desc(
struct crypto_aead *aead)
227 bool keys_fit_inline =
false;
228 u32 *key_jump_cmd, *jump_cmd;
242 keys_fit_inline =
true;
247 init_sh_desc_key_aead(desc, ctx, keys_fit_inline);
265 aead_append_ld_iv(desc, tfm->
ivsize);
284 dev_err(jrdev,
"unable to map shared descriptor\n");
288 print_hex_dump(
KERN_ERR,
"aead enc shdesc@"xstr(__LINE__)
": ",
290 desc_bytes(desc), 1);
300 keys_fit_inline =
true;
311 append_key_aead(desc, ctx, keys_fit_inline);
315 set_jump_tgt_here(desc, key_jump_cmd);
317 set_jump_tgt_here(desc, jump_cmd);
334 aead_append_ld_iv(desc, tfm->ivsize);
336 append_dec_op1(desc, ctx->class1_alg_type);
346 append_dec_shr_done(desc);
352 dev_err(jrdev,
"unable to map shared descriptor\n");
356 print_hex_dump(
KERN_ERR,
"aead dec shdesc@"xstr(__LINE__)
": ",
358 desc_bytes(desc), 1);
368 keys_fit_inline =
true;
373 init_sh_desc_key_aead(desc, ctx, keys_fit_inline);
420 append_seq_fifo_load(desc, tfm->
ivsize,
435 dev_err(jrdev,
"unable to map shared descriptor\n");
439 print_hex_dump(
KERN_ERR,
"aead givenc shdesc@"xstr(__LINE__)
": ",
441 desc_bytes(desc), 1);
447 static int aead_setauthsize(
struct crypto_aead *authenc,
448 unsigned int authsize)
450 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
453 aead_set_sh_desc(authenc);
458 static u32 gen_split_aead_key(
struct caam_ctx *ctx,
const u8 *key_in,
467 const u8 *
key,
unsigned int keylen)
470 static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
471 struct caam_ctx *ctx = crypto_aead_ctx(aead);
473 struct rtattr *rta = (
void *)key;
475 unsigned int authkeylen;
485 if (keylen < enckeylen)
500 keylen, enckeylen, authkeylen);
507 ret = gen_split_aead_key(ctx, key, authkeylen);
518 dev_err(jrdev,
"unable to map key i/o memory\n");
529 ret = aead_set_sh_desc(aead);
542 const u8 *key,
unsigned int keylen)
544 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
548 u32 *key_jump_cmd, *jump_cmd;
560 dev_err(jrdev,
"unable to map key i/o memory\n");
573 append_key_as_imm(desc, (
void *)ctx->
key, ctx->
enckeylen,
577 set_jump_tgt_here(desc, key_jump_cmd);
591 ablkcipher_append_src_dst(desc);
597 dev_err(jrdev,
"unable to map shared descriptor\n");
601 print_hex_dump(
KERN_ERR,
"ablkcipher enc shdesc@"xstr(__LINE__)
": ",
603 desc_bytes(desc), 1);
614 append_key_as_imm(desc, (
void *)ctx->
key, ctx->
enckeylen,
620 set_jump_tgt_here(desc, key_jump_cmd);
622 set_jump_tgt_here(desc, jump_cmd);
632 ablkcipher_append_src_dst(desc);
635 append_dec_shr_done(desc);
641 dev_err(jrdev,
"unable to map shared descriptor\n");
646 print_hex_dump(
KERN_ERR,
"ablkcipher dec shdesc@"xstr(__LINE__)
": ",
648 desc_bytes(desc), 1);
708 bool src_chained,
int dst_nents,
bool dst_chained,
713 dma_unmap_sg_chained(dev, src, src_nents ? : 1,
DMA_TO_DEVICE,
718 dma_unmap_sg_chained(dev, src, src_nents ? : 1,
729 static void aead_unmap(
struct device *dev,
733 struct crypto_aead *aead = crypto_aead_reqtfm(req);
734 int ivsize = crypto_aead_ivsize(aead);
739 caam_unmap(dev, req->
src, req->
dst,
745 static void ablkcipher_unmap(
struct device *dev,
750 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
752 caam_unmap(dev, req->
src, req->
dst,
758 static void aead_encrypt_done(
struct device *jrdev,
u32 *desc,
u32 err,
764 struct crypto_aead *aead = crypto_aead_reqtfm(req);
765 struct caam_ctx *ctx = crypto_aead_ctx(aead);
766 int ivsize = crypto_aead_ivsize(aead);
768 dev_err(jrdev,
"%s %d: err 0x%x\n", __func__, __LINE__, err);
780 aead_unmap(jrdev, edesc, req);
797 aead_request_complete(req, err);
800 static void aead_decrypt_done(
struct device *jrdev,
u32 *desc,
u32 err,
806 struct crypto_aead *aead = crypto_aead_reqtfm(req);
807 struct caam_ctx *ctx = crypto_aead_ctx(aead);
808 int ivsize = crypto_aead_ivsize(aead);
810 dev_err(jrdev,
"%s %d: err 0x%x\n", __func__, __LINE__, err);
831 aead_unmap(jrdev, edesc, req);
842 ((
char *)sg_virt(req->
assoc) -
sizeof(
struct iphdr)),
848 print_hex_dump(
KERN_ERR,
"sglastout@"xstr(__LINE__)
": ",
856 aead_request_complete(req, err);
859 static void ablkcipher_encrypt_done(
struct device *jrdev,
u32 *desc,
u32 err,
866 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
868 dev_err(jrdev,
"%s %d: err 0x%x\n", __func__, __LINE__, err);
889 ablkcipher_unmap(jrdev, edesc, req);
892 ablkcipher_request_complete(req, err);
895 static void ablkcipher_decrypt_done(
struct device *jrdev,
u32 *desc,
u32 err,
902 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
904 dev_err(jrdev,
"%s %d: err 0x%x\n", __func__, __LINE__, err);
924 ablkcipher_unmap(jrdev, edesc, req);
927 ablkcipher_request_complete(req, err);
938 struct crypto_aead *aead = crypto_aead_reqtfm(req);
939 struct caam_ctx *ctx = crypto_aead_ctx(aead);
940 int ivsize = crypto_aead_ivsize(aead);
943 u32 out_options = 0, in_options;
945 int len, sec4_sg_index = 0;
948 debug(
"assoclen %d cryptlen %d authsize %d\n",
961 desc_bytes(sh_desc), 1);
964 len = desc_len(sh_desc);
977 append_seq_in_ptr(desc, src_dma, req->
assoclen + ivsize +
978 req->
cryptlen - authsize, in_options);
980 append_seq_in_ptr(desc, src_dma, req->
assoclen + ivsize +
1002 append_seq_out_ptr(desc, dst_dma, req->
cryptlen, out_options);
1004 append_seq_out_ptr(desc, dst_dma, req->
cryptlen - authsize,
1016 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1017 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1018 int ivsize = crypto_aead_ivsize(aead);
1021 u32 out_options = 0, in_options;
1023 int len, sec4_sg_index = 0;
1026 debug(
"assoclen %d cryptlen %d authsize %d\n",
1038 desc_bytes(sh_desc), 1);
1041 len = desc_len(sh_desc);
1052 append_seq_in_ptr(desc, src_dma, req->
assoclen + ivsize +
1053 req->
cryptlen - authsize, in_options);
1070 append_seq_out_ptr(desc, dst_dma, ivsize + req->
cryptlen, out_options);
1076 static void init_ablkcipher_job(
u32 *sh_desc,
dma_addr_t ptr,
1082 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1084 u32 out_options = 0, in_options;
1086 int len, sec4_sg_index = 0;
1097 len = desc_len(sh_desc);
1105 sec4_sg_index += (iv_contig ? 0 : 1) + edesc->
src_nents;
1108 append_seq_in_ptr(desc, src_dma, req->
nbytes + ivsize, in_options);
1127 append_seq_out_ptr(desc, dst_dma, req->
nbytes, out_options);
1134 int desc_bytes,
bool *all_contig_ptr)
1136 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1137 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1141 int assoc_nents, src_nents, dst_nents = 0;
1145 bool all_contig =
true;
1146 bool assoc_chained =
false, src_chained =
false, dst_chained =
false;
1147 int ivsize = crypto_aead_ivsize(aead);
1156 sgc = dma_map_sg_chained(jrdev, req->
assoc, assoc_nents ? : 1,
1159 sgc = dma_map_sg_chained(jrdev, req->
src, src_nents ? : 1,
1162 sgc = dma_map_sg_chained(jrdev, req->
src, src_nents ? : 1,
1164 sgc = dma_map_sg_chained(jrdev, req->
dst, dst_nents ? : 1,
1171 iv_dma || src_nents || iv_dma + ivsize !=
1174 assoc_nents = assoc_nents ? : 1;
1175 src_nents = src_nents ? : 1;
1176 sec4_sg_len = assoc_nents + 1 +
src_nents;
1184 sec4_sg_bytes,
GFP_DMA | flags);
1186 dev_err(jrdev,
"could not allocate extended descriptor\n");
1190 edesc->assoc_nents = assoc_nents;
1191 edesc->assoc_chained = assoc_chained;
1192 edesc->src_nents = src_nents;
1193 edesc->src_chained = src_chained;
1194 edesc->dst_nents = dst_nents;
1195 edesc->dst_chained = dst_chained;
1196 edesc->iv_dma = iv_dma;
1197 edesc->sec4_sg_bytes = sec4_sg_bytes;
1198 edesc->sec4_sg = (
void *)edesc +
sizeof(
struct aead_edesc) +
1202 *all_contig_ptr = all_contig;
1206 sg_to_sec4_sg(req->
assoc,
1207 (assoc_nents ? : 1),
1210 sec4_sg_index += assoc_nents ? : 1;
1211 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
1214 sg_to_sec4_sg_last(req->
src,
1218 sec4_sg_index += src_nents ? : 1;
1221 sg_to_sec4_sg_last(req->
dst, dst_nents,
1222 edesc->sec4_sg + sec4_sg_index, 0);
1231 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1232 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1244 return PTR_ERR(edesc);
1250 print_hex_dump(
KERN_ERR,
"aead jobdesc@"xstr(__LINE__)
": ",
1252 desc_bytes(edesc->
hw_desc), 1);
1260 aead_unmap(jrdev, edesc, req);
1270 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1271 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1281 return PTR_ERR(edesc);
1293 print_hex_dump(
KERN_ERR,
"aead jobdesc@"xstr(__LINE__)
": ",
1295 desc_bytes(edesc->
hw_desc), 1);
1303 aead_unmap(jrdev, edesc, req);
1314 *greq,
int desc_bytes,
1318 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1319 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1323 int assoc_nents, src_nents, dst_nents = 0;
1328 int ivsize = crypto_aead_ivsize(aead);
1329 bool assoc_chained =
false, src_chained =
false, dst_chained =
false;
1338 sgc = dma_map_sg_chained(jrdev, req->
assoc, assoc_nents ? : 1,
1341 sgc = dma_map_sg_chained(jrdev, req->
src, src_nents ? : 1,
1344 sgc = dma_map_sg_chained(jrdev, req->
src, src_nents ? : 1,
1346 sgc = dma_map_sg_chained(jrdev, req->
dst, dst_nents ? : 1,
1354 contig &= ~GIV_SRC_CONTIG;
1356 contig &= ~GIV_DST_CONTIG;
1358 dst_nents = dst_nents ? : 1;
1361 if (!(contig & GIV_SRC_CONTIG)) {
1362 assoc_nents = assoc_nents ? : 1;
1363 src_nents = src_nents ? : 1;
1364 sec4_sg_len += assoc_nents + 1 +
src_nents;
1366 contig &= ~GIV_DST_CONTIG;
1374 sec4_sg_bytes,
GFP_DMA | flags);
1376 dev_err(jrdev,
"could not allocate extended descriptor\n");
1380 edesc->assoc_nents = assoc_nents;
1381 edesc->assoc_chained = assoc_chained;
1382 edesc->src_nents = src_nents;
1383 edesc->src_chained = src_chained;
1384 edesc->dst_nents = dst_nents;
1385 edesc->dst_chained = dst_chained;
1386 edesc->iv_dma = iv_dma;
1387 edesc->sec4_sg_bytes = sec4_sg_bytes;
1388 edesc->sec4_sg = (
void *)edesc +
sizeof(
struct aead_edesc) +
1392 *contig_ptr = contig;
1395 if (!(contig & GIV_SRC_CONTIG)) {
1396 sg_to_sec4_sg(req->
assoc, assoc_nents,
1399 sec4_sg_index += assoc_nents;
1400 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
1403 sg_to_sec4_sg_last(req->
src, src_nents,
1406 sec4_sg_index += src_nents;
1408 if (
unlikely(req->
src != req->
dst && !(contig & GIV_DST_CONTIG))) {
1409 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
1412 sg_to_sec4_sg_last(req->
dst, dst_nents,
1413 edesc->sec4_sg + sec4_sg_index, 0);
1423 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1424 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1437 return PTR_ERR(edesc);
1449 print_hex_dump(
KERN_ERR,
"aead jobdesc@"xstr(__LINE__)
": ",
1451 desc_bytes(edesc->
hw_desc), 1);
1459 aead_unmap(jrdev, edesc, req);
1470 *req,
int desc_bytes,
1471 bool *iv_contig_out)
1474 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1479 int src_nents, dst_nents = 0, sec4_sg_bytes;
1482 bool iv_contig =
false;
1484 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1485 bool src_chained =
false, dst_chained =
false;
1490 if (req->
dst != req->
src)
1494 sgc = dma_map_sg_chained(jrdev, req->
src, src_nents ? : 1,
1497 sgc = dma_map_sg_chained(jrdev, req->
src, src_nents ? : 1,
1499 sgc = dma_map_sg_chained(jrdev, req->
dst, dst_nents ? : 1,
1511 src_nents = src_nents ? : 1;
1512 sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
1517 sec4_sg_bytes,
GFP_DMA | flags);
1519 dev_err(jrdev,
"could not allocate extended descriptor\n");
1533 dma_to_sec4_sg_one(edesc->
sec4_sg, iv_dma, ivsize, 0);
1534 sg_to_sec4_sg_last(req->
src, src_nents,
1536 sec4_sg_index += 1 + src_nents;
1540 sg_to_sec4_sg_last(req->
dst, dst_nents,
1541 edesc->
sec4_sg + sec4_sg_index, 0);
1549 print_hex_dump(
KERN_ERR,
"ablkcipher sec4_sg@"xstr(__LINE__)
": ",
1554 *iv_contig_out = iv_contig;
1562 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1572 return PTR_ERR(edesc);
1578 print_hex_dump(
KERN_ERR,
"ablkcipher jobdesc@"xstr(__LINE__)
": ",
1580 desc_bytes(edesc->
hw_desc), 1);
1588 ablkcipher_unmap(jrdev, edesc, req);
1599 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1609 return PTR_ERR(edesc);
1616 print_hex_dump(
KERN_ERR,
"ablkcipher jobdesc@"xstr(__LINE__)
": ",
1618 desc_bytes(edesc->
hw_desc), 1);
1625 ablkcipher_unmap(jrdev, edesc, req);
/* Shorthand accessors for the per-algorithm template union members,
 * used by the driver_algs[] initializers below. */
1632 #define template_aead template_u.aead
1633 #define template_ablkcipher template_u.ablkcipher
1659 .name =
"authenc(hmac(md5),cbc(aes))",
1660 .driver_name =
"authenc-hmac-md5-cbc-aes-caam",
1664 .setkey = aead_setkey,
1665 .setauthsize = aead_setauthsize,
1666 .encrypt = aead_encrypt,
1667 .decrypt = aead_decrypt,
1668 .givencrypt = aead_givencrypt,
1669 .geniv =
"<built-in>",
1678 .name =
"authenc(hmac(sha1),cbc(aes))",
1679 .driver_name =
"authenc-hmac-sha1-cbc-aes-caam",
1683 .setkey = aead_setkey,
1684 .setauthsize = aead_setauthsize,
1685 .encrypt = aead_encrypt,
1686 .decrypt = aead_decrypt,
1687 .givencrypt = aead_givencrypt,
1688 .geniv =
"<built-in>",
1697 .name =
"authenc(hmac(sha224),cbc(aes))",
1698 .driver_name =
"authenc-hmac-sha224-cbc-aes-caam",
1701 .setkey = aead_setkey,
1702 .setauthsize = aead_setauthsize,
1703 .encrypt = aead_encrypt,
1704 .decrypt = aead_decrypt,
1705 .givencrypt = aead_givencrypt,
1706 .geniv =
"<built-in>",
1716 .name =
"authenc(hmac(sha256),cbc(aes))",
1717 .driver_name =
"authenc-hmac-sha256-cbc-aes-caam",
1721 .setkey = aead_setkey,
1722 .setauthsize = aead_setauthsize,
1723 .encrypt = aead_encrypt,
1724 .decrypt = aead_decrypt,
1725 .givencrypt = aead_givencrypt,
1726 .geniv =
"<built-in>",
1736 .name =
"authenc(hmac(sha384),cbc(aes))",
1737 .driver_name =
"authenc-hmac-sha384-cbc-aes-caam",
1740 .setkey = aead_setkey,
1741 .setauthsize = aead_setauthsize,
1742 .encrypt = aead_encrypt,
1743 .decrypt = aead_decrypt,
1744 .givencrypt = aead_givencrypt,
1745 .geniv =
"<built-in>",
1756 .name =
"authenc(hmac(sha512),cbc(aes))",
1757 .driver_name =
"authenc-hmac-sha512-cbc-aes-caam",
1761 .setkey = aead_setkey,
1762 .setauthsize = aead_setauthsize,
1763 .encrypt = aead_encrypt,
1764 .decrypt = aead_decrypt,
1765 .givencrypt = aead_givencrypt,
1766 .geniv =
"<built-in>",
1776 .name =
"authenc(hmac(md5),cbc(des3_ede))",
1777 .driver_name =
"authenc-hmac-md5-cbc-des3_ede-caam",
1781 .setkey = aead_setkey,
1782 .setauthsize = aead_setauthsize,
1783 .encrypt = aead_encrypt,
1784 .decrypt = aead_decrypt,
1785 .givencrypt = aead_givencrypt,
1786 .geniv =
"<built-in>",
1795 .name =
"authenc(hmac(sha1),cbc(des3_ede))",
1796 .driver_name =
"authenc-hmac-sha1-cbc-des3_ede-caam",
1800 .setkey = aead_setkey,
1801 .setauthsize = aead_setauthsize,
1802 .encrypt = aead_encrypt,
1803 .decrypt = aead_decrypt,
1804 .givencrypt = aead_givencrypt,
1805 .geniv =
"<built-in>",
1814 .name =
"authenc(hmac(sha224),cbc(des3_ede))",
1815 .driver_name =
"authenc-hmac-sha224-cbc-des3_ede-caam",
1818 .setkey = aead_setkey,
1819 .setauthsize = aead_setauthsize,
1820 .encrypt = aead_encrypt,
1821 .decrypt = aead_decrypt,
1822 .givencrypt = aead_givencrypt,
1823 .geniv =
"<built-in>",
1833 .name =
"authenc(hmac(sha256),cbc(des3_ede))",
1834 .driver_name =
"authenc-hmac-sha256-cbc-des3_ede-caam",
1838 .setkey = aead_setkey,
1839 .setauthsize = aead_setauthsize,
1840 .encrypt = aead_encrypt,
1841 .decrypt = aead_decrypt,
1842 .givencrypt = aead_givencrypt,
1843 .geniv =
"<built-in>",
1853 .name =
"authenc(hmac(sha384),cbc(des3_ede))",
1854 .driver_name =
"authenc-hmac-sha384-cbc-des3_ede-caam",
1857 .setkey = aead_setkey,
1858 .setauthsize = aead_setauthsize,
1859 .encrypt = aead_encrypt,
1860 .decrypt = aead_decrypt,
1861 .givencrypt = aead_givencrypt,
1862 .geniv =
"<built-in>",
1872 .name =
"authenc(hmac(sha512),cbc(des3_ede))",
1873 .driver_name =
"authenc-hmac-sha512-cbc-des3_ede-caam",
1877 .setkey = aead_setkey,
1878 .setauthsize = aead_setauthsize,
1879 .encrypt = aead_encrypt,
1880 .decrypt = aead_decrypt,
1881 .givencrypt = aead_givencrypt,
1882 .geniv =
"<built-in>",
1892 .name =
"authenc(hmac(md5),cbc(des))",
1893 .driver_name =
"authenc-hmac-md5-cbc-des-caam",
1897 .setkey = aead_setkey,
1898 .setauthsize = aead_setauthsize,
1899 .encrypt = aead_encrypt,
1900 .decrypt = aead_decrypt,
1901 .givencrypt = aead_givencrypt,
1902 .geniv =
"<built-in>",
1911 .name =
"authenc(hmac(sha1),cbc(des))",
1912 .driver_name =
"authenc-hmac-sha1-cbc-des-caam",
1916 .setkey = aead_setkey,
1917 .setauthsize = aead_setauthsize,
1918 .encrypt = aead_encrypt,
1919 .decrypt = aead_decrypt,
1920 .givencrypt = aead_givencrypt,
1921 .geniv =
"<built-in>",
1930 .name =
"authenc(hmac(sha224),cbc(des))",
1931 .driver_name =
"authenc-hmac-sha224-cbc-des-caam",
1934 .setkey = aead_setkey,
1935 .setauthsize = aead_setauthsize,
1936 .encrypt = aead_encrypt,
1937 .decrypt = aead_decrypt,
1938 .givencrypt = aead_givencrypt,
1939 .geniv =
"<built-in>",
1949 .name =
"authenc(hmac(sha256),cbc(des))",
1950 .driver_name =
"authenc-hmac-sha256-cbc-des-caam",
1954 .setkey = aead_setkey,
1955 .setauthsize = aead_setauthsize,
1956 .encrypt = aead_encrypt,
1957 .decrypt = aead_decrypt,
1958 .givencrypt = aead_givencrypt,
1959 .geniv =
"<built-in>",
1969 .name =
"authenc(hmac(sha384),cbc(des))",
1970 .driver_name =
"authenc-hmac-sha384-cbc-des-caam",
1973 .setkey = aead_setkey,
1974 .setauthsize = aead_setauthsize,
1975 .encrypt = aead_encrypt,
1976 .decrypt = aead_decrypt,
1977 .givencrypt = aead_givencrypt,
1978 .geniv =
"<built-in>",
1988 .name =
"authenc(hmac(sha512),cbc(des))",
1989 .driver_name =
"authenc-hmac-sha512-cbc-des-caam",
1993 .setkey = aead_setkey,
1994 .setauthsize = aead_setauthsize,
1995 .encrypt = aead_encrypt,
1996 .decrypt = aead_decrypt,
1997 .givencrypt = aead_givencrypt,
1998 .geniv =
"<built-in>",
2010 .driver_name =
"cbc-aes-caam",
2013 .template_ablkcipher = {
2014 .setkey = ablkcipher_setkey,
2015 .encrypt = ablkcipher_encrypt,
2016 .decrypt = ablkcipher_decrypt,
2025 .name =
"cbc(des3_ede)",
2026 .driver_name =
"cbc-3des-caam",
2029 .template_ablkcipher = {
2030 .setkey = ablkcipher_setkey,
2031 .encrypt = ablkcipher_encrypt,
2032 .decrypt = ablkcipher_decrypt,
2042 .driver_name =
"cbc-des-caam",
2045 .template_ablkcipher = {
2046 .setkey = ablkcipher_setkey,
2047 .encrypt = ablkcipher_encrypt,
2048 .decrypt = ablkcipher_decrypt,
2067 static int caam_cra_init(
struct crypto_tfm *tfm)
2072 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
2090 static void caam_cra_exit(
struct crypto_tfm *tfm)
2092 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
2109 static void __exit caam_algapi_exit(
void)
2129 ctrldev = &pdev->
dev;
2130 of_node_put(dev_node);
2152 dev_err(ctrldev,
"failed to allocate t_alg\n");
2173 alg->cra_ablkcipher =
template->template_ablkcipher;
2177 alg->cra_aead =
template->template_aead;
2183 t_alg->
alg_op =
template->alg_op;
2189 static int __init caam_algapi_init(
void)
2208 ctrldev = &pdev->
dev;
2210 of_node_put(dev_node);
2217 for (i = 0; i <
ARRAY_SIZE(driver_algs); i++) {
2223 t_alg = caam_alg_alloc(ctrldev, &driver_algs[i]);
2224 if (IS_ERR(t_alg)) {
2225 err = PTR_ERR(t_alg);
2226 dev_warn(ctrldev,
"%s alg allocation failed\n",
2233 dev_warn(ctrldev,
"%s alg registration failed\n",
2239 !
memcmp(driver_algs[i].
name,
"authenc", 7) &&
2243 name = driver_algs[
i].
name;
2245 memcpy(name + 7,
"esn", 3);
2249 memcpy(name + 7,
"esn", 3);
2257 dev_info(ctrldev,
"%s algorithms registered in /proc/crypto\n",