#include <linux/module.h>
#include <linux/errno.h>
#include <linux/kernel.h>

#include <crypto/aes.h>
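/* Mode and state flag bits carried in dd->flags and per-request mode words. */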
#define FLAGS_MODE_MASK		0x00FF
#define FLAGS_ENCRYPT		BIT(0)
#define FLAGS_CBC		BIT(1)
#define FLAGS_GIV		BIT(2)
#define FLAGS_RNG		BIT(3)
#define FLAGS_OFB		BIT(4)
#define FLAGS_NEW_KEY		BIT(5)
#define FLAGS_NEW_IV		BIT(6)
#define FLAGS_INIT		BIT(7)
#define FLAGS_FAST		BIT(8)
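/* Hardware DMA buffer and key-table geometry (sizes in bytes). */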
#define AES_HW_DMA_BUFFER_SIZE_BYTES	0x4000
#define AES_HW_KEY_TABLE_LENGTH_BYTES	64
#define AES_HW_IV_SIZE			16
#define AES_HW_KEYSCHEDULE_LEN		256
#define AES_IVKEY_SIZE			(AES_HW_KEY_TABLE_LENGTH_BYTES + AES_HW_KEYSCHEDULE_LEN)
#define MEMDMA_DIR_DTOVRAM		0
#define MEMDMA_DIR_VTODRAM		1
#define MEMDMA_DIR_SHIFT		25
#define MEMDMA_NUM_WORDS_SHIFT		12
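/* Key-slot and request-queue limits; slot SSK_SLOT_NUM is reserved for the SSK (secure storage key). */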
#define SSK_SLOT_NUM			4
#define AES_NR_KEYSLOTS			8
#define TEGRA_AES_QUEUE_LENGTH		50
#define DEFAULT_RNG_BLK_SZ		16
#define AES_HW_MAX_ICQ_LENGTH		5
			  int nblocks, int mode, bool upd_iv)
	int i, eng_busy, icq_empty, ret;
		((dd->ctx->keylen * 8) <<
	dev_dbg(dd->dev, "secure_in_sel=0x%x", value);
	} while (eng_busy && !icq_empty);	/* poll until the engine is idle or its command queue drains */
	int eng_busy, icq_empty, dma_busy;
	bool use_ssk = false;
	/* no key slot was assigned: fall back to the secure storage key (SSK) slot */
	if (!dd->ctx->slot) {
		dd->ctx->slot = &ssk;
	} while (eng_busy && !icq_empty && dma_busy);
	} while (eng_busy && !icq_empty);
	int ret = 0, nblocks, total;

	backlog = crypto_get_backlog(&dd->queue);

	spin_unlock_irqrestore(&dd->lock, flags);
	req = ablkcipher_request_cast(async_req);

	dev_dbg(dd->dev, "%s: get new req\n", __func__);

	if (!req->src || !req->dst)
	tfm = crypto_ablkcipher_reqtfm(req);
	rctx = ablkcipher_request_ctx(req);
	ctx = crypto_ablkcipher_ctx(tfm);

	dd->ivlen = crypto_ablkcipher_ivsize(tfm);
	if (((dd->flags & FLAGS_CBC) || (dd->flags & FLAGS_OFB)) && dd->iv) {
		dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret);
	ret = aes_start_crypt(dd, addr_in, addr_out, nblocks,

		dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret);
	WARN_ON((total != 0) && (!in_sg || !out_sg));

	if (dd->req->base.complete)
		dd->req->base.complete(&dd->req->base, ret);
	key_slot = aes_find_key_slot();

	ctx->slot = key_slot;
	ret = clk_prepare_enable(dd->aes_clk);
	BUG_ON(ret);	/* clock enable failed */
	ret = tegra_aes_handle_req(dd);

	clk_disable_unprepare(dd->aes_clk);
	dev_dbg(dd->dev, "nbytes: %d, enc: %d, cbc: %d, ofb: %d\n",
		req->nbytes, !!(mode & FLAGS_ENCRYPT),
		!!(mode & FLAGS_CBC), !!(mode & FLAGS_OFB));
	err = ablkcipher_enqueue_request(&dd->queue, req);

	spin_unlock_irqrestore(&dd->lock, flags);
	return tegra_aes_crypt(req, 0);

	return tegra_aes_crypt(req, FLAGS_CBC);

	return tegra_aes_crypt(req, FLAGS_OFB);
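/*
 * PRNG interface: tegra_aes_get_random() runs the engine with FLAGS_RNG set
 * to produce random blocks, and tegra_aes_rng_reset() reseeds it, mixing in
 * a timestamp-derived value (see the timespec_to_ns() use below).
 */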
static int tegra_aes_get_random(struct crypto_rng *tfm, u8 *rdata,
	ret = clk_prepare_enable(dd->aes_clk);

		dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret);

	clk_disable_unprepare(dd->aes_clk);
static int tegra_aes_rng_reset(struct crypto_rng *tfm, u8 *seed,
		(unsigned int)ctx, (unsigned int)dd);
	key_slot = aes_find_key_slot();

	ctx->slot = key_slot;
	ret = clk_prepare_enable(dd->aes_clk);

		dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret);
	nsec = timespec_to_ns(&ts);

	nsec ^= dd->ctr << 56;

	clk_disable_unprepare(dd->aes_clk);
static int tegra_aes_cra_init(struct crypto_tfm *tfm)

	if (ctx && ctx->slot)
		aes_release_key_slot(ctx->slot);
	.cra_name = "ecb(aes)",
	.cra_driver_name = "ecb-aes-tegra",
	.cra_u.ablkcipher = {
		.setkey = tegra_aes_setkey,
		.encrypt = tegra_aes_ecb_encrypt,
		.decrypt = tegra_aes_ecb_decrypt,
	.cra_name = "cbc(aes)",
	.cra_driver_name = "cbc-aes-tegra",
	.cra_u.ablkcipher = {
		.setkey = tegra_aes_setkey,
		.encrypt = tegra_aes_cbc_encrypt,
		.decrypt = tegra_aes_cbc_decrypt,
	.cra_name = "ofb(aes)",
	.cra_driver_name = "ofb-aes-tegra",
	.cra_u.ablkcipher = {
		.setkey = tegra_aes_setkey,
		.encrypt = tegra_aes_ofb_encrypt,
		.decrypt = tegra_aes_ofb_decrypt,
	.cra_name = "ansi_cprng",
	.cra_driver_name = "rng-aes-tegra",
	.cra_u.rng = {
		.rng_make_random = tegra_aes_get_random,
		.rng_reset = tegra_aes_rng_reset,
		dev_err(dev, "unable to alloc data struct.\n");

	platform_set_drvdata(pdev, dd);

		dev_err(dev, "unable to alloc slot struct.\n");

		dev_err(dev, "invalid resource type: base\n");
			dev_name(&pdev->dev))) {
		dev_err(&pdev->dev, "Couldn't request MEM resource\n");
		dev_err(dev, "can't ioremap register space\n");

		dev_err(dev, "iclock initialization failed.\n");
		dev_err(dd->dev, "iclk set_rate fail(%d)\n", err);
		dev_err(dev, "can not allocate iv/key buffer\n");

		dev_err(dev, "can not allocate dma-in buffer\n");

		dev_err(dev, "can not allocate dma-out buffer\n");

		dev_err(dev, "alloc_workqueue failed\n");
		dev_err(dev, "invalid resource type: irq\n");

		dev_err(dev, "request_irq failed\n");
		INIT_LIST_HEAD(&dd->slots[i].node);

	for (j = 0; j < i; j++)

	dev_err(dev, "%s: initialization failed.\n", __func__);
	{ .compatible = "nvidia,tegra20-aes", },
	{ .compatible = "nvidia,tegra30-aes", },
	.probe = tegra_aes_probe,
		.name = "tegra-aes",
		.of_match_table = tegra_aes_of_match,