#define pr_fmt(fmt) "%s: " fmt, __func__

#include <linux/module.h>
#include <linux/errno.h>
#include <linux/kernel.h>

#include <crypto/aes.h>
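
/*
 * Generic bit-field helpers: FLD_MASK(start, end) builds a mask covering
 * bits end..start (inclusive), and FLD_VAL() shifts a value into that field.
 * For example, FLD_VAL(3, 4, 3) == (3 << 3), i.e. the CTRL key-size field
 * (see AES_REG_CTRL_KEY_SIZE and the key-size programming below) set for a
 * 256-bit key.
 */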
#define FLD_MASK(start, end) (((1 << ((start) - (end) + 1)) - 1) << (end))
#define FLD_VAL(val, start, end) (((val) << (end)) & FLD_MASK(start, end))
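
/*
 * AES module register offsets (relative to the ioremap()ed base) and their
 * bit-field definitions. AES_REG_KEY() counts downward from 0x1C; the
 * x ^ 0x01 term swaps each even/odd pair of key-word indices, presumably so
 * the two 32-bit halves of each 64-bit key register land where the hardware
 * expects them.
 */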
#define AES_REG_KEY(x) (0x1C - ((x ^ 0x01) * 0x04))
#define AES_REG_IV(x) (0x20 + ((x) * 0x04))

#define AES_REG_CTRL 0x30
#define AES_REG_CTRL_CTR_WIDTH (1 << 7)
#define AES_REG_CTRL_CTR (1 << 6)
#define AES_REG_CTRL_CBC (1 << 5)
#define AES_REG_CTRL_KEY_SIZE (3 << 3)
#define AES_REG_CTRL_DIRECTION (1 << 2)
#define AES_REG_CTRL_INPUT_READY (1 << 1)
#define AES_REG_CTRL_OUTPUT_READY (1 << 0)

#define AES_REG_DATA 0x34
#define AES_REG_DATA_N(x) (0x34 + ((x) * 0x04))

#define AES_REG_REV 0x44
#define AES_REG_REV_MAJOR 0xF0
#define AES_REG_REV_MINOR 0x0F

#define AES_REG_MASK 0x48
#define AES_REG_MASK_SIDLE (1 << 6)
#define AES_REG_MASK_START (1 << 5)
#define AES_REG_MASK_DMA_OUT_EN (1 << 3)
#define AES_REG_MASK_DMA_IN_EN (1 << 2)
#define AES_REG_MASK_SOFTRESET (1 << 1)
#define AES_REG_AUTOIDLE (1 << 0)

#define AES_REG_SYSSTATUS 0x4C
#define AES_REG_SYSSTATUS_RESETDONE (1 << 0)

#define DEFAULT_TIMEOUT (5*HZ)
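
/*
 * Software flags: the low bits (covered by FLAGS_MODE_MASK) describe the
 * requested operation, the remaining bits track driver state such as whether
 * the hardware has been initialized and whether the engine is busy.
 */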
#define FLAGS_MODE_MASK 0x000f
#define FLAGS_ENCRYPT BIT(0)
#define FLAGS_CBC BIT(1)
#define FLAGS_GIV BIT(2)

#define FLAGS_INIT BIT(4)
#define FLAGS_FAST BIT(5)
#define FLAGS_BUSY BIT(6)

#define OMAP_AES_QUEUE_LENGTH 1
#define OMAP_AES_CACHE_SIZE 0
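
/*
 * MMIO register access helpers: masked read-modify-write, bulk word writes
 * and a busy-wait for a status bit (presumably bounded by DEFAULT_TIMEOUT).
 */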
	val = omap_aes_read(dd, offset);
	omap_aes_write(dd, offset, val);

	for (; count--; value++, offset += 4)
		omap_aes_write(dd, offset, *value);

	while (!(omap_aes_read(dd, offset) & bit)) {
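
/*
 * Control setup: bring the hardware up via omap_aes_hw_init(), load the key
 * words (presumably into the AES_REG_KEY() registers) and encode the key
 * size, (keylen >> 3) - 1, into bits 4:3 of the control word.
 */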
	err = omap_aes_hw_init(dd);

	key32 = dd->ctx->keylen / sizeof(u32);

	for (i = 0; i < key32; i++) {

	val = FLD_VAL(((dd->ctx->keylen >> 3) - 1), 4, 3);
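
/* DMA completion callback: error statuses reported by the DMA controller are logged. */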
static void omap_aes_dma_callback(int lch, u16 ch_status, void *data)

	pr_err("omap-aes DMA error status: 0x%hx\n", ch_status);
	dev_err(dd->dev, "Unable to request DMA channel\n");

	dev_err(dd->dev, "Unable to request DMA channel\n");

	pr_err("error: %d\n", err);
static void omap_aes_dma_cleanup(struct omap_aes_dev *dd)

	scatterwalk_advance(&walk, start);
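
/*
 * Copy up to buflen bytes (and no more than 'total') between a scatterlist
 * and a linear buffer; 'out' selects the direction. *sg and *offset are
 * advanced as scatterlist entries are consumed.
 */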
static int sg_copy(struct scatterlist **sg, size_t *offset, void *buf,
		   size_t buflen, size_t total, int out)

	unsigned int count, off = 0;

	while (buflen && total) {
		count = min((*sg)->length - *offset, total);
		count = min(count, buflen);

		sg_copy_buf(buf + off, *sg, *offset, count, out);

		if (*offset == (*sg)->length) {
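
/*
 * Map the current request's data and start the DMA transfer. In the direct
 * (presumably FLAGS_FAST) path the mapped scatterlists must cover the whole
 * request, hence the count != dd->total check.
 */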
static int omap_aes_crypt_dma_start(struct omap_aes_dev *dd)

	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(
				crypto_ablkcipher_reqtfm(dd->req));

	if (count != dd->total) {
		pr_err("request length != buffer length\n");

	err = omap_aes_crypt_dma(tfm, addr_in, addr_out, count);
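
/* Complete the active request back to the crypto API with the given error code. */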
static void omap_aes_finish_req(struct omap_aes_dev *dd, int err)

	req->base.complete(&req->base, err);
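
/* Stop the DMA transfer and report an error if not all data was converted. */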
static int omap_aes_crypt_dma_stop(struct omap_aes_dev *dd)

	pr_err("not all data converted: %u\n", count);
static int omap_aes_handle_queue(struct omap_aes_dev *dd,
				 struct ablkcipher_request *req)

	ret = ablkcipher_enqueue_request(&dd->queue, req);

	spin_unlock_irqrestore(&dd->lock, flags);

	backlog = crypto_get_backlog(&dd->queue);

	spin_unlock_irqrestore(&dd->lock, flags);

	req = ablkcipher_request_cast(async_req);

	rctx = ablkcipher_request_ctx(req);
	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));

	err = omap_aes_write_ctrl(dd);

	err = omap_aes_crypt_dma_start(dd);

	omap_aes_finish_req(dd, err);
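
/*
 * Tasklet run when a DMA transfer completes: stop the current transfer,
 * restart DMA if data remains (dd->total), otherwise complete the request
 * and move on to the next queued one.
 */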
static void omap_aes_done_task(unsigned long data)

	err = omap_aes_crypt_dma_stop(dd);

	if (dd->total && !err) {
		err = omap_aes_crypt_dma_start(dd);

	omap_aes_finish_req(dd, err);
	omap_aes_handle_queue(dd, NULL);
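
/* Tasklet that simply kicks the request queue. */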
static void omap_aes_queue_task(unsigned long data)

	omap_aes_handle_queue(dd, NULL);
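
/*
 * Common crypt entry point: reject requests whose length is not a whole
 * number of AES blocks, look up a device for the context and queue the
 * request.
 */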
		crypto_ablkcipher_reqtfm(req));

	pr_err("request size is not a multiple of the AES block size\n");

	dd = omap_aes_find_dev(ctx);

	return omap_aes_handle_queue(dd, req);

	pr_debug("enter, keylen: %d\n", keylen);

	return omap_aes_crypt(req, 0);

static int omap_aes_cra_init(struct crypto_tfm *tfm)

static void omap_aes_cra_exit(struct crypto_tfm *tfm)
	.cra_name = "ecb(aes)",
	.cra_driver_name = "ecb-aes-omap",

	.cra_init = omap_aes_cra_init,
	.cra_exit = omap_aes_cra_exit,
	.cra_u.ablkcipher = {
		.setkey = omap_aes_setkey,
		.encrypt = omap_aes_ecb_encrypt,
		.decrypt = omap_aes_ecb_decrypt,

	.cra_name = "cbc(aes)",
	.cra_driver_name = "cbc-aes-omap",

	.cra_init = omap_aes_cra_init,
	.cra_exit = omap_aes_cra_exit,
	.cra_u.ablkcipher = {
		.setkey = omap_aes_setkey,
		.encrypt = omap_aes_cbc_encrypt,
		.decrypt = omap_aes_cbc_decrypt,
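
/*
 * Platform driver probe: allocate the per-device structure, map the MMIO
 * region, take the interface clock, print the hardware revision, set up DMA
 * and register the algorithms; the error paths below unwind in reverse order.
 */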
	dev_err(dev, "unable to alloc data struct.\n");

	platform_set_drvdata(pdev, dd);

	dev_err(dev, "invalid resource type\n");

	if (IS_ERR(dd->iclk)) {
		dev_err(dev, "clock initialization failed.\n");
		err = PTR_ERR(dd->iclk);

	dev_err(dev, "can't ioremap\n");

	dev_info(dev, "OMAP AES hw accel rev: %u.%u\n",

	err = omap_aes_dma_init(dd);

	INIT_LIST_HEAD(&dd->list);

	for (j = 0; j < i; j++)

	omap_aes_dma_cleanup(dd);

	dev_err(dev, "initialization failed.\n");

	omap_aes_dma_cleanup(dd);

	.probe = omap_aes_probe,
	.remove = omap_aes_remove,
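
/*
 * Module init/exit: refuse to load on unsupported CPUs, otherwise register
 * (and later unregister) the platform driver, presumably via
 * platform_driver_register()/platform_driver_unregister().
 */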
static int __init omap_aes_mod_init(void)

	pr_info("loading %s driver\n", "omap-aes");

	pr_err("Unsupported cpu\n");

static void __exit omap_aes_mod_exit(void)