13 #include <linux/slab.h>
18 #include <linux/kernel.h>
19 #include <linux/module.h>
23 #include <mach/pcie.h>
24 #include <mach/misc_regs.h>
/* Inbound AXI->PCIe window sizes, stored as (size - 1): the setup code below
 * programs each window's limit register as start + SIZE (see the
 * in*_addr_limit writes in the device-init fragment). */
26 #define IN0_MEM_SIZE (200 * 1024 * 1024 - 1)
/* NOTE(review): 0 MB - 1 evaluates to -1 (all-ones); the IN1 window looks
 * intentionally unused/degenerate — confirm against the full address map. */
30 #define IN1_MEM_SIZE (0 * 1024 * 1024 - 1)
31 #define IN_IO_SIZE (20 * 1024 * 1024 - 1)
32 #define IN_CFG0_SIZE (12 * 1024 * 1024 - 1)
33 #define IN_CFG1_SIZE (12 * 1024 * 1024 - 1)
34 #define IN_MSG_SIZE (12 * 1024 * 1024 - 1)
/* Mask for inbound translation; assumes SYSRAM1 size is a power of two —
 * TODO confirm in the platform headers. */
37 #define INBOUND_ADDR_MASK (SPEAR13XX_SYSRAM1_SIZE - 1)
/* Interrupt delivery mode selected via the configfs "int_type" attribute. */
39 #define INT_TYPE_NO_INT 0
40 #define INT_TYPE_INTX 1
41 #define INT_TYPE_MSI 2
68 static void enable_dbi_access(
struct pcie_app_reg
__iomem *app_reg)
71 writel(
readl(&app_reg->slv_armisc) | (1 << AXI_OP_DBI_ACCESS_ID),
72 &app_reg->slv_armisc);
73 writel(
readl(&app_reg->slv_awmisc) | (1 << AXI_OP_DBI_ACCESS_ID),
74 &app_reg->slv_awmisc);
78 static void disable_dbi_access(
struct pcie_app_reg
__iomem *app_reg)
81 writel(
readl(&app_reg->slv_armisc) & ~(1 << AXI_OP_DBI_ACCESS_ID),
82 &app_reg->slv_armisc);
83 writel(
readl(&app_reg->slv_awmisc) & ~(1 << AXI_OP_DBI_ACCESS_ID),
84 &app_reg->slv_awmisc);
/* Fragment of spear_dbi_read_reg(): signature and address computation are not
 * visible in this view.  The visible logic: gate the access window, do one
 * 32-bit read, then narrow to the requested width. */
95 enable_dbi_access(app_reg);
/* Raw 32-bit read; va_address is presumably base + aligned offset —
 * TODO confirm against the full function. */
99 *val =
readl(va_address);
/* Extract the addressed byte/half-word using the offset within the word. */
102 *val = (*val >> (8 * (where & 3))) & 0xff;
104 *val = (*val >> (8 * (where & 3))) & 0xffff;
107 disable_dbi_access(app_reg);
/* Fragment of spear_dbi_write_reg(): DBI access is enabled around the store;
 * sub-word writes land at the half-word (where & 2) or byte (where & 3)
 * offset.  The 32-bit case and the size switch are not visible here. */
117 enable_dbi_access(app_reg);
124 writew(val, va_address + (where & 2));
126 writeb(val, va_address + (where & 3));
129 disable_dbi_access(app_reg);
/* Bound on capability-list traversal; guards against malformed/looping lists. */
132 #define PCI_FIND_CAP_TTL 48
/* pci_find_own_next_cap_ttl() fragment: follow the 1-byte next-cap pointer. */
140 spear_dbi_read_reg(config, pos, 1, &pos);
/* pci_find_own_next_cap() fragment: delegate with a fresh ttl budget. */
159 return pci_find_own_next_cap_ttl(config, pos, cap, &ttl);
/* Fragment: read PCI_STATUS (2 bytes) — presumably to test the
 * capability-list bit before walking; enclosing function not fully visible. */
167 spear_dbi_read_reg(config,
PCI_STATUS, 2, &status);
/* pci_find_own_capability() fragment: list start depends on header type,
 * then walk for the requested capability id. */
207 pos = pci_find_own_cap_start(config, hdr_type);
209 pos = pci_find_own_next_cap(config, pos, cap);
/* configfs "link" show fragment: link state is taken from the
 * XMLH_LINK_UP bit of app_status_1. */
222 static ssize_t pcie_gadget_show_link(
228 if (
readl(&app_reg->app_status_1) & ((
u32)1 << XMLH_LINK_UP_ID))
/* configfs "link" store fragment: sets or clears APP_LTSSM_ENABLE in
 * app_ctrl_0 to start or stop link training (the string compare that
 * selects the branch is not visible here). */
234 static ssize_t pcie_gadget_store_link(
236 const char *buf,
size_t count)
/* Bring the link up: enable the LTSSM. */
241 writel(
readl(&app_reg->app_ctrl_0) | (1 << APP_LTSSM_ENABLE_ID),
242 &app_reg->app_ctrl_0);
/* Other branch: clear the LTSSM enable bit (link down). */
245 & ~(1 << APP_LTSSM_ENABLE_ID),
246 &app_reg->app_ctrl_0);
/* configfs "int_type" accessors (bodies not visible): presumably map the
 * INT_TYPE_* constants to/from a string — confirm against full source. */
252 static ssize_t pcie_gadget_show_int_type(
259 static ssize_t pcie_gadget_store_int_type(
261 const char *buf,
size_t count)
/* configfs "no_of_msi" show fragment. */
290 static ssize_t pcie_gadget_show_no_of_msi(
/* The host must have enabled MSI (CFG_MSI_EN in msg_status) before a
 * vector count is meaningful. */
298 if ((
readl(&app_reg->msg_status) & (1 << CFG_MSI_EN_ID))
299 != (1 << CFG_MSI_EN_ID))
/* vector is presumably unsigned long ("%lu") — computed in lines not
 * visible here. */
312 return sprintf(buf,
"%lu", vector);
315 static ssize_t pcie_gadget_store_no_of_msi(
317 const char *buf,
size_t count)
/* configfs "inta" store fragment: asserts or de-asserts legacy INTx by
 * toggling SYS_INT in app_ctrl_0 (the branch condition is not visible). */
327 static ssize_t pcie_gadget_store_inta(
329 const char *buf,
size_t count)
/* Assert the interrupt. */
338 writel(
readl(&app_reg->app_ctrl_0) | (1 << SYS_INT_ID),
339 &app_reg->app_ctrl_0);
/* De-assert the interrupt. */
341 writel(
readl(&app_reg->app_ctrl_0) & ~(1 << SYS_INT_ID),
342 &app_reg->app_ctrl_0);
/* configfs "send_msi" store fragment: builds a vendor-MSI request in
 * ven_msi_1 and pulses the request-enable bit to fire one MSI. */
347 static ssize_t pcie_gadget_store_send_msi(
349 const char *buf,
size_t count)
364 ven_msi =
readl(&app_reg->ven_msi_1);
/* Function number and traffic class are forced to 0. */
365 ven_msi &= ~VEN_MSI_FUN_NUM_MASK;
366 ven_msi |= 0 << VEN_MSI_FUN_NUM_ID;
367 ven_msi &= ~VEN_MSI_TC_MASK;
368 ven_msi |= 0 << VEN_MSI_TC_ID;
/* Insert the requested vector (parsed from buf in lines not visible). */
369 ven_msi &= ~VEN_MSI_VECTOR_MASK;
370 ven_msi |= vector << VEN_MSI_VECTOR_ID;
/* Pulse REQ_EN: set then clear, so each write to this attribute sends
 * exactly one MSI. */
373 ven_msi |= VEN_MSI_REQ_EN;
374 writel(ven_msi, &app_reg->ven_msi_1);
376 ven_msi &= ~VEN_MSI_REQ_EN;
377 writel(ven_msi, &app_reg->ven_msi_1);
/* configfs vendor_id / device_id accessors (bodies not visible):
 * presumably read/write the config-space IDs via the spear_dbi_*_reg
 * helpers — confirm against full source. */
382 static ssize_t pcie_gadget_show_vendor_id(
393 static ssize_t pcie_gadget_store_vendor_id(
395 const char *buf,
size_t count)
407 static ssize_t pcie_gadget_show_device_id(
418 static ssize_t pcie_gadget_store_device_id(
420 const char *buf,
size_t count)
/* configfs bar0_size accessors (mostly not visible). */
432 static ssize_t pcie_gadget_show_bar0_size(
439 static ssize_t pcie_gadget_store_bar0_size(
441 const char *buf,
size_t count)
/* Visible clamp branch: sizes >= 1MB are capped (full clamp logic not
 * visible here). */
453 else if (size >= 0x100000)
/* BAR0 mask register takes (size - 1), i.e. the address mask. */
471 spear_dbi_write_reg(config, PCIE_BAR0_MASK_REG, 4, size - 1);
/* configfs bar0_address accessors. */
476 static ssize_t pcie_gadget_show_bar0_address(
/* address is presumably u32 ("%x") — read back in lines not visible. */
484 return sprintf(buf,
"%x", address);
487 static ssize_t pcie_gadget_store_bar0_address(
489 const char *buf,
size_t count)
/* Retarget the inbound BAR0 translation window at the new address. */
504 writel(address, &app_reg->pim0_mem_addr_start);
/* configfs bar0_rw_offset accessors (bodies not visible): presumably hold
 * the offset used by the bar0_data accessors below — confirm. */
509 static ssize_t pcie_gadget_show_bar0_rw_offset(
516 static ssize_t pcie_gadget_store_bar0_rw_offset(
518 const char *buf,
size_t count)
/* configfs bar0_data accessors: read/write the word at bar0_rw_offset. */
533 static ssize_t pcie_gadget_show_bar0_data(
/* data is presumably unsigned long ("%lx") — fetched in lines not visible. */
544 return sprintf(buf,
"%lx", data);
547 static ssize_t pcie_gadget_store_bar0_data(
549 const char *buf,
size_t count)
/* Attribute generators for the configfs interface: each expands to a
 * static pcie_gadget_target_attr bound to pcie_gadget_show_<name> and/or
 * pcie_gadget_store_<name>, with matching RO/WO/RW permission bits. */
568 #define PCIE_GADGET_TARGET_ATTR_RO(_name) \
569 static struct pcie_gadget_target_attr pcie_gadget_target_##_name = \
570 __CONFIGFS_ATTR(_name, S_IRUGO, pcie_gadget_show_##_name, NULL)
/* Write-only variant (store callback only). */
572 #define PCIE_GADGET_TARGET_ATTR_WO(_name) \
573 static struct pcie_gadget_target_attr pcie_gadget_target_##_name = \
574 __CONFIGFS_ATTR(_name, S_IWUSR, NULL, pcie_gadget_store_##_name)
/* Read-write variant (both callbacks). */
576 #define PCIE_GADGET_TARGET_ATTR_RW(_name) \
577 static struct pcie_gadget_target_attr pcie_gadget_target_##_name = \
578 __CONFIGFS_ATTR(_name, S_IRUGO | S_IWUSR, pcie_gadget_show_##_name, \
579 pcie_gadget_store_##_name)
/* Interior of the attribute table registered with configfs; the array
 * declaration and NULL terminator are outside this view.  One entry per
 * show/store pair defined above. */
593 &pcie_gadget_target_link.attr,
594 &pcie_gadget_target_int_type.attr,
595 &pcie_gadget_target_no_of_msi.attr,
596 &pcie_gadget_target_inta.attr,
597 &pcie_gadget_target_send_msi.attr,
598 &pcie_gadget_target_vendor_id.attr,
599 &pcie_gadget_target_device_id.attr,
600 &pcie_gadget_target_bar0_size.attr,
601 &pcie_gadget_target_bar0_address.attr,
602 &pcie_gadget_target_bar0_rw_offset.attr,
603 &pcie_gadget_target_bar0_data.attr,
/* configfs glue fragments: recover the target from a config_item, and wire
 * the generic show/store dispatchers plus the attribute table into the
 * item-ops / item-type structures (declarations not visible here). */
610 container_of(to_configfs_subsystem(to_config_group(item)),
648 .show_attribute = pcie_gadget_target_attr_show,
649 .store_attribute = pcie_gadget_target_attr_store,
653 .ct_attrs = pcie_gadget_target_attrs,
654 .ct_item_ops = &pcie_gadget_target_item_ops,
/* Interior of spear13xx_pcie_device_init(): lays out the inbound windows
 * back-to-back — each window starts at the previous window's limit + 1 —
 * then programs outbound/inbound translation and enables the controller.
 *
 * NOTE(review): the in*_addr_limit / in*_addr_start struct fields are read
 * directly instead of via readl(); that bypasses the __iomem accessor
 * convention — confirm this is intentional for this SoC. */
664 writel(config->
base, &app_reg->in0_mem_addr_start);
666 &app_reg->in0_mem_addr_limit);
667 writel(app_reg->in0_mem_addr_limit + 1, &app_reg->in1_mem_addr_start);
669 &app_reg->in1_mem_addr_limit);
670 writel(app_reg->in1_mem_addr_limit + 1, &app_reg->in_io_addr_start);
672 &app_reg->in_io_addr_limit);
673 writel(app_reg->in_io_addr_limit + 1, &app_reg->in_cfg0_addr_start);
675 &app_reg->in_cfg0_addr_limit);
676 writel(app_reg->in_cfg0_addr_limit + 1, &app_reg->in_cfg1_addr_start);
678 &app_reg->in_cfg1_addr_limit);
679 writel(app_reg->in_cfg1_addr_limit + 1, &app_reg->in_msg_addr_start);
681 &app_reg->in_msg_addr_limit);
/* Outbound translation windows mirror the inbound starts (identity map). */
683 writel(app_reg->in0_mem_addr_start, &app_reg->pom0_mem_addr_start);
684 writel(app_reg->in1_mem_addr_start, &app_reg->pom1_mem_addr_start);
685 writel(app_reg->in_io_addr_start, &app_reg->pom_io_addr_start);
/* Inbound (PCIe->AXI) translation: BAR0 targets SYSRAM1. */
696 writel(SPEAR13XX_SYSRAM1_BASE, &app_reg->pim0_mem_addr_start);
697 writel(0, &app_reg->pim1_mem_addr_start);
/* NOTE(review): pim_io_addr_start is written twice with the same value —
 * the second write looks redundant or may have been meant for another
 * register; confirm against the register map. */
700 writel(0x0, &app_reg->pim_io_addr_start);
701 writel(0x0, &app_reg->pim_io_addr_start);
702 writel(0x0, &app_reg->pim_rom_addr_start);
/* Configure as endpoint, enable misc control and register translation. */
704 writel(DEVICE_TYPE_EP | (1 << MISCTRL_EN_ID)
705 | ((
u32)1 << REG_TRANSLATION_ENABLE),
706 &app_reg->app_ctrl_0);
/* Unmask all gadget interrupts. */
708 writel(0, &app_reg->int_mask);
/* Fragments of the platform-driver probe: claim the two MMIO regions, map
 * them, allocate the configfs target, request the IRQ, grab/enable the
 * per-instance clock (pcie1 vs pcie2 chosen by pdev->id), then run the
 * controller init.  Most of the error-handling paths are not visible. */
717 unsigned int status = 0;
/* Resource 0 (app registers) request failed. */
734 dev_err(&pdev->
dev,
"pcie gadget region already claimed\n");
/* Resource 1 (DBI/config space) request failed. */
746 dev_err(&pdev->
dev,
"pcie gadget region already claimed\n");
750 target = kzalloc(
sizeof(*target),
GFP_KERNEL);
/* Wire the configfs item into the subsystem group. */
757 cg_item = &target->
subsys.su_group.cg_item;
759 cg_item->
ci_type = &pcie_gadget_target_type;
762 resource_size(res0));
772 resource_size(res1));
776 goto err_iounmap_app;
791 "pcie gadget interrupt IRQ%d already claimed\n", irq);
/* Instance-specific clock: pcie1 branch ... */
816 pr_err(
"%s:couldn't get clk for pcie1\n", __func__);
820 pr_err(
"%s:couldn't enable clk for pcie1\n", __func__);
823 }
else if (pdev->
id == 2) {
/* ... pcie2 branch. */
830 pr_err(
"%s:couldn't get clk for pcie2\n", __func__);
834 pr_err(
"%s:couldn't enable clk for pcie2\n", __func__);
/* Program windows/translation and enable the endpoint. */
838 spear13xx_pcie_device_init(config);
/* platform_driver initializer fragment: binds by name "pcie-gadget-spear";
 * remove/shutdown callbacks are defined elsewhere in the file. */
885 .probe = spear_pcie_gadget_probe,
886 .remove = spear_pcie_gadget_remove,
887 .shutdown = spear_pcie_gadget_shutdown,
889 .name =
"pcie-gadget-spear",