/*
 * drivers/misc/spear13xx_pcie_gadget.c
 *
 * Copyright (C) 2010 ST Microelectronics
 * Pratyush Anand <pratyush.anand@st.com>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */
#include <linux/clk.h>
#include <linux/slab.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pci_regs.h>
#include <linux/configfs.h>
#include <mach/pcie.h>
#include <mach/misc_regs.h>
#define IN0_MEM_SIZE	(200 * 1024 * 1024 - 1)
/*
 * In the current implementation address translation is done using IN0 only,
 * so the IN1 start address and the IN0 end address have been kept the same.
 */
#define IN1_MEM_SIZE	(0 * 1024 * 1024 - 1)
#define IN_IO_SIZE	(20 * 1024 * 1024 - 1)
#define IN_CFG0_SIZE	(12 * 1024 * 1024 - 1)
#define IN_CFG1_SIZE	(12 * 1024 * 1024 - 1)
#define IN_MSG_SIZE	(12 * 1024 * 1024 - 1)
/* Keep default BAR size as 4K */
/* AORAM would be mapped by default */
#define INBOUND_ADDR_MASK	(SPEAR13XX_SYSRAM1_SIZE - 1)

#define INT_TYPE_NO_INT	0
#define INT_TYPE_INTX	1
#define INT_TYPE_MSI	2
struct spear_pcie_gadget_config {
	void __iomem *base;
	void __iomem *va_app_base;
	void __iomem *va_dbi_base;
	char int_type[10];		/* "INTA", "MSI" or "NO_INT" */
	ulong requested_msi;
	ulong configured_msi;
	ulong bar0_size;
	ulong bar0_rw_offset;
	void __iomem *va_bar0_address;
};

struct pcie_gadget_target {
	struct configfs_subsystem subsys;
	struct spear_pcie_gadget_config config;
};
struct pcie_gadget_target_attr {
	struct configfs_attribute	attr;
	ssize_t		(*show)(struct spear_pcie_gadget_config *config,
				char *buf);
	ssize_t		(*store)(struct spear_pcie_gadget_config *config,
				 const char *buf,
				 size_t count);
};
static void enable_dbi_access(struct pcie_app_reg __iomem *app_reg)
{
	/* Enable DBI access */
	writel(readl(&app_reg->slv_armisc) | (1 << AXI_OP_DBI_ACCESS_ID),
			&app_reg->slv_armisc);
	writel(readl(&app_reg->slv_awmisc) | (1 << AXI_OP_DBI_ACCESS_ID),
			&app_reg->slv_awmisc);
}
static void disable_dbi_access(struct pcie_app_reg __iomem *app_reg)
{
	/* Disable DBI access */
	writel(readl(&app_reg->slv_armisc) & ~(1 << AXI_OP_DBI_ACCESS_ID),
			&app_reg->slv_armisc);
	writel(readl(&app_reg->slv_awmisc) & ~(1 << AXI_OP_DBI_ACCESS_ID),
			&app_reg->slv_awmisc);
}
static void spear_dbi_read_reg(struct spear_pcie_gadget_config *config,
		int where, int size, u32 *val)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	ulong va_address;

	/* Enable DBI access */
	enable_dbi_access(app_reg);

	va_address = (ulong)config->va_dbi_base + (where & ~0x3);

	*val = readl(va_address);

	if (size == 1)
		*val = (*val >> (8 * (where & 3))) & 0xff;
	else if (size == 2)
		*val = (*val >> (8 * (where & 3))) & 0xffff;

	/* Disable DBI access */
	disable_dbi_access(app_reg);
}
static void spear_dbi_write_reg(struct spear_pcie_gadget_config *config,
		int where, int size, u32 val)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	ulong va_address;

	/* Enable DBI access */
	enable_dbi_access(app_reg);

	va_address = (ulong)config->va_dbi_base + (where & ~0x3);

	if (size == 4)
		writel(val, va_address);
	else if (size == 2)
		writew(val, va_address + (where & 2));
	else if (size == 1)
		writeb(val, va_address + (where & 3));

	/* Disable DBI access */
	disable_dbi_access(app_reg);
}
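/*
 * Usage sketch (illustrative only): every local configuration-space access
 * in this driver goes through the two helpers above, e.g. a 16-bit read of
 * the vendor ID:
 *
 *	u32 id;
 *	spear_dbi_read_reg(config, PCI_VENDOR_ID, 2, &id);
 *
 * The same pattern is used by the vendor_id/device_id attribute handlers
 * further down.
 */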
#define PCI_FIND_CAP_TTL	48
static int pci_find_own_next_cap_ttl(struct spear_pcie_gadget_config *config,
		u32 pos, int cap, int *ttl)
{
	u32 id;

	while ((*ttl)--) {
		spear_dbi_read_reg(config, pos, 1, &pos);
		if (pos < 0x40)
			break;
		pos &= ~3;
		spear_dbi_read_reg(config, pos + PCI_CAP_LIST_ID, 1, &id);
		if (id == 0xff)
			break;
		if (id == cap)
			return pos;
		pos += PCI_CAP_LIST_NEXT;
	}
	return 0;
}
static int pci_find_own_next_cap(struct spear_pcie_gadget_config *config,
		u32 pos, int cap)
{
	int ttl = PCI_FIND_CAP_TTL;

	return pci_find_own_next_cap_ttl(config, pos, cap, &ttl);
}
static int pci_find_own_cap_start(struct spear_pcie_gadget_config *config,
		u8 hdr_type)
{
	u32 status;

	spear_dbi_read_reg(config, PCI_STATUS, 2, &status);
	if (!(status & PCI_STATUS_CAP_LIST))
		return 0;

	switch (hdr_type) {
	case PCI_HEADER_TYPE_NORMAL:
	case PCI_HEADER_TYPE_BRIDGE:
		return PCI_CAPABILITY_LIST;
	case PCI_HEADER_TYPE_CARDBUS:
		return PCI_CB_CAPABILITY_LIST;
	default:
		return 0;
	}
}
/*
 * Tell if a device supports a given PCI capability.
 * Returns the address of the requested capability structure within the
 * device's PCI configuration space or 0 in case the device does not
 * support it. Possible values for @cap:
 *
 *  %PCI_CAP_ID_PM	Power Management
 *  %PCI_CAP_ID_AGP	Accelerated Graphics Port
 *  %PCI_CAP_ID_VPD	Vital Product Data
 *  %PCI_CAP_ID_SLOTID	Slot Identification
 *  %PCI_CAP_ID_MSI	Message Signalled Interrupts
 *  %PCI_CAP_ID_CHSWP	CompactPCI HotSwap
 *  %PCI_CAP_ID_PCIX	PCI-X
 *  %PCI_CAP_ID_EXP	PCI Express
 */
static int pci_find_own_capability(struct spear_pcie_gadget_config *config,
		int cap)
{
	u32 pos = 0;
	u32 hdr_type;

	spear_dbi_read_reg(config, PCI_HEADER_TYPE, 1, &hdr_type);

	pos = pci_find_own_cap_start(config, hdr_type);
	if (pos)
		pos = pci_find_own_next_cap(config, pos, cap);

	return pos;
}
static irqreturn_t spear_pcie_gadget_irq(int irq, void *dev_id)
{
	/* nothing device-specific to do here */
	return IRQ_HANDLED;
}
/*
 * configfs interfaces show/store functions
 */
static ssize_t pcie_gadget_show_link(
		struct spear_pcie_gadget_config *config,
		char *buf)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;

	if (readl(&app_reg->app_status_1) & ((u32)1 << XMLH_LINK_UP_ID))
		return sprintf(buf, "UP");
	else
		return sprintf(buf, "DOWN");
}
static ssize_t pcie_gadget_store_link(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;

	if (sysfs_streq(buf, "UP"))
		writel(readl(&app_reg->app_ctrl_0) | (1 << APP_LTSSM_ENABLE_ID),
				&app_reg->app_ctrl_0);
	else if (sysfs_streq(buf, "DOWN"))
		writel(readl(&app_reg->app_ctrl_0)
				& ~(1 << APP_LTSSM_ENABLE_ID),
				&app_reg->app_ctrl_0);
	else
		return -EINVAL;

	return count;
}
static ssize_t pcie_gadget_show_int_type(
		struct spear_pcie_gadget_config *config,
		char *buf)
{
	return sprintf(buf, "%s", config->int_type);
}
static ssize_t pcie_gadget_store_int_type(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	u32 cap, vec, flags;
	ulong vector;

	if (sysfs_streq(buf, "INTA"))
		spear_dbi_write_reg(config, PCI_INTERRUPT_LINE, 1, 1);

	else if (sysfs_streq(buf, "MSI")) {
		/* log2 of the requested vector count */
		vector = config->requested_msi;
		vec = 0;
		while (vector > 1) {
			vector /= 2;
			vec++;
		}
		spear_dbi_write_reg(config, PCI_INTERRUPT_LINE, 1, 0);
		cap = pci_find_own_capability(config, PCI_CAP_ID_MSI);
		spear_dbi_read_reg(config, cap + PCI_MSI_FLAGS, 1, &flags);
		flags &= ~PCI_MSI_FLAGS_QMASK;
		flags |= vec << 1;
		spear_dbi_write_reg(config, cap + PCI_MSI_FLAGS, 1, flags);
	} else
		return -EINVAL;

	strcpy(config->int_type, buf);

	return count;
}
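/*
 * Note: the MSI branch above sizes the Multiple Message Capable field from
 * config->requested_msi, so userspace is expected to configure no_of_msi
 * before switching int_type to "MSI".
 */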
static ssize_t pcie_gadget_show_no_of_msi(
		struct spear_pcie_gadget_config *config,
		char *buf)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	u32 cap, flags;
	ulong vector;

	if ((readl(&app_reg->msg_status) & (1 << CFG_MSI_EN_ID))
			!= (1 << CFG_MSI_EN_ID))
		vector = 0;
	else {
		cap = pci_find_own_capability(config, PCI_CAP_ID_MSI);
		spear_dbi_read_reg(config, cap + PCI_MSI_FLAGS, 1, &flags);
		/* number of enabled vectors is 2^(multiple message enable) */
		flags &= PCI_MSI_FLAGS_QSIZE;
		vector = 1 << (flags >> 4);
	}

	config->configured_msi = vector;

	return sprintf(buf, "%lu", vector);
}
static ssize_t pcie_gadget_store_no_of_msi(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	int ret;

	ret = kstrtoul(buf, 0, &config->requested_msi);
	if (ret)
		return ret;

	if (config->requested_msi > 32)
		config->requested_msi = 32;

	return count;
}
static ssize_t pcie_gadget_store_inta(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	ulong en;
	int ret;

	ret = kstrtoul(buf, 0, &en);
	if (ret)
		return ret;

	if (en)
		writel(readl(&app_reg->app_ctrl_0) | (1 << SYS_INT_ID),
				&app_reg->app_ctrl_0);
	else
		writel(readl(&app_reg->app_ctrl_0) & ~(1 << SYS_INT_ID),
				&app_reg->app_ctrl_0);

	return count;
}
static ssize_t pcie_gadget_store_send_msi(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	ulong vector;
	u32 ven_msi;
	int ret;

	ret = kstrtoul(buf, 0, &vector);
	if (ret)
		return ret;

	if (!config->configured_msi)
		return -EINVAL;

	if (vector >= config->configured_msi)
		return -EINVAL;

	ven_msi = readl(&app_reg->ven_msi_1);
	ven_msi &= ~VEN_MSI_FUN_NUM_MASK;
	ven_msi |= 0 << VEN_MSI_FUN_NUM_ID;
	ven_msi &= ~VEN_MSI_TC_MASK;
	ven_msi |= 0 << VEN_MSI_TC_ID;
	ven_msi &= ~VEN_MSI_VECTOR_MASK;
	ven_msi |= vector << VEN_MSI_VECTOR_ID;

	/* generating interrupt for msi vector */
	ven_msi |= VEN_MSI_REQ_EN;
	writel(ven_msi, &app_reg->ven_msi_1);
	udelay(1);
	ven_msi &= ~VEN_MSI_REQ_EN;
	writel(ven_msi, &app_reg->ven_msi_1);

	return count;
}
static ssize_t pcie_gadget_show_vendor_id(
		struct spear_pcie_gadget_config *config,
		char *buf)
{
	u32 id;

	spear_dbi_read_reg(config, PCI_VENDOR_ID, 2, &id);

	return sprintf(buf, "%x", id);
}

static ssize_t pcie_gadget_store_vendor_id(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	ulong id;
	int ret;

	ret = kstrtoul(buf, 0, &id);
	if (ret)
		return ret;

	spear_dbi_write_reg(config, PCI_VENDOR_ID, 2, id);

	return count;
}
static ssize_t pcie_gadget_show_device_id(
		struct spear_pcie_gadget_config *config,
		char *buf)
{
	u32 id;

	spear_dbi_read_reg(config, PCI_DEVICE_ID, 2, &id);

	return sprintf(buf, "%x", id);
}

static ssize_t pcie_gadget_store_device_id(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	ulong id;
	int ret;

	ret = kstrtoul(buf, 0, &id);
	if (ret)
		return ret;

	spear_dbi_write_reg(config, PCI_DEVICE_ID, 2, id);

	return count;
}
static ssize_t pcie_gadget_show_bar0_size(
		struct spear_pcie_gadget_config *config,
		char *buf)
{
	return sprintf(buf, "%lx", config->bar0_size);
}
static ssize_t pcie_gadget_store_bar0_size(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	ulong size;
	u32 pos, pos1;
	u32 no_of_bit = 0;
	int ret;

	ret = kstrtoul(buf, 0, &size);
	if (ret)
		return ret;

	/* min bar size is 256 */
	if (size <= 0x100)
		size = 0x100;
	/* max bar size is 1MB */
	else if (size >= 0x100000)
		size = 0x100000;
	else {
		/* round intermediate sizes up to a power of two */
		pos = 0;
		pos1 = 0;
		while (pos < 21) {
			pos = find_next_bit((ulong *)&size, 21, pos);
			if (pos != 21)
				pos1 = pos + 1;
			pos++;
			no_of_bit++;
		}
		/* exactly one bit set: size already a power of two */
		if (no_of_bit == 2)
			pos1--;

		size = 1 << pos1;
	}
	config->bar0_size = size;
	spear_dbi_write_reg(config, PCIE_BAR0_MASK_REG, 4, size - 1);

	return count;
}
static ssize_t pcie_gadget_show_bar0_address(
		struct spear_pcie_gadget_config *config,
		char *buf)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;

	u32 address = readl(&app_reg->pim0_mem_addr_start);

	return sprintf(buf, "%x", address);
}
static ssize_t pcie_gadget_store_bar0_address(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	ulong address;
	int ret;

	ret = kstrtoul(buf, 0, &address);
	if (ret)
		return ret;

	address &= ~(config->bar0_size - 1);
	if (config->va_bar0_address)
		iounmap(config->va_bar0_address);
	config->va_bar0_address = ioremap(address, config->bar0_size);
	if (!config->va_bar0_address)
		return -ENOMEM;

	writel(address, &app_reg->pim0_mem_addr_start);

	return count;
}
static ssize_t pcie_gadget_show_bar0_rw_offset(
		struct spear_pcie_gadget_config *config,
		char *buf)
{
	return sprintf(buf, "%lx", config->bar0_rw_offset);
}
static ssize_t pcie_gadget_store_bar0_rw_offset(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	ulong offset;
	int ret;

	ret = kstrtoul(buf, 0, &offset);
	if (ret)
		return ret;

	/* the bar0_data accessors use 32-bit I/O, so keep the offset aligned */
	if (offset % 4)
		return -EINVAL;

	config->bar0_rw_offset = offset;

	return count;
}
static ssize_t pcie_gadget_show_bar0_data(
		struct spear_pcie_gadget_config *config,
		char *buf)
{
	ulong data;

	if (!config->va_bar0_address)
		return -ENOMEM;

	data = readl((ulong)config->va_bar0_address + config->bar0_rw_offset);

	return sprintf(buf, "%lx", data);
}
static ssize_t pcie_gadget_store_bar0_data(
		struct spear_pcie_gadget_config *config,
		const char *buf, size_t count)
{
	ulong data;
	int ret;

	ret = kstrtoul(buf, 0, &data);
	if (ret)
		return ret;

	if (!config->va_bar0_address)
		return -ENOMEM;

	writel(data, (ulong)config->va_bar0_address + config->bar0_rw_offset);

	return count;
}
/*
 * Attribute definitions.
 */

#define PCIE_GADGET_TARGET_ATTR_RO(_name)				\
static struct pcie_gadget_target_attr pcie_gadget_target_##_name =	\
	__CONFIGFS_ATTR(_name, S_IRUGO, pcie_gadget_show_##_name, NULL)

#define PCIE_GADGET_TARGET_ATTR_WO(_name)				\
static struct pcie_gadget_target_attr pcie_gadget_target_##_name =	\
	__CONFIGFS_ATTR(_name, S_IWUSR, NULL, pcie_gadget_store_##_name)

#define PCIE_GADGET_TARGET_ATTR_RW(_name)				\
static struct pcie_gadget_target_attr pcie_gadget_target_##_name =	\
	__CONFIGFS_ATTR(_name, S_IRUGO | S_IWUSR, pcie_gadget_show_##_name, \
			pcie_gadget_store_##_name)
PCIE_GADGET_TARGET_ATTR_RW(link);
PCIE_GADGET_TARGET_ATTR_RW(int_type);
PCIE_GADGET_TARGET_ATTR_RW(no_of_msi);
PCIE_GADGET_TARGET_ATTR_WO(inta);
PCIE_GADGET_TARGET_ATTR_WO(send_msi);
PCIE_GADGET_TARGET_ATTR_RW(vendor_id);
PCIE_GADGET_TARGET_ATTR_RW(device_id);
PCIE_GADGET_TARGET_ATTR_RW(bar0_size);
PCIE_GADGET_TARGET_ATTR_RW(bar0_address);
PCIE_GADGET_TARGET_ATTR_RW(bar0_rw_offset);
PCIE_GADGET_TARGET_ATTR_RW(bar0_data);
static struct configfs_attribute *pcie_gadget_target_attrs[] = {
	&pcie_gadget_target_link.attr,
	&pcie_gadget_target_int_type.attr,
	&pcie_gadget_target_no_of_msi.attr,
	&pcie_gadget_target_inta.attr,
	&pcie_gadget_target_send_msi.attr,
	&pcie_gadget_target_vendor_id.attr,
	&pcie_gadget_target_device_id.attr,
	&pcie_gadget_target_bar0_size.attr,
	&pcie_gadget_target_bar0_address.attr,
	&pcie_gadget_target_bar0_rw_offset.attr,
	&pcie_gadget_target_bar0_data.attr,
	NULL,
};
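/*
 * Userspace view (illustrative only, assuming configfs is mounted at
 * /sys/kernel/config and the platform device id is 1; the directory name
 * follows the "pcie_gadget.%d" format used in probe below):
 *
 *	# mount -t configfs none /sys/kernel/config
 *	# cd /sys/kernel/config/pcie_gadget.1
 *	# echo 0x104a > vendor_id
 *	# echo UP > link
 *	# cat link
 */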
static struct pcie_gadget_target *to_target(struct config_item *item)
{
	return item ?
		container_of(to_configfs_subsystem(to_config_group(item)),
			struct pcie_gadget_target, subsys) : NULL;
}
/*
 * Item operations and type for pcie_gadget_target.
 */
static ssize_t pcie_gadget_target_attr_show(struct config_item *item,
					    struct configfs_attribute *attr,
					    char *buf)
{
	ssize_t ret = -EINVAL;
	struct pcie_gadget_target *target = to_target(item);
	struct pcie_gadget_target_attr *t_attr =
		container_of(attr, struct pcie_gadget_target_attr, attr);

	if (t_attr->show)
		ret = t_attr->show(&target->config, buf);
	return ret;
}
static ssize_t pcie_gadget_target_attr_store(struct config_item *item,
					     struct configfs_attribute *attr,
					     const char *buf,
					     size_t count)
{
	ssize_t ret = -EINVAL;
	struct pcie_gadget_target *target = to_target(item);
	struct pcie_gadget_target_attr *t_attr =
		container_of(attr, struct pcie_gadget_target_attr, attr);

	if (t_attr->store)
		ret = t_attr->store(&target->config, buf, count);
	return ret;
}
static struct configfs_item_operations pcie_gadget_target_item_ops = {
	.show_attribute		= pcie_gadget_target_attr_show,
	.store_attribute	= pcie_gadget_target_attr_store,
};
static struct config_item_type pcie_gadget_target_type = {
	.ct_attrs		= pcie_gadget_target_attrs,
	.ct_item_ops		= &pcie_gadget_target_item_ops,
	.ct_owner		= THIS_MODULE,
};
static void spear13xx_pcie_device_init(struct spear_pcie_gadget_config *config)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;

	/* setup registers for outbound translation */

	writel(config->base, &app_reg->in0_mem_addr_start);
	writel(app_reg->in0_mem_addr_start + IN0_MEM_SIZE,
			&app_reg->in0_mem_addr_limit);
	writel(app_reg->in0_mem_addr_limit + 1, &app_reg->in1_mem_addr_start);
	writel(app_reg->in1_mem_addr_start + IN1_MEM_SIZE,
			&app_reg->in1_mem_addr_limit);
	writel(app_reg->in1_mem_addr_limit + 1, &app_reg->in_io_addr_start);
	writel(app_reg->in_io_addr_start + IN_IO_SIZE,
			&app_reg->in_io_addr_limit);
	writel(app_reg->in_io_addr_limit + 1, &app_reg->in_cfg0_addr_start);
	writel(app_reg->in_cfg0_addr_start + IN_CFG0_SIZE,
			&app_reg->in_cfg0_addr_limit);
	writel(app_reg->in_cfg0_addr_limit + 1, &app_reg->in_cfg1_addr_start);
	writel(app_reg->in_cfg1_addr_start + IN_CFG1_SIZE,
			&app_reg->in_cfg1_addr_limit);
	writel(app_reg->in_cfg1_addr_limit + 1, &app_reg->in_msg_addr_start);
	writel(app_reg->in_msg_addr_start + IN_MSG_SIZE,
			&app_reg->in_msg_addr_limit);

	writel(app_reg->in0_mem_addr_start, &app_reg->pom0_mem_addr_start);
	writel(app_reg->in1_mem_addr_start, &app_reg->pom1_mem_addr_start);
	writel(app_reg->in_io_addr_start, &app_reg->pom_io_addr_start);

	/* setup registers for inbound translation */

	/* Keep AORAM mapped at BAR0 as default */
	config->bar0_size = INBOUND_ADDR_MASK + 1;
	spear_dbi_write_reg(config, PCIE_BAR0_MASK_REG, 4, INBOUND_ADDR_MASK);
	spear_dbi_write_reg(config, PCI_BASE_ADDRESS_0, 4, 0xC);
	config->va_bar0_address = ioremap(SPEAR13XX_SYSRAM1_BASE,
			config->bar0_size);

	writel(SPEAR13XX_SYSRAM1_BASE, &app_reg->pim0_mem_addr_start);
	writel(0, &app_reg->pim1_mem_addr_start);
	writel(INBOUND_ADDR_MASK + 1, &app_reg->mem0_addr_offset_limit);

	writel(0x0, &app_reg->pim_io_addr_start);
	writel(0x0, &app_reg->pim_io_addr_start);
	writel(0x0, &app_reg->pim_rom_addr_start);

	writel(DEVICE_TYPE_EP | (1 << MISCTRL_EN_ID)
			| ((u32)1 << REG_TRANSLATION_ENABLE),
			&app_reg->app_ctrl_0);
	/* disable all rx interrupts */
	writel(0, &app_reg->int_mask);

	/* Select INTA as default */
	spear_dbi_write_reg(config, PCI_INTERRUPT_LINE, 1, 1);
}
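/*
 * After spear13xx_pcie_device_init() the gadget comes up as an endpoint with
 * BAR0 backed by SYSRAM1 (INBOUND_ADDR_MASK + 1 bytes) and legacy INTA
 * selected; userspace can later resize or retarget BAR0 through the
 * bar0_size and bar0_address attributes.
 */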
static int spear_pcie_gadget_probe(struct platform_device *pdev)
{
	struct resource *res0, *res1;
	unsigned int status = 0;
	int irq;
	struct clk *clk;
	static struct pcie_gadget_target *target;
	struct spear_pcie_gadget_config *config;
	struct config_item *cg_item;
	struct configfs_subsystem *subsys;

	/* get resource for application registers */
	res0 = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res0) {
		dev_err(&pdev->dev, "no resource defined\n");
		return -EBUSY;
	}
	if (!request_mem_region(res0->start, resource_size(res0),
				pdev->name)) {
		dev_err(&pdev->dev, "pcie gadget region already claimed\n");
		return -EBUSY;
	}

	/* get resource for dbi registers */
	res1 = platform_get_resource(pdev, IORESOURCE_MEM, 1);
	if (!res1) {
		dev_err(&pdev->dev, "no resource defined\n");
		status = -EBUSY;
		goto err_rel_res0;
	}
	if (!request_mem_region(res1->start, resource_size(res1),
				pdev->name)) {
		dev_err(&pdev->dev, "pcie gadget region already claimed\n");
		status = -EBUSY;
		goto err_rel_res0;
	}

	target = kzalloc(sizeof(*target), GFP_KERNEL);
	if (!target) {
		dev_err(&pdev->dev, "out of memory\n");
		status = -ENOMEM;
		goto err_rel_res;
	}

	cg_item = &target->subsys.su_group.cg_item;
	sprintf(cg_item->ci_namebuf, "pcie_gadget.%d", pdev->id);
	cg_item->ci_type = &pcie_gadget_target_type;
	config = &target->config;
	config->va_app_base = (void __iomem *)ioremap(res0->start,
			resource_size(res0));
	if (!config->va_app_base) {
		dev_err(&pdev->dev, "ioremap fail\n");
		status = -ENOMEM;
		goto err_kzalloc;
	}

	config->base = (void __iomem *)res1->start;

	config->va_dbi_base = (void __iomem *)ioremap(res1->start,
			resource_size(res1));
	if (!config->va_dbi_base) {
		dev_err(&pdev->dev, "ioremap fail\n");
		status = -ENOMEM;
		goto err_iounmap_app;
	}

	platform_set_drvdata(pdev, target);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(&pdev->dev, "no update irq?\n");
		status = irq;
		goto err_iounmap;
	}

	status = request_irq(irq, spear_pcie_gadget_irq, 0, pdev->name, NULL);
	if (status) {
		dev_err(&pdev->dev,
			"pcie gadget interrupt IRQ%d already claimed\n", irq);
		goto err_iounmap;
	}

	/* Register configfs hooks */
	subsys = &target->subsys;
	config_group_init(&subsys->su_group);
	mutex_init(&subsys->su_mutex);
	status = configfs_register_subsystem(subsys);
	if (status)
		goto err_irq;

	/*
	 * Init basic pcie application registers.
	 * Do not enable the clock if it is PCIE0: ideally, all controllers
	 * should be independent of each other with respect to clock, but
	 * PCIE1 and PCIE2 depend on PCIE0, so the PCIE0 clock is provided
	 * during board init.
	 */
	if (pdev->id == 1) {
		/*
		 * Ideally the CFG clock should also have been enabled here.
		 * But it is currently done during the board init routine.
		 */
		clk = clk_get_sys("pcie1", NULL);
		if (IS_ERR(clk)) {
			pr_err("%s:couldn't get clk for pcie1\n", __func__);
			status = PTR_ERR(clk);
			goto err_irq;
		}
		status = clk_enable(clk);
		if (status) {
			pr_err("%s:couldn't enable clk for pcie1\n", __func__);
			goto err_irq;
		}
	} else if (pdev->id == 2) {
		/*
		 * Ideally the CFG clock should also have been enabled here.
		 * But it is currently done during the board init routine.
		 */
		clk = clk_get_sys("pcie2", NULL);
		if (IS_ERR(clk)) {
			pr_err("%s:couldn't get clk for pcie2\n", __func__);
			status = PTR_ERR(clk);
			goto err_irq;
		}
		status = clk_enable(clk);
		if (status) {
			pr_err("%s:couldn't enable clk for pcie2\n", __func__);
			goto err_irq;
		}
	}
	spear13xx_pcie_device_init(config);

	return 0;

err_irq:
	free_irq(irq, NULL);
err_iounmap:
	iounmap(config->va_dbi_base);
err_iounmap_app:
	iounmap(config->va_app_base);
err_kzalloc:
	kfree(target);
err_rel_res:
	release_mem_region(res1->start, resource_size(res1));
err_rel_res0:
	release_mem_region(res0->start, resource_size(res0));
	return status;
}
static int spear_pcie_gadget_remove(struct platform_device *pdev)
{
	struct resource *res0, *res1;
	static struct pcie_gadget_target *target;
	struct spear_pcie_gadget_config *config;
	int irq;

	res0 = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	res1 = platform_get_resource(pdev, IORESOURCE_MEM, 1);
	irq = platform_get_irq(pdev, 0);
	target = platform_get_drvdata(pdev);
	config = &target->config;

	free_irq(irq, NULL);
	iounmap(config->va_dbi_base);
	iounmap(config->va_app_base);
	release_mem_region(res1->start, resource_size(res1));
	release_mem_region(res0->start, resource_size(res0));
	configfs_unregister_subsystem(&target->subsys);
	kfree(target);

	return 0;
}
static void spear_pcie_gadget_shutdown(struct platform_device *pdev)
{
}
static struct platform_driver spear_pcie_gadget_driver = {
	.probe		= spear_pcie_gadget_probe,
	.remove		= spear_pcie_gadget_remove,
	.shutdown	= spear_pcie_gadget_shutdown,
	.driver = {
		.name	= "pcie-gadget-spear",
		.bus	= &platform_bus_type
	},
};

module_platform_driver(spear_pcie_gadget_driver);
MODULE_ALIAS("platform:pcie-gadget-spear");
MODULE_AUTHOR("Pratyush Anand");
MODULE_LICENSE("GPL");