/*
 * drivers/misc/spear13xx_pcie_gadget.c
 *
 * Copyright (C) 2010 ST Microelectronics
 * Pratyush Anand <pratyush.anand@gmail.com>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */
#include <linux/device.h>
#include <linux/clk.h>
#include <linux/slab.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pci_regs.h>
#include <linux/configfs.h>
#include <mach/pcie.h>
#include <mach/misc_regs.h>
#define IN0_MEM_SIZE	(200 * 1024 * 1024 - 1)
/*
 * In the current implementation address translation is done using IN0 only,
 * so the IN1 start address and the IN0 end address have been kept the same.
 */
#define IN1_MEM_SIZE	(0 * 1024 * 1024 - 1)
#define IN_IO_SIZE	(20 * 1024 * 1024 - 1)
#define IN_CFG0_SIZE	(12 * 1024 * 1024 - 1)
#define IN_CFG1_SIZE	(12 * 1024 * 1024 - 1)
#define IN_MSG_SIZE	(12 * 1024 * 1024 - 1)
/* Keep the default BAR size as 4K */
/* AORAM would be mapped by default */
#define INBOUND_ADDR_MASK	(SPEAR13XX_SYSRAM1_SIZE - 1)

#define INT_TYPE_NO_INT	0
#define INT_TYPE_INTX	1
#define INT_TYPE_MSI	2
struct spear_pcie_gadget_config {
	void __iomem *base;
	void __iomem *va_app_base;
	void __iomem *va_dbi_base;
	char int_type[10];
	ulong requested_msi;
	ulong configured_msi;
	ulong bar0_size;
	ulong bar0_rw_offset;
	void __iomem *va_bar0_address;
};

struct pcie_gadget_target {
	struct configfs_subsystem subsys;
	struct spear_pcie_gadget_config config;
};

struct pcie_gadget_target_attr {
	struct configfs_attribute attr;
	ssize_t (*show)(struct spear_pcie_gadget_config *config,
			char *buf);
	ssize_t (*store)(struct spear_pcie_gadget_config *config,
			const char *buf,
			size_t count);
};
static void enable_dbi_access(struct pcie_app_reg __iomem *app_reg)
{
	/* Enable DBI access */
	writel(readl(&app_reg->slv_armisc) | (1 << AXI_OP_DBI_ACCESS_ID),
			&app_reg->slv_armisc);
	writel(readl(&app_reg->slv_awmisc) | (1 << AXI_OP_DBI_ACCESS_ID),
			&app_reg->slv_awmisc);
}
static void disable_dbi_access(struct pcie_app_reg __iomem *app_reg)
{
	/* Disable DBI access */
	writel(readl(&app_reg->slv_armisc) & ~(1 << AXI_OP_DBI_ACCESS_ID),
			&app_reg->slv_armisc);
	writel(readl(&app_reg->slv_awmisc) & ~(1 << AXI_OP_DBI_ACCESS_ID),
			&app_reg->slv_awmisc);
}
static void spear_dbi_read_reg(struct spear_pcie_gadget_config *config,
		int where, int size, u32 *val)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	ulong va_address;

	/* Enable DBI access */
	enable_dbi_access(app_reg);

	va_address = (ulong)config->va_dbi_base + (where & ~0x3);

	*val = readl(va_address);

	if (size == 1)
		*val = (*val >> (8 * (where & 3))) & 0xff;
	else if (size == 2)
		*val = (*val >> (8 * (where & 3))) & 0xffff;

	/* Disable DBI access */
	disable_dbi_access(app_reg);
}
static void spear_dbi_write_reg(struct spear_pcie_gadget_config *config,
		int where, int size, u32 val)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	ulong va_address;

	/* Enable DBI access */
	enable_dbi_access(app_reg);

	va_address = (ulong)config->va_dbi_base + (where & ~0x3);

	if (size == 4)
		writel(val, va_address);
	else if (size == 2)
		writew(val, va_address + (where & 2));
	else if (size == 1)
		writeb(val, va_address + (where & 3));

	/* Disable DBI access */
	disable_dbi_access(app_reg);
}
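
/*
 * Worked example of the sub-dword handling above (illustration only, not
 * part of the original driver): for where = 0x3d and size = 1,
 * va_address = va_dbi_base + (0x3d & ~0x3) = va_dbi_base + 0x3c, and the
 * byte is written at va_address + (0x3d & 3) = va_dbi_base + 0x3d.
 * spear_dbi_read_reg() likewise reads the aligned dword at 0x3c and then
 * shifts it right by 8 * (0x3d & 3) = 8 bits before masking with 0xff.
 */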
#define PCI_FIND_CAP_TTL	48

static int pci_find_own_next_cap_ttl(struct spear_pcie_gadget_config *config,
		u32 pos, int cap, int *ttl)
{
	u32 id;

	while ((*ttl)--) {
		spear_dbi_read_reg(config, pos, 1, &pos);
		if (pos < 0x40)
			break;
		pos &= ~3;
		spear_dbi_read_reg(config, pos + PCI_CAP_LIST_ID, 1, &id);
		if (id == 0xff)
			break;
		if (id == cap)
			return pos;
		pos += PCI_CAP_LIST_NEXT;
	}
	return 0;
}
static int pci_find_own_next_cap(struct spear_pcie_gadget_config *config,
		u32 pos, int cap)
{
	int ttl = PCI_FIND_CAP_TTL;

	return pci_find_own_next_cap_ttl(config, pos, cap, &ttl);
}
static int pci_find_own_cap_start(struct spear_pcie_gadget_config *config,
		u8 hdr_type)
{
	u32 status;

	spear_dbi_read_reg(config, PCI_STATUS, 2, &status);
	if (!(status & PCI_STATUS_CAP_LIST))
		return 0;

	switch (hdr_type) {
	case PCI_HEADER_TYPE_NORMAL:
	case PCI_HEADER_TYPE_BRIDGE:
		return PCI_CAPABILITY_LIST;
	case PCI_HEADER_TYPE_CARDBUS:
		return PCI_CB_CAPABILITY_LIST;
	}

	return 0;
}
/*
 * Tell if a device supports a given PCI capability.
 * Returns the address of the requested capability structure within the
 * device's PCI configuration space or 0 in case the device does not
 * support it. Possible values for @cap:
 *
 *  %PCI_CAP_ID_PM	Power Management
 *  %PCI_CAP_ID_AGP	Accelerated Graphics Port
 *  %PCI_CAP_ID_VPD	Vital Product Data
 *  %PCI_CAP_ID_SLOTID	Slot Identification
 *  %PCI_CAP_ID_MSI	Message Signalled Interrupts
 *  %PCI_CAP_ID_CHSWP	CompactPCI HotSwap
 *  %PCI_CAP_ID_PCIX	PCI-X
 *  %PCI_CAP_ID_EXP	PCI Express
 */
static int pci_find_own_capability(struct spear_pcie_gadget_config *config,
		int cap)
{
	u32 pos;
	u32 hdr_type;

	spear_dbi_read_reg(config, PCI_HEADER_TYPE, 1, &hdr_type);

	pos = pci_find_own_cap_start(config, hdr_type);
	if (pos)
		pos = pci_find_own_next_cap(config, pos, cap);

	return pos;
}
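
/*
 * Usage sketch (illustration only): the MSI configfs handlers below locate
 * the MSI capability in the gadget's own config space with
 *   cap = pci_find_own_capability(config, PCI_CAP_ID_MSI);
 * and then read/modify the flags word at cap + PCI_MSI_FLAGS through the
 * DBI accessors above.
 */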
static irqreturn_t spear_pcie_gadget_irq(int irq, void *dev_id)
{
	return 0;
}
/*
 * configfs interfaces show/store functions
 */

static struct spear_pcie_gadget_config *to_target(struct config_item *item)
{
	return item ?
		&container_of(to_configfs_subsystem(to_config_group(item)),
			struct pcie_gadget_target, subsys)->config : NULL;
}
static ssize_t pcie_gadget_link_show(struct config_item *item, char *buf)
{
	struct pcie_app_reg __iomem *app_reg = to_target(item)->va_app_base;

	if (readl(&app_reg->app_status_1) & ((u32)1 << XMLH_LINK_UP_ID))
		return sprintf(buf, "UP");
	else
		return sprintf(buf, "DOWN");
}
static ssize_t pcie_gadget_link_store(struct config_item *item,
		const char *buf, size_t count)
{
	struct pcie_app_reg __iomem *app_reg = to_target(item)->va_app_base;

	if (sysfs_streq(buf, "UP"))
		writel(readl(&app_reg->app_ctrl_0) | (1 << APP_LTSSM_ENABLE_ID),
			&app_reg->app_ctrl_0);
	else if (sysfs_streq(buf, "DOWN"))
		writel(readl(&app_reg->app_ctrl_0)
				& ~(1 << APP_LTSSM_ENABLE_ID),
				&app_reg->app_ctrl_0);
	else
		return -EINVAL;

	return count;
}
static ssize_t pcie_gadget_int_type_show(struct config_item *item, char *buf)
{
	return sprintf(buf, "%s", to_target(item)->int_type);
}
static ssize_t pcie_gadget_int_type_store(struct config_item *item,
		const char *buf, size_t count)
{
	struct spear_pcie_gadget_config *config = to_target(item);
	u32 cap, vec, flags;
	ulong vector;

	if (sysfs_streq(buf, "INTA"))
		spear_dbi_write_reg(config, PCI_INTERRUPT_LINE, 1, 1);
	else if (sysfs_streq(buf, "MSI")) {
		vector = config->requested_msi;
		vec = 0;
		while (vector > 1) {
			vector /= 2;
			vec++;
		}
		spear_dbi_write_reg(config, PCI_INTERRUPT_LINE, 1, 0);
		cap = pci_find_own_capability(config, PCI_CAP_ID_MSI);
		spear_dbi_read_reg(config, cap + PCI_MSI_FLAGS, 1, &flags);
		flags &= ~PCI_MSI_FLAGS_QMASK;
		flags |= vec << 1;
		spear_dbi_write_reg(config, cap + PCI_MSI_FLAGS, 1, flags);
	} else
		return -EINVAL;

	strcpy(config->int_type, buf);

	return count;
}
static ssize_t pcie_gadget_no_of_msi_show(struct config_item *item, char *buf)
{
	struct spear_pcie_gadget_config *config = to_target(item);
	struct pcie_app_reg __iomem *app_reg = to_target(item)->va_app_base;
	u32 cap, vec, flags;
	ulong vector;

	if ((readl(&app_reg->msg_status) & (1 << CFG_MSI_EN_ID))
			!= (1 << CFG_MSI_EN_ID))
		vector = 0;
	else {
		cap = pci_find_own_capability(config, PCI_CAP_ID_MSI);
		spear_dbi_read_reg(config, cap + PCI_MSI_FLAGS, 1, &flags);
		flags &= PCI_MSI_FLAGS_QSIZE;
		vec = flags >> 4;
		vector = 1;
		while (vec--)
			vector *= 2;
	}
	config->configured_msi = vector;

	return sprintf(buf, "%lu", vector);
}
static ssize_t pcie_gadget_no_of_msi_store(struct config_item *item,
		const char *buf, size_t count)
{
	struct spear_pcie_gadget_config *config = to_target(item);
	int ret;

	ret = kstrtoul(buf, 0, &config->requested_msi);
	if (ret)
		return ret;

	if (config->requested_msi > 32)
		config->requested_msi = 32;

	return count;
}
static ssize_t pcie_gadget_inta_store(struct config_item *item,
		const char *buf, size_t count)
{
	struct pcie_app_reg __iomem *app_reg = to_target(item)->va_app_base;
	ulong en;
	int ret;

	ret = kstrtoul(buf, 0, &en);
	if (ret)
		return ret;

	if (en)
		writel(readl(&app_reg->app_ctrl_0) | (1 << SYS_INT_ID),
				&app_reg->app_ctrl_0);
	else
		writel(readl(&app_reg->app_ctrl_0) & ~(1 << SYS_INT_ID),
				&app_reg->app_ctrl_0);

	return count;
}
static ssize_t pcie_gadget_send_msi_store(struct config_item *item,
		const char *buf, size_t count)
{
	struct spear_pcie_gadget_config *config = to_target(item);
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	ulong vector;
	u32 ven_msi;
	int ret;

	ret = kstrtoul(buf, 0, &vector);
	if (ret)
		return ret;

	if (!config->configured_msi)
		return -EINVAL;

	if (vector >= config->configured_msi)
		return -EINVAL;

	ven_msi = readl(&app_reg->ven_msi_1);
	ven_msi &= ~VEN_MSI_FUN_NUM_MASK;
	ven_msi |= 0 << VEN_MSI_FUN_NUM_ID;
	ven_msi &= ~VEN_MSI_TC_MASK;
	ven_msi |= 0 << VEN_MSI_TC_ID;
	ven_msi &= ~VEN_MSI_VECTOR_MASK;
	ven_msi |= vector << VEN_MSI_VECTOR_ID;

	/* Generate an interrupt for the requested MSI vector */
	ven_msi |= VEN_MSI_REQ_EN;
	writel(ven_msi, &app_reg->ven_msi_1);
	udelay(1);
	ven_msi &= ~VEN_MSI_REQ_EN;
	writel(ven_msi, &app_reg->ven_msi_1);

	return count;
}
static ssize_t pcie_gadget_vendor_id_show(struct config_item *item, char *buf)
{
	u32 id;

	spear_dbi_read_reg(to_target(item), PCI_VENDOR_ID, 2, &id);

	return sprintf(buf, "%x", id);
}

static ssize_t pcie_gadget_vendor_id_store(struct config_item *item,
		const char *buf, size_t count)
{
	ulong id;
	int ret;

	ret = kstrtoul(buf, 0, &id);
	if (ret)
		return ret;

	spear_dbi_write_reg(to_target(item), PCI_VENDOR_ID, 2, id);

	return count;
}
static ssize_t pcie_gadget_device_id_show(struct config_item *item, char *buf)
{
	u32 id;

	spear_dbi_read_reg(to_target(item), PCI_DEVICE_ID, 2, &id);

	return sprintf(buf, "%x", id);
}

static ssize_t pcie_gadget_device_id_store(struct config_item *item,
		const char *buf, size_t count)
{
	ulong id;
	int ret;

	ret = kstrtoul(buf, 0, &id);
	if (ret)
		return ret;

	spear_dbi_write_reg(to_target(item), PCI_DEVICE_ID, 2, id);

	return count;
}
static ssize_t pcie_gadget_bar0_size_show(struct config_item *item, char *buf)
{
	return sprintf(buf, "%lx", to_target(item)->bar0_size);
}
static ssize_t pcie_gadget_bar0_size_store(struct config_item *item,
		const char *buf, size_t count)
{
	struct spear_pcie_gadget_config *config = to_target(item);
	ulong size;
	u32 pos, pos1;
	u32 no_of_bit = 0;
	int ret;

	ret = kstrtoul(buf, 0, &size);
	if (ret)
		return ret;

	/* min bar size is 256 */
	if (size <= 0x100)
		size = 0x100;
	/* max bar size is 1MB */
	else if (size >= 0x100000)
		size = 0x100000;
	else {
		/* otherwise round up to the nearest power of two */
		pos = 0;
		pos1 = 0;
		while (pos < 21) {
			pos = find_next_bit((ulong *)&size, 21, pos);
			if (pos >= 21)
				break;
			pos1 = pos;
			pos++;
			no_of_bit++;
		}
		if (no_of_bit > 1)
			pos1++;
		size = 1 << pos1;
	}
	config->bar0_size = size;
	spear_dbi_write_reg(config, PCIE_BAR0_MASK_REG, 4, size - 1);

	return count;
}
static ssize_t pcie_gadget_bar0_address_show(struct config_item *item,
		char *buf)
{
	struct pcie_app_reg __iomem *app_reg = to_target(item)->va_app_base;
	u32 address = readl(&app_reg->pim0_mem_addr_start);

	return sprintf(buf, "%x", address);
}
static ssize_t pcie_gadget_bar0_address_store(struct config_item *item,
		const char *buf, size_t count)
{
	struct spear_pcie_gadget_config *config = to_target(item);
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;
	ulong address;
	int ret;

	ret = kstrtoul(buf, 0, &address);
	if (ret)
		return ret;

	address &= ~(config->bar0_size - 1);
	if (config->va_bar0_address)
		iounmap(config->va_bar0_address);
	config->va_bar0_address = ioremap(address, config->bar0_size);
	if (!config->va_bar0_address)
		return -ENOMEM;

	writel(address, &app_reg->pim0_mem_addr_start);

	return count;
}
static ssize_t pcie_gadget_bar0_rw_offset_show(struct config_item *item,
		char *buf)
{
	return sprintf(buf, "%lx", to_target(item)->bar0_rw_offset);
}

static ssize_t pcie_gadget_bar0_rw_offset_store(struct config_item *item,
		const char *buf, size_t count)
{
	ulong offset;
	int ret;

	ret = kstrtoul(buf, 0, &offset);
	if (ret)
		return ret;

	if (offset % 4)
		return -EINVAL;

	to_target(item)->bar0_rw_offset = offset;

	return count;
}
static ssize_t pcie_gadget_bar0_data_show(struct config_item *item, char *buf)
{
	struct spear_pcie_gadget_config *config = to_target(item);
	ulong data;

	if (!config->va_bar0_address)
		return -ENOMEM;

	data = readl((ulong)config->va_bar0_address + config->bar0_rw_offset);

	return sprintf(buf, "%lx", data);
}

static ssize_t pcie_gadget_bar0_data_store(struct config_item *item,
		const char *buf, size_t count)
{
	struct spear_pcie_gadget_config *config = to_target(item);
	ulong data;
	int ret;

	ret = kstrtoul(buf, 0, &data);
	if (ret)
		return ret;

	if (!config->va_bar0_address)
		return -ENOMEM;

	writel(data, (ulong)config->va_bar0_address + config->bar0_rw_offset);

	return count;
}
CONFIGFS_ATTR(pcie_gadget_, link);
CONFIGFS_ATTR(pcie_gadget_, int_type);
CONFIGFS_ATTR(pcie_gadget_, no_of_msi);
CONFIGFS_ATTR_WO(pcie_gadget_, inta);
CONFIGFS_ATTR_WO(pcie_gadget_, send_msi);
CONFIGFS_ATTR(pcie_gadget_, vendor_id);
CONFIGFS_ATTR(pcie_gadget_, device_id);
CONFIGFS_ATTR(pcie_gadget_, bar0_size);
CONFIGFS_ATTR(pcie_gadget_, bar0_address);
CONFIGFS_ATTR(pcie_gadget_, bar0_rw_offset);
CONFIGFS_ATTR(pcie_gadget_, bar0_data);
static struct configfs_attribute *pcie_gadget_target_attrs[] = {
	&pcie_gadget_attr_link,
	&pcie_gadget_attr_int_type,
	&pcie_gadget_attr_no_of_msi,
	&pcie_gadget_attr_inta,
	&pcie_gadget_attr_send_msi,
	&pcie_gadget_attr_vendor_id,
	&pcie_gadget_attr_device_id,
	&pcie_gadget_attr_bar0_size,
	&pcie_gadget_attr_bar0_address,
	&pcie_gadget_attr_bar0_rw_offset,
	&pcie_gadget_attr_bar0_data,
	NULL,
};
static struct config_item_type pcie_gadget_target_type = {
	.ct_attrs	= pcie_gadget_target_attrs,
	.ct_owner	= THIS_MODULE,
};
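
/*
 * Example configfs usage (illustrative sketch only; the mount point, device
 * id and values below are assumptions, not taken from this driver):
 *
 *   mount -t configfs none /sys/kernel/config
 *   cd /sys/kernel/config/pcie_gadget.1
 *   cat link                  # "UP" or "DOWN", from app_status_1
 *   echo UP > link            # set APP_LTSSM_ENABLE in app_ctrl_0
 *   echo 4 > no_of_msi        # number of MSI vectors to request
 *   echo MSI > int_type       # program the MSI capability accordingly
 *   cat no_of_msi             # MSIs actually enabled by the host
 *   echo 2 > send_msi         # trigger MSI vector 2 via ven_msi_1
 *   echo 0x80000000 > bar0_address
 *   echo 0x4 > bar0_rw_offset
 *   cat bar0_data
 *
 * The attribute names come from pcie_gadget_target_attrs[] above; "inta"
 * and "send_msi" are write-only (CONFIGFS_ATTR_WO).
 */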
static void spear13xx_pcie_device_init(struct spear_pcie_gadget_config *config)
{
	struct pcie_app_reg __iomem *app_reg = config->va_app_base;

	/* Setup registers for outbound translation */

	writel(config->base, &app_reg->in0_mem_addr_start);
	writel(app_reg->in0_mem_addr_start + IN0_MEM_SIZE,
			&app_reg->in0_mem_addr_limit);
	writel(app_reg->in0_mem_addr_limit + 1, &app_reg->in1_mem_addr_start);
	writel(app_reg->in1_mem_addr_start + IN1_MEM_SIZE,
			&app_reg->in1_mem_addr_limit);
	writel(app_reg->in1_mem_addr_limit + 1, &app_reg->in_io_addr_start);
	writel(app_reg->in_io_addr_start + IN_IO_SIZE,
			&app_reg->in_io_addr_limit);
	writel(app_reg->in_io_addr_limit + 1, &app_reg->in_cfg0_addr_start);
	writel(app_reg->in_cfg0_addr_start + IN_CFG0_SIZE,
			&app_reg->in_cfg0_addr_limit);
	writel(app_reg->in_cfg0_addr_limit + 1, &app_reg->in_cfg1_addr_start);
	writel(app_reg->in_cfg1_addr_start + IN_CFG1_SIZE,
			&app_reg->in_cfg1_addr_limit);
	writel(app_reg->in_cfg1_addr_limit + 1, &app_reg->in_msg_addr_start);
	writel(app_reg->in_msg_addr_start + IN_MSG_SIZE,
			&app_reg->in_msg_addr_limit);

	writel(app_reg->in0_mem_addr_start, &app_reg->pom0_mem_addr_start);
	writel(app_reg->in1_mem_addr_start, &app_reg->pom1_mem_addr_start);
	writel(app_reg->in_io_addr_start, &app_reg->pom_io_addr_start);

	/* Setup registers for inbound translation */

	/* Keep AORAM mapped at BAR0 as default */
	config->bar0_size = INBOUND_ADDR_MASK + 1;
	spear_dbi_write_reg(config, PCIE_BAR0_MASK_REG, 4, INBOUND_ADDR_MASK);
	spear_dbi_write_reg(config, PCI_BASE_ADDRESS_0, 4, 0xC);
	config->va_bar0_address = ioremap(SPEAR13XX_SYSRAM1_BASE,
			config->bar0_size);

	writel(SPEAR13XX_SYSRAM1_BASE, &app_reg->pim0_mem_addr_start);
	writel(0, &app_reg->pim1_mem_addr_start);
	writel(INBOUND_ADDR_MASK + 1, &app_reg->mem0_addr_offset_limit);

	writel(0x0, &app_reg->pim_io_addr_start);
	writel(0x0, &app_reg->pim_io_addr_start);
	writel(0x0, &app_reg->pim_rom_addr_start);

	writel(DEVICE_TYPE_EP | (1 << MISCTRL_EN_ID)
			| ((u32)1 << REG_TRANSLATION_ENABLE),
			&app_reg->app_ctrl_0);

	/* Disable all rx interrupts */
	writel(0, &app_reg->int_mask);

	/* Select INTA as default */
	spear_dbi_write_reg(config, PCI_INTERRUPT_LINE, 1, 1);
}
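
/*
 * Worked example of the outbound layout programmed above (illustration only,
 * assuming config->base is the start of the PCIe AXI slave window):
 *
 *   in0_mem : base .. base + 200MB - 1                    (IN0_MEM_SIZE)
 *   in1_mem : collapsed to zero size, only IN0 is used    (IN1_MEM_SIZE)
 *   in_io   : next 20MB                                   (IN_IO_SIZE)
 *   in_cfg0 : next 12MB                                   (IN_CFG0_SIZE)
 *   in_cfg1 : next 12MB                                   (IN_CFG1_SIZE)
 *   in_msg  : next 12MB                                   (IN_MSG_SIZE)
 *
 * Each limit register is start + size and the next window starts at
 * limit + 1, so the windows are packed back to back; the pom*_addr_start
 * registers mirror the corresponding in*_addr_start values.
 */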
static int spear_pcie_gadget_probe(struct platform_device *pdev)
{
	struct resource *res0, *res1;
	unsigned int status = 0;
	int irq;
	struct clk *clk;
	static struct pcie_gadget_target *target;
	struct spear_pcie_gadget_config *config;
	struct config_item *cg_item;
	struct configfs_subsystem *subsys;

	target = devm_kzalloc(&pdev->dev, sizeof(*target), GFP_KERNEL);
	if (!target) {
		dev_err(&pdev->dev, "out of memory\n");
		return -ENOMEM;
	}

	cg_item = &target->subsys.su_group.cg_item;
	sprintf(cg_item->ci_namebuf, "pcie_gadget.%d", pdev->id);
	cg_item->ci_type = &pcie_gadget_target_type;
	config = &target->config;

	/* Get resource for application registers */
	res0 = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	config->va_app_base = devm_ioremap_resource(&pdev->dev, res0);
	if (IS_ERR(config->va_app_base)) {
		dev_err(&pdev->dev, "ioremap fail\n");
		return PTR_ERR(config->va_app_base);
	}

	/* Get resource for DBI registers */
	res1 = platform_get_resource(pdev, IORESOURCE_MEM, 1);
	config->base = (void __iomem *)res1->start;

	config->va_dbi_base = devm_ioremap_resource(&pdev->dev, res1);
	if (IS_ERR(config->va_dbi_base)) {
		dev_err(&pdev->dev, "ioremap fail\n");
		return PTR_ERR(config->va_dbi_base);
	}

	platform_set_drvdata(pdev, target);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(&pdev->dev, "no update irq?\n");
		return irq;
	}

	status = devm_request_irq(&pdev->dev, irq, spear_pcie_gadget_irq,
			0, pdev->name, NULL);
	if (status) {
		dev_err(&pdev->dev,
			"pcie gadget interrupt IRQ%d already claimed\n", irq);
		return status;
	}

	/* Register configfs hooks */
	subsys = &target->subsys;
	config_group_init(&subsys->su_group);
	mutex_init(&subsys->su_mutex);
	status = configfs_register_subsystem(subsys);
	if (status)
		return status;

	/*
	 * Init basic PCIe application registers.
	 * Do not enable the clock if it is PCIE0: ideally, all controllers
	 * should be independent of each other with respect to clocks, but
	 * PCIE1 and PCIE2 depend on PCIE0, so the PCIE0 clock is enabled
	 * during board init.
	 */
	if (pdev->id == 1) {
		/*
		 * Ideally the CFG clock should also have been enabled here,
		 * but currently it is enabled in the board init routine.
		 */
		clk = clk_get_sys("pcie1", NULL);
		if (IS_ERR(clk)) {
			pr_err("%s:couldn't get clk for pcie1\n", __func__);
			return PTR_ERR(clk);
		}
		status = clk_enable(clk);
		if (status) {
			pr_err("%s:couldn't enable clk for pcie1\n", __func__);
			return status;
		}
	} else if (pdev->id == 2) {
		/*
		 * Ideally the CFG clock should also have been enabled here,
		 * but currently it is enabled in the board init routine.
		 */
		clk = clk_get_sys("pcie2", NULL);
		if (IS_ERR(clk)) {
			pr_err("%s:couldn't get clk for pcie2\n", __func__);
			return PTR_ERR(clk);
		}
		status = clk_enable(clk);
		if (status) {
			pr_err("%s:couldn't enable clk for pcie2\n", __func__);
			return status;
		}
	}
	spear13xx_pcie_device_init(config);

	return 0;
}
static int spear_pcie_gadget_remove(struct platform_device *pdev)
{
	static struct pcie_gadget_target *target;

	target = platform_get_drvdata(pdev);

	configfs_unregister_subsystem(&target->subsys);

	return 0;
}
static void spear_pcie_gadget_shutdown(struct platform_device *pdev)
{
}
static struct platform_driver spear_pcie_gadget_driver = {
	.probe = spear_pcie_gadget_probe,
	.remove = spear_pcie_gadget_remove,
	.shutdown = spear_pcie_gadget_shutdown,
	.driver = {
		.name = "pcie-gadget-spear",
		.bus = &platform_bus_type
	},
};

module_platform_driver(spear_pcie_gadget_driver);

MODULE_ALIAS("platform:pcie-gadget-spear");
MODULE_AUTHOR("Pratyush Anand");
MODULE_LICENSE("GPL");