/*
 * linux/drivers/ide/mips/au1xxx-ide.c  version 01.30.00  Aug. 02 2005
 *
 * BRIEF MODULE DESCRIPTION
 * AMD Alchemy Au1xxx IDE interface routines over the Static Bus
 *
 * Copyright (c) 2003-2005 AMD, Personal Connectivity Solutions
 *
 * This program is free software; you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation; either version 2 of the License, or (at your option) any later
 * version.
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Note: for more information, please refer to the "AMD Alchemy Au1200/Au1550
 * IDE Interface and Linux Device Driver" Application Note.
 */
#include <linux/types.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/delay.h>
#include <linux/platform_device.h>

#include <linux/init.h>
#include <linux/ide.h>
#include <linux/sysdev.h>

#include <linux/dma-mapping.h>

#include "ide-timing.h"

#include <asm/io.h>
#include <asm/mach-au1x00/au1xxx.h>
#include <asm/mach-au1x00/au1xxx_dbdma.h>

#include <asm/mach-au1x00/au1xxx_ide.h>

#define DRV_NAME	"au1200-ide"
#define DRV_VERSION	"1.0"
#define DRV_AUTHOR	"Enrico Walther <enrico.walther@amd.com> / Pete Popov <ppopov@embeddedalley.com>"

/* enable the burstmode in the dbdma */
#define IDE_AU1XXX_BURSTMODE	1

static _auide_hwif auide_hwif;
static int dbdma_init_done;
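
/*
 * PIO over DbDMA: when CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA is enabled,
 * auide_insw()/auide_outsw() replace the generic string I/O routines.
 * Each call queues the buffer on the RX/TX DbDMA channel and then
 * busy-waits until the current descriptor's valid bit clears, i.e. the
 * transfer has completed.
 */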
#if defined(CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA)

void auide_insw(unsigned long port, void *addr, u32 count)
{
	_auide_hwif *ahwif = &auide_hwif;
	chan_tab_t *ctp;
	au1x_ddma_desc_t *dp;

	if (!put_dest_flags(ahwif->rx_chan, (void *)addr, count << 1,
			    DDMA_FLAGS_NOIE)) {
		printk(KERN_ERR "%s failed %d\n", __FUNCTION__, __LINE__);
		return;
	}
	ctp = *((chan_tab_t **)ahwif->rx_chan);
	dp = ctp->cur_ptr;
	/* spin until the descriptor has been consumed */
	while (dp->dscr_cmd0 & DSCR_CMD0_V)
		;
	ctp->cur_ptr = au1xxx_ddma_get_nextptr_virt(dp);
}

void auide_outsw(unsigned long port, void *addr, u32 count)
{
	_auide_hwif *ahwif = &auide_hwif;
	chan_tab_t *ctp;
	au1x_ddma_desc_t *dp;

	if (!put_source_flags(ahwif->tx_chan, (void *)addr,
			      count << 1, DDMA_FLAGS_NOIE)) {
		printk(KERN_ERR "%s failed %d\n", __FUNCTION__, __LINE__);
		return;
	}
	ctp = *((chan_tab_t **)ahwif->tx_chan);
	dp = ctp->cur_ptr;
	/* spin until the descriptor has been consumed */
	while (dp->dscr_cmd0 & DSCR_CMD0_V)
		;
	ctp->cur_ptr = au1xxx_ddma_get_nextptr_virt(dp);
}

#endif

static void au1xxx_set_pio_mode(ide_drive_t *drive, const u8 pio)
{
	int mem_sttime = 0, mem_stcfg = au_readl(MEM_STCFG2);

	/* set pio mode! */
	switch (pio) {
	case 0:
		mem_sttime = SBC_IDE_TIMING(PIO0);

		/* set configuration for RCS2# */
		mem_stcfg |= TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO0_TCSOE | SBC_IDE_PIO0_TOECS;
		break;

	case 1:
		mem_sttime = SBC_IDE_TIMING(PIO1);

		/* set configuration for RCS2# */
		mem_stcfg |= TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO1_TCSOE | SBC_IDE_PIO1_TOECS;
		break;

	case 2:
		mem_sttime = SBC_IDE_TIMING(PIO2);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO2_TCSOE | SBC_IDE_PIO2_TOECS;
		break;

	case 3:
		mem_sttime = SBC_IDE_TIMING(PIO3);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO3_TCSOE | SBC_IDE_PIO3_TOECS;
		break;

	case 4:
		mem_sttime = SBC_IDE_TIMING(PIO4);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO4_TCSOE | SBC_IDE_PIO4_TOECS;
		break;
	}

	au_writel(mem_sttime, MEM_STTIME2);
	au_writel(mem_stcfg, MEM_STCFG2);
}

static void auide_set_dma_mode(ide_drive_t *drive, const u8 speed)
{
	int mem_sttime = 0, mem_stcfg = au_readl(MEM_STCFG2);

	switch (speed) {
#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
	case XFER_MW_DMA_2:
		mem_sttime = SBC_IDE_TIMING(MDMA2);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_MDMA2_TCSOE | SBC_IDE_MDMA2_TOECS;
		break;

	case XFER_MW_DMA_1:
		mem_sttime = SBC_IDE_TIMING(MDMA1);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_MDMA1_TCSOE | SBC_IDE_MDMA1_TOECS;
		break;

	case XFER_MW_DMA_0:
		mem_sttime = SBC_IDE_TIMING(MDMA0);

		/* set configuration for RCS2# */
		mem_stcfg |= TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_MDMA0_TCSOE | SBC_IDE_MDMA0_TOECS;
		break;
#endif
	}

	au_writel(mem_sttime, MEM_STTIME2);
	au_writel(mem_stcfg, MEM_STCFG2);
}

/*
 * Multi-Word DMA + DbDMA functions
 */

#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA

static int auide_build_sglist(ide_drive_t *drive, struct request *rq)
{
	ide_hwif_t *hwif = drive->hwif;
	_auide_hwif *ahwif = (_auide_hwif *)hwif->hwif_data;
	struct scatterlist *sg = hwif->sg_table;

	ide_map_sg(drive, rq);

	if (rq_data_dir(rq) == READ)
		hwif->sg_dma_direction = DMA_FROM_DEVICE;
	else
		hwif->sg_dma_direction = DMA_TO_DEVICE;

	return dma_map_sg(ahwif->dev, sg, hwif->sg_nents,
			  hwif->sg_dma_direction);
}
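
/*
 * Walk the request's scatterlist and queue one DbDMA descriptor per chunk.
 * Segments larger than 0xfe00 bytes are split, and only the last descriptor
 * is submitted with DDMA_FLAGS_IE so a single interrupt signals completion.
 * Returns 1 on success, 0 to fall back to PIO for this request.
 */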
static int auide_build_dmatable(ide_drive_t *drive)
{
	int i, iswrite, count = 0;
	ide_hwif_t *hwif = HWIF(drive);
	struct request *rq = HWGROUP(drive)->rq;
	_auide_hwif *ahwif = (_auide_hwif *)hwif->hwif_data;
	struct scatterlist *sg;

	iswrite = (rq_data_dir(rq) == WRITE);
	/* Save for interrupt context */
	ahwif->drive = drive;

	/* Build sglist */
	hwif->sg_nents = i = auide_build_sglist(drive, rq);

	if (!i)
		return 0;

	/* fill the descriptors */
	sg = hwif->sg_table;
	while (i && sg_dma_len(sg)) {
		u32 cur_addr;
		u32 cur_len;

		cur_addr = sg_dma_address(sg);
		cur_len = sg_dma_len(sg);

		while (cur_len) {
			u32 flags = DDMA_FLAGS_NOIE;
			unsigned int tc = (cur_len < 0xfe00) ? cur_len : 0xfe00;

			if (++count >= PRD_ENTRIES) {
				printk(KERN_WARNING "%s: DMA table too small\n",
				       drive->name);
				goto use_pio_instead;
			}

			/* enable the interrupt for the last descriptor only */
			if (i == 1)
				flags = DDMA_FLAGS_IE;
			else
				flags = DDMA_FLAGS_NOIE;

			if (iswrite) {
				if (!put_source_flags(ahwif->tx_chan,
						      (void *)sg_virt(sg),
						      tc, flags)) {
					printk(KERN_ERR "%s failed %d\n",
					       __FUNCTION__, __LINE__);
				}
			} else {
				if (!put_dest_flags(ahwif->rx_chan,
						    (void *)sg_virt(sg),
						    tc, flags)) {
					printk(KERN_ERR "%s failed %d\n",
					       __FUNCTION__, __LINE__);
				}
			}

			cur_addr += tc;
			cur_len -= tc;
		}
		sg = sg_next(sg);
		i--;
	}

	if (count)
		return 1;

use_pio_instead:
	dma_unmap_sg(ahwif->dev,
		     hwif->sg_table,
		     hwif->sg_nents,
		     hwif->sg_dma_direction);

	return 0; /* revert to PIO for this request */
}

static int auide_dma_end(ide_drive_t *drive)
{
	ide_hwif_t *hwif = HWIF(drive);
	_auide_hwif *ahwif = (_auide_hwif *)hwif->hwif_data;

	if (hwif->sg_nents) {
		dma_unmap_sg(ahwif->dev, hwif->sg_table, hwif->sg_nents,
			     hwif->sg_dma_direction);
		hwif->sg_nents = 0;
	}

	return 0;
}

static void auide_dma_start(ide_drive_t *drive)
{
	/* nothing to do: the DbDMA channel processes queued descriptors on its own */
}

static void auide_dma_exec_cmd(ide_drive_t *drive, u8 command)
{
	/* issue cmd to drive */
	ide_execute_command(drive, command, &ide_dma_intr,
			    (2 * WAIT_CMD), NULL);
}

static int auide_dma_setup(ide_drive_t *drive)
{
	struct request *rq = HWGROUP(drive)->rq;

	if (!auide_build_dmatable(drive)) {
		ide_map_sg(drive, rq);
		return 1;
	}

	drive->waiting_for_dma = 1;
	return 0;
}
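
/*
 * The IDE core calls this filter to learn which MWDMA modes are acceptable.
 * The first call also classifies the drive against the DMA white/black
 * lists and performs the one-time DbDMA channel setup.
 */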
static u8 auide_mdma_filter(ide_drive_t *drive)
{
	/*
	 * FIXME: ->white_list and ->black_list are based on a completely bogus
	 * ->ide_dma_check implementation which set neither the host
	 * controller timings nor the device up for the desired transfer mode.
	 *
	 * They should either be removed, or a 0x00 MWDMA mask should be
	 * returned for devices on the ->black_list.
	 */

	if (dbdma_init_done == 0) {
		auide_hwif.white_list = ide_in_drive_list(drive->id,
							  dma_white_list);
		auide_hwif.black_list = ide_in_drive_list(drive->id,
							  dma_black_list);
		auide_hwif.drive = drive;
		auide_ddma_init(&auide_hwif);
		dbdma_init_done = 1;
	}

	/* Is the drive in our DMA black list? */
	if (auide_hwif.black_list)
		printk(KERN_WARNING "%s: Disabling DMA for %s (blacklisted)\n",
		       drive->name, drive->id->model);

	return drive->hwif->mwdma_mask;
}

static int auide_dma_test_irq(ide_drive_t *drive)
{
	if (drive->waiting_for_dma == 0)
		printk(KERN_WARNING "%s: ide_dma_test_irq called while not waiting\n",
		       drive->name);

	/* If dbdma didn't execute the STOP command yet, the
	 * active bit is still set
	 */
	drive->waiting_for_dma++;
	if (drive->waiting_for_dma >= DMA_WAIT_TIMEOUT) {
		printk(KERN_WARNING "%s: timeout waiting for ddma to complete\n",
		       drive->name);
		return 1;
	}
	udelay(10);
	return 0;
}

static void auide_dma_host_set(ide_drive_t *drive, int on)
{
}

static void auide_dma_lost_irq(ide_drive_t *drive)
{
	printk(KERN_ERR "%s: IRQ lost\n", drive->name);
}

static void auide_ddma_tx_callback(int irq, void *param)
{
	_auide_hwif *ahwif = (_auide_hwif *)param;

	ahwif->drive->waiting_for_dma = 0;
}

static void auide_ddma_rx_callback(int irq, void *param)
{
	_auide_hwif *ahwif = (_auide_hwif *)param;

	ahwif->drive->waiting_for_dma = 0;
}

#endif /* end CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA */

static void auide_init_dbdma_dev(dbdev_tab_t *dev, u32 dev_id, u32 tsize, u32 devwidth, u32 flags)
{
	dev->dev_id          = dev_id;
	dev->dev_physaddr    = (u32)AU1XXX_ATA_PHYS_ADDR;
	dev->dev_intlevel    = 0;
	dev->dev_intpolarity = 0;
	dev->dev_tsize       = tsize;
	dev->dev_devwidth    = devwidth;
	dev->dev_flags       = flags;
}

#if defined(CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA)

static void auide_dma_timeout(ide_drive_t *drive)
{
	ide_hwif_t *hwif = HWIF(drive);

	printk(KERN_ERR "%s: DMA timeout occurred: ", drive->name);

	if (hwif->ide_dma_test_irq(drive))
		return;

	hwif->ide_dma_end(drive);
}
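
/*
 * Register the ATA request source/sink and an "always" target with the
 * DbDMA controller, allocate the TX/RX channels and their descriptor
 * rings, and start both channels. Drives that are not on the whitelist
 * get conservative 1 x 16-bit device settings instead of 8 x 32-bit
 * bursts.
 */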
static int auide_ddma_init(_auide_hwif *auide)
{
	dbdev_tab_t source_dev_tab, target_dev_tab;
	u32 dev_id, tsize, devwidth, flags;
	ide_hwif_t *hwif = auide->hwif;

	dev_id = AU1XXX_ATA_DDMA_REQ;

	if (auide->white_list || auide->black_list) {
		tsize = 8;
		devwidth = 32;
	} else {
		tsize = 1;
		devwidth = 16;

		printk(KERN_ERR "au1xxx-ide: %s is not on ide driver whitelist.\n",
		       auide_hwif.drive->id->model);
		printk(KERN_ERR "            please read 'Documentation/mips/AU1xxx_IDE.README'");
	}

#ifdef IDE_AU1XXX_BURSTMODE
	flags = DEV_FLAGS_SYNC | DEV_FLAGS_BURSTABLE;
#else
	flags = DEV_FLAGS_SYNC;
#endif

	/* setup dev_tab for tx channel */
	auide_init_dbdma_dev(&source_dev_tab,
			     dev_id,
			     tsize, devwidth, DEV_FLAGS_OUT | flags);
	auide->tx_dev_id = au1xxx_ddma_add_device(&source_dev_tab);

	auide_init_dbdma_dev(&source_dev_tab,
			     dev_id,
			     tsize, devwidth, DEV_FLAGS_IN | flags);
	auide->rx_dev_id = au1xxx_ddma_add_device(&source_dev_tab);

	/* We also need to add a target device for the DMA */
	auide_init_dbdma_dev(&target_dev_tab,
			     (u32)DSCR_CMD0_ALWAYS,
			     tsize, devwidth, DEV_FLAGS_ANYUSE);
	auide->target_dev_id = au1xxx_ddma_add_device(&target_dev_tab);

	/* Get a channel for TX */
	auide->tx_chan = au1xxx_dbdma_chan_alloc(auide->target_dev_id,
						 auide->tx_dev_id,
						 auide_ddma_tx_callback,
						 (void *)auide);

	/* Get a channel for RX */
	auide->rx_chan = au1xxx_dbdma_chan_alloc(auide->rx_dev_id,
						 auide->target_dev_id,
						 auide_ddma_rx_callback,
						 (void *)auide);

	auide->tx_desc_head = (void *)au1xxx_dbdma_ring_alloc(auide->tx_chan,
							      NUM_DESCRIPTORS);
	auide->rx_desc_head = (void *)au1xxx_dbdma_ring_alloc(auide->rx_chan,
							      NUM_DESCRIPTORS);

	hwif->dmatable_cpu = dma_alloc_coherent(auide->dev,
						PRD_ENTRIES * PRD_BYTES, /* 1 Page */
						&hwif->dmatable_dma, GFP_KERNEL);

	au1xxx_dbdma_start(auide->tx_chan);
	au1xxx_dbdma_start(auide->rx_chan);

	return 0;
}

#else

static int auide_ddma_init(_auide_hwif *auide)
{
	dbdev_tab_t source_dev_tab;
	int flags;

#ifdef IDE_AU1XXX_BURSTMODE
	flags = DEV_FLAGS_SYNC | DEV_FLAGS_BURSTABLE;
#else
	flags = DEV_FLAGS_SYNC;
#endif

	/* setup dev_tab for tx channel */
	auide_init_dbdma_dev(&source_dev_tab,
			     (u32)DSCR_CMD0_ALWAYS,
			     8, 32, DEV_FLAGS_OUT | flags);
	auide->tx_dev_id = au1xxx_ddma_add_device(&source_dev_tab);

	auide_init_dbdma_dev(&source_dev_tab,
			     (u32)DSCR_CMD0_ALWAYS,
			     8, 32, DEV_FLAGS_IN | flags);
	auide->rx_dev_id = au1xxx_ddma_add_device(&source_dev_tab);

	/* Get a channel for TX */
	auide->tx_chan = au1xxx_dbdma_chan_alloc(DSCR_CMD0_ALWAYS,
						 auide->tx_dev_id,
						 NULL,
						 (void *)auide);

	/* Get a channel for RX */
	auide->rx_chan = au1xxx_dbdma_chan_alloc(auide->rx_dev_id,
						 DSCR_CMD0_ALWAYS,
						 NULL,
						 (void *)auide);

	auide->tx_desc_head = (void *)au1xxx_dbdma_ring_alloc(auide->tx_chan,
							      NUM_DESCRIPTORS);
	auide->rx_desc_head = (void *)au1xxx_dbdma_ring_alloc(auide->rx_chan,
							      NUM_DESCRIPTORS);

	au1xxx_dbdma_start(auide->tx_chan);
	au1xxx_dbdma_start(auide->rx_chan);

	return 0;
}
#endif

static void auide_setup_ports(hw_regs_t *hw, _auide_hwif *ahwif)
{
	int i;
	unsigned long *ata_regs = hw->io_ports;

	/* FIXME? */
	for (i = 0; i < IDE_CONTROL_OFFSET; i++) {
		*ata_regs++ = ahwif->regbase + (i << AU1XXX_ATA_REG_OFFSET);
	}

	/* set the Alternative Status register */
	*ata_regs = ahwif->regbase + (14 << AU1XXX_ATA_REG_OFFSET);
}
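
/*
 * Platform bus probe: map the Static Bus register window, describe the
 * port layout to the IDE core, disable UDMA, limit the interface to PIO4
 * (plus MWDMA0-2 when the MDMA2+DbDMA option is enabled), and hook up the
 * DbDMA based transfer operations before registering the hwif.
 */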
static int au_ide_probe(struct device *dev)
{
	struct platform_device *pdev = to_platform_device(dev);
	_auide_hwif *ahwif = &auide_hwif;
	ide_hwif_t *hwif;
	struct resource *res;
	int ret = 0;
	u8 idx[4] = { 0xff, 0xff, 0xff, 0xff };
	hw_regs_t hw;

#if defined(CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA)
	char *mode = "MWDMA2";
#elif defined(CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA)
	char *mode = "PIO+DDMA(offload)";
#endif

	memset(&auide_hwif, 0, sizeof(_auide_hwif));
	auide_hwif.dev = 0;

	ahwif->dev = dev;
	ahwif->irq = platform_get_irq(pdev, 0);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);

	if (res == NULL) {
		pr_debug("%s %d: no base address\n", DRV_NAME, pdev->id);
		ret = -ENODEV;
		goto out;
	}
	if (ahwif->irq < 0) {
		pr_debug("%s %d: no IRQ\n", DRV_NAME, pdev->id);
		ret = -ENODEV;
		goto out;
	}

	if (!request_mem_region(res->start, res->end - res->start, pdev->name)) {
		pr_debug("%s: request_mem_region failed\n", DRV_NAME);
		ret = -EBUSY;
		goto out;
	}

	ahwif->regbase = (u32)ioremap(res->start, res->end - res->start);
	if (ahwif->regbase == 0) {
		ret = -ENOMEM;
		goto out;
	}

	/* FIXME: This might possibly break PCMCIA IDE devices */

	hwif = &ide_hwifs[pdev->id];

	memset(&hw, 0, sizeof(hw));
	auide_setup_ports(&hw, ahwif);
	hw.irq = ahwif->irq;
	hw.chipset = ide_au1xxx;

	ide_init_port_hw(hwif, &hw);

	hwif->ultra_mask = 0x0;		/* Disable Ultra DMA */
#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
	hwif->mwdma_mask = 0x07;	/* Multimode-2 DMA */
	hwif->swdma_mask = 0x00;
#else
	hwif->mwdma_mask = 0x0;
	hwif->swdma_mask = 0x0;
#endif

	hwif->pio_mask = ATA_PIO4;
	hwif->host_flags = IDE_HFLAG_POST_SET_MODE;

	hwif->drives[0].unmask = 1;
	hwif->drives[1].unmask = 1;

	/* hold should be on in all cases */
	hwif->hold = 1;

	hwif->mmio = 1;

	/* If the user has selected DDMA assisted copies,
	   then set up a few local I/O function entry points
	 */
#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA
	hwif->INSW = auide_insw;
	hwif->OUTSW = auide_outsw;
#endif

	hwif->set_pio_mode = &au1xxx_set_pio_mode;
	hwif->set_dma_mode = &auide_set_dma_mode;

#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
	hwif->dma_timeout = &auide_dma_timeout;
	hwif->mdma_filter = &auide_mdma_filter;

	hwif->dma_host_set = &auide_dma_host_set;
	hwif->dma_exec_cmd = &auide_dma_exec_cmd;
	hwif->dma_start = &auide_dma_start;
	hwif->ide_dma_end = &auide_dma_end;
	hwif->dma_setup = &auide_dma_setup;
	hwif->ide_dma_test_irq = &auide_dma_test_irq;
	hwif->dma_lost_irq = &auide_dma_lost_irq;
#endif
	hwif->channel = 0;
	hwif->select_data = 0;		/* no chipset-specific code */
	hwif->config_data = 0;		/* no chipset-specific code */

	hwif->drives[0].autotune = 1;	/* 1=autotune, 2=noautotune, 0=default */
	hwif->drives[1].autotune = 1;

	hwif->drives[0].no_io_32bit = 1;
	hwif->drives[1].no_io_32bit = 1;

	auide_hwif.hwif = hwif;
	hwif->hwif_data = &auide_hwif;

#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA
	auide_ddma_init(&auide_hwif);
	dbdma_init_done = 1;
#endif

	idx[0] = hwif->index;

	ide_device_add(idx);

	dev_set_drvdata(dev, hwif);

	printk(KERN_INFO "Au1xxx IDE(builtin) configured for %s\n", mode);

out:
	return ret;
}

static int au_ide_remove(struct device *dev)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct resource *res;
	ide_hwif_t *hwif = dev_get_drvdata(dev);
	_auide_hwif *ahwif = &auide_hwif;

	ide_unregister(hwif - ide_hwifs);

	iounmap((void *)ahwif->regbase);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	release_mem_region(res->start, res->end - res->start);

	return 0;
}

static struct device_driver au1200_ide_driver = {
	.name		= "au1200-ide",
	.bus		= &platform_bus_type,
	.probe		= au_ide_probe,
	.remove		= au_ide_remove,
};

static int __init au_ide_init(void)
{
	return driver_register(&au1200_ide_driver);
}

static void __exit au_ide_exit(void)
{
	driver_unregister(&au1200_ide_driver);
}

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("AU1200 IDE driver");

module_init(au_ide_init);
module_exit(au_ide_exit);