/*
 * Copyright 2004-2007, Haiku, Inc. All Rights Reserved.
 * Copyright 2002/03, Thomas Kurschel. All rights reserved.
 *
 * Distributed under the terms of the MIT License.
 */

//! DMA helper functions
11 #include "ide_internal.h"
/* If the infoblock flags the given mode as selected, record its value and
 * count it; callers use the count to detect inconsistent infoblocks that
 * flag zero or several modes at once.
 * Wrapped in do { } while (0) so the macro is dangling-else safe, and all
 * arguments are parenthesized against operator-precedence surprises.
 */
#define CHECK_DEV_DMA_MODE(infoblock, elem, mode, this_mode, num_modes) \
	do { \
		if ((infoblock)->elem) { \
			(mode) = (this_mode); \
			++(num_modes); \
		} \
	} while (0)
21 get_device_dma_mode(ide_device_info
*device
)
23 ide_device_infoblock
*infoblock
= &device
->infoblock
;
30 if (!infoblock
->DMA_supported
)
33 CHECK_DEV_DMA_MODE(infoblock
, MDMA0_selected
, mode
, 0, num_modes
);
34 CHECK_DEV_DMA_MODE(infoblock
, MDMA1_selected
, mode
, 1, num_modes
);
35 CHECK_DEV_DMA_MODE(infoblock
, MDMA2_selected
, mode
, 2, num_modes
);
37 if (infoblock
->_88_valid
) {
38 CHECK_DEV_DMA_MODE(infoblock
, UDMA0_selected
, mode
, 0x10, num_modes
);
39 CHECK_DEV_DMA_MODE(infoblock
, UDMA1_selected
, mode
, 0x11, num_modes
);
40 CHECK_DEV_DMA_MODE(infoblock
, UDMA2_selected
, mode
, 0x12, num_modes
);
41 CHECK_DEV_DMA_MODE(infoblock
, UDMA3_selected
, mode
, 0x13, num_modes
);
42 CHECK_DEV_DMA_MODE(infoblock
, UDMA4_selected
, mode
, 0x14, num_modes
);
43 CHECK_DEV_DMA_MODE(infoblock
, UDMA5_selected
, mode
, 0x15, num_modes
);
44 CHECK_DEV_DMA_MODE(infoblock
, UDMA6_selected
, mode
, 0x16, num_modes
);
50 SHOW_FLOW(3, "%x", mode
);
57 configure_dma(ide_device_info
*device
)
59 if (get_device_dma_mode(device
) != -1) {
60 device
->DMA_enabled
= device
->DMA_supported
= device
->bus
->can_DMA
;
61 if (device
->DMA_enabled
) {
62 dprintf("IDE: enabling DMA\n");
64 dprintf("IDE: disabling DMA (failsafe option selected)\n");
67 device
->DMA_enabled
= false;
68 dprintf("IDE: DMA not possible, disabling\n");
75 /*! Abort DMA transmission
76 must be called _before_ start_dma_wait
79 abort_dma(ide_device_info
*device
, ide_qrequest
*qrequest
)
81 ide_bus_info
*bus
= device
->bus
;
85 bus
->controller
->finish_dma(bus
->channel_cookie
);
89 /*! Prepare DMA transmission
90 on return, DMA engine waits for device to transmit data
91 warning: doesn't set sense data on error
94 prepare_dma(ide_device_info
*device
, ide_qrequest
*qrequest
)
96 ide_bus_info
*bus
= device
->bus
;
97 scsi_ccb
*request
= qrequest
->request
;
100 res
= bus
->controller
->prepare_dma(bus
->channel_cookie
, request
->sg_list
,
101 request
->sg_count
, qrequest
->is_write
);
109 /*! Start waiting for DMA to be finished */
111 start_dma_wait(ide_device_info
*device
, ide_qrequest
*qrequest
)
113 ide_bus_info
*bus
= device
->bus
;
115 bus
->controller
->start_dma(bus
->channel_cookie
);
117 start_waiting(bus
, qrequest
->request
->timeout
> 0 ?
118 qrequest
->request
->timeout
: IDE_STD_TIMEOUT
, ide_state_async_waiting
);
122 /*! Start waiting for DMA to be finished with bus lock not hold */
124 start_dma_wait_no_lock(ide_device_info
*device
, ide_qrequest
*qrequest
)
126 ide_bus_info
*bus
= device
->bus
;
129 start_dma_wait(device
, qrequest
);
133 /*! Finish dma transmission after device has fired IRQ */
135 finish_dma(ide_device_info
*device
)
137 ide_bus_info
*bus
= device
->bus
;
140 dma_res
= bus
->controller
->finish_dma(bus
->channel_cookie
);
142 return dma_res
== B_OK
|| dma_res
== B_DEV_DATA_OVERRUN
;