#include <KernelExport.h>

typedef vint32 MM_ATOMIC_T;

//#define MM_SWAP_LE16(x) B_SWAP_INT16(x)
#define MM_SWAP_LE16(x) x
#define MM_SWAP_LE32(x) x
#define MM_SWAP_BE32(x) B_SWAP_INT32(x)

/*#define MM_ATOMIC_SET(ptr, val) atomic_and(ptr, 0); atomic_add(ptr,val)
#define MM_ATOMIC_READ(ptr) atomic_add(ptr,0)
#define MM_ATOMIC_INC(ptr) atomic_add(ptr,1)
#define MM_ATOMIC_ADD(ptr, val) atomic_add(ptr,val)
#define MM_ATOMIC_DEC(ptr) atomic_add(ptr,-1)
#define MM_ATOMIC_SUB(ptr, val) atomic_add(ptr,0-val)*/

#define MM_ATOMIC_SET(ptr, val) *(ptr) = (val)
#define MM_ATOMIC_READ(ptr) *(ptr)
#define MM_ATOMIC_INC(ptr) (*(ptr))++
#define MM_ATOMIC_ADD(ptr, val) *(ptr) += (val)
#define MM_ATOMIC_DEC(ptr) (*(ptr))--
#define MM_ATOMIC_SUB(ptr, val) *(ptr) -= (val)

/* All critical sections are already protected by the driver's own locking,
   so plain (non-atomic) operations are sufficient here. */
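/* Illustrative sketch (not part of the original header): because callers are
 * assumed to hold the driver's locks, the MM_ATOMIC_* macros above expand to
 * plain loads and stores. A hypothetical counter would be used like this:
 *
 *     MM_ATOMIC_T ref_count;
 *     MM_ATOMIC_SET(&ref_count, 0);    // *(&ref_count) = 0
 *     MM_ATOMIC_INC(&ref_count);       // (*(&ref_count))++
 *     if (MM_ATOMIC_READ(&ref_count) == 1)
 *         ;
 */
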
#define __io_virt(x) ((void *)(x))
#define readl(addr) (*(volatile unsigned int *) __io_virt(addr))
#define writel(b,addr) (*(volatile unsigned int *) __io_virt(addr) = (b))
#define __raw_readl readl
#define __raw_writel writel

#define MM_MEMWRITEL(ptr, val) __raw_writel(val, ptr)
#define MM_MEMREADL(ptr) __raw_readl(ptr)
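/* Illustrative sketch (assumption, not from this header): MM_MEMREADL and
 * MM_MEMWRITEL perform 32-bit accesses through a pointer into the adapter's
 * mapped register window, e.g. with a hypothetical mapped base "regs":
 *
 *     volatile char *regs;                           // hypothetical MMIO mapping
 *     MM_MEMWRITEL(regs + 0x0400, 0x1);              // 32-bit register write
 *     unsigned int val = MM_MEMREADL(regs + 0x0404); // 32-bit register read
 */
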
#define mb() __asm__ __volatile__ ("lock; addl $0,0(%%esp)": : :"memory")

#define mb() memory_write_barrier()

#warning no memory barrier function defined.

#define readl(addr) (*(volatile unsigned int *) __io_virt(addr))

#define MM_WMB() wmb()
#define MM_RMB() rmb()

extern int b57_Packet_Desc_Size;

#define MM_PACKET_DESC_SIZE b57_Packet_Desc_Size

	struct _LM_DEVICE_BLOCK lm_dev;

	struct pci_info pci_data;

	sem_id packet_release_sem;
	//sem_id interrupt_sem;
	//thread_id interrupt_handler;

	LM_RX_PACKET_Q RxPacketReadQ;

	area_id lockmem_list[16];

#ifdef HAIKU_TARGET_PLATFORM_HAIKU

	struct _LM_PACKET pkt;

static inline void MM_MapRxDma(PLM_DEVICE_BLOCK pDevice,
	struct _LM_PACKET *pPacket,
	T3_64BIT_HOST_ADDR *paddr)
{
	physical_entry entry;
	struct B_UM_PACKET *bpkt = (struct B_UM_PACKET *)(pPacket);

	get_memory_map(bpkt->data, pPacket->u.Rx.RxBufferSize, &entry, 1);
	paddr->Low = (LM_UINT32)entry.address;
	paddr->High = (LM_UINT32)(entry.address >> 32);
}
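/* Illustrative note (assumption, not from the original): get_memory_map()
 * translates the packet buffer's virtual address into a physical address,
 * which is then split into the Low/High 32-bit halves of the controller's
 * 64-bit descriptor address. A hypothetical caller might do:
 *
 *     T3_64BIT_HOST_ADDR addr;
 *     MM_MapRxDma(pDevice, pPacket, &addr);
 *     // addr.Low / addr.High can now be written into the RX descriptor
 */
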
static inline void MM_MapTxDma(PLM_DEVICE_BLOCK pDevice,
	struct _LM_PACKET *pPacket,
	T3_64BIT_HOST_ADDR *paddr, LM_UINT32 *len, int frag)
{
	struct B_UM_PACKET *pkt = (struct B_UM_PACKET *)pPacket;
	physical_entry entry;

	get_memory_map(pkt->data, pkt->size, &entry, 1);
	paddr->Low = (LM_UINT32)entry.address;
	paddr->High = (LM_UINT32)(entry.address >> 32);
	*len = pPacket->PacketSize;
}

#if (BITS_PER_LONG == 64)
#define MM_GETSTATS(_Ctr) \
	(unsigned long) (_Ctr).Low + ((unsigned long) (_Ctr).High << 32)
#else
#define MM_GETSTATS(_Ctr) \
	(unsigned long) (_Ctr).Low
#endif
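/* Illustrative note (assumption, not from this header): the controller keeps
 * its statistics as 64-bit counters split into 32-bit Low/High words. On a
 * 64-bit build both halves are recombined; on a 32-bit build only the low
 * word fits in an unsigned long. A hypothetical counter field would be read
 * as:
 *
 *     unsigned long frames_ok = MM_GETSTATS(stats_block.SomeCounter);
 */
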
#define MM_ACQUIRE_UNDI_LOCK(_pDevice) /* \
	((struct be_b57_dev *)(_pDevice))->cpu = disable_interrupts(); \
	acquire_spinlock(&(((struct be_b57_dev *)(_pDevice))->lock)); */

#define MM_RELEASE_UNDI_LOCK(_pDevice) /* \
	release_spinlock(&(((struct be_b57_dev *)(_pDevice))->lock)); \
	enable_interrupts(((struct be_b57_dev *)(_pDevice))->cpu); */

#define MM_ACQUIRE_PHY_LOCK_IN_IRQ(_pDevice) /* \
	((struct be_b57_dev *)(_pDevice))->cpu = disable_interrupts(); \
	acquire_spinlock(&(((struct be_b57_dev *)(_pDevice))->lock)); */

#define MM_RELEASE_PHY_LOCK_IN_IRQ(_pDevice) /* \
	release_spinlock(&(((struct be_b57_dev *)(_pDevice))->lock)); \
	enable_interrupts(((struct be_b57_dev *)(_pDevice))->cpu); */

#define MM_PTR(_ptr) ((unsigned long) (_ptr))
#define MM_UINT_PTR(_ptr) ((unsigned long) (_ptr))
#define printf(fmt, args...) dprintf(fmt, ##args)
#define DbgPrint(fmt, arg...) dprintf(fmt, ##arg)
#define DbgBreakPoint()
#define MM_Wait(time) udelay(time)
#define ASSERT(expr) \
	if (!(expr)) { \
		dprintf("ASSERT failed: %s\n", #expr); \