/* $Id: netjet.c,v 1.29.2.4 2004/02/11 13:21:34 keil Exp $
 *
 * low level stuff for Traverse Technologie NETJet ISDN cards
 *
 * Author       Karsten Keil
 * Copyright    by Karsten Keil      <keil@isdn4linux.de>
 *
 * This software may be used and distributed according to the terms
 * of the GNU General Public License, incorporated herein by reference.
 *
 * Thanks to Traverse Technologies Australia for documents and information
 *
 * 16-Apr-2002 - led code added - Guy Ellis (guy@traverse.com.au)
 *
 */

#include <linux/init.h>
#include "hisax.h"
#include "isac.h"
#include "hscx.h"
#include "isdnl1.h"
#include <linux/pci.h>
#include <linux/interrupt.h>
#include <linux/ppp_defs.h>
#include <asm/io.h>
#include "netjet.h"

/* Interface functions */
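
/*
 * Note on the access helpers below (a reading of the code, not vendor
 * documentation): the ISAC register offset is split in two.  Bits 4-5
 * of the offset are latched into the low bits of the auxiliary data
 * port (auxd written to auxa), then the low nibble selects one of
 * sixteen 32-bit-spaced locations in the hw.njet.isac I/O window.
 */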
u_char
NETjet_ReadIC(struct IsdnCardState *cs, u_char offset)
{
	u_char ret;

	cs->hw.njet.auxd &= 0xfc;
	cs->hw.njet.auxd |= (offset>>4) & 3;
	byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
	ret = bytein(cs->hw.njet.isac + ((offset & 0xf)<<2));
	return(ret);
}

void
NETjet_WriteIC(struct IsdnCardState *cs, u_char offset, u_char value)
{
	cs->hw.njet.auxd &= 0xfc;
	cs->hw.njet.auxd |= (offset>>4) & 3;
	byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
	byteout(cs->hw.njet.isac + ((offset & 0xf)<<2), value);
}

void
NETjet_ReadICfifo(struct IsdnCardState *cs, u_char *data, int size)
{
	cs->hw.njet.auxd &= 0xfc;
	byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
	insb(cs->hw.njet.isac, data, size);
}

void
NETjet_WriteICfifo(struct IsdnCardState *cs, u_char *data, int size)
{
	cs->hw.njet.auxd &= 0xfc;
	byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
	outsb(cs->hw.njet.isac, data, size);
}

static void fill_mem(struct BCState *bcs, u_int *pos, u_int cnt, int chan, u_char fill)
{
	u_int mask=0x000000ff, val = 0, *p=pos;
	u_int i;

	val |= fill;
	if (chan) {
		val <<= 8;
		mask <<= 8;
	}
	mask ^= 0xffffffff;
	for (i=0; i<cnt; i++) {
		*p &= mask;
		*p++ |= val;
		if (p > bcs->hw.tiger.s_end)
			p = bcs->hw.tiger.send;
	}
}
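
/*
 * fill_mem() preloads one channel's byte lane of the shared DMA send
 * ring with a fill pattern (0xff, i.e. HDLC idle, in all callers).
 * Channel 0 lives in bits 0-7 of each 32-bit ring word, channel 1 in
 * bits 8-15; the other lane is left untouched via the inverted mask.
 */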
static void
mode_tiger(struct BCState *bcs, int mode, int bc)
{
	struct IsdnCardState *cs = bcs->cs;
	u_char led;

	if (cs->debug & L1_DEB_HSCX)
		debugl1(cs, "Tiger mode %d bchan %d/%d",
			mode, bc, bcs->channel);
	bcs->mode = mode;
	bcs->channel = bc;
	switch (mode) {
		case (L1_MODE_NULL):
			fill_mem(bcs, bcs->hw.tiger.send,
				NETJET_DMA_TXSIZE, bc, 0xff);
			if (cs->debug & L1_DEB_HSCX)
				debugl1(cs, "Tiger stat rec %d/%d send %d",
					bcs->hw.tiger.r_tot, bcs->hw.tiger.r_err,
					bcs->hw.tiger.s_tot);
			if ((cs->bcs[0].mode == L1_MODE_NULL) &&
				(cs->bcs[1].mode == L1_MODE_NULL)) {
				cs->hw.njet.dmactrl = 0;
				byteout(cs->hw.njet.base + NETJET_DMACTRL,
					cs->hw.njet.dmactrl);
				byteout(cs->hw.njet.base + NETJET_IRQMASK0, 0);
			}
			if (cs->typ == ISDN_CTYPE_NETJET_S)
			{
				// led off
				led = bc & 0x01;
				led = 0x01 << (6 + led); // convert to mask
				led = ~led;
				cs->hw.njet.auxd &= led;
				byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
			}
			break;
		case (L1_MODE_TRANS):
			break;
		case (L1_MODE_HDLC_56K):
		case (L1_MODE_HDLC):
			fill_mem(bcs, bcs->hw.tiger.send,
				NETJET_DMA_TXSIZE, bc, 0xff);
			bcs->hw.tiger.r_state = HDLC_ZERO_SEARCH;
			bcs->hw.tiger.r_tot = 0;
			bcs->hw.tiger.r_bitcnt = 0;
			bcs->hw.tiger.r_one = 0;
			bcs->hw.tiger.r_err = 0;
			bcs->hw.tiger.s_tot = 0;
			if (! cs->hw.njet.dmactrl) {
				fill_mem(bcs, bcs->hw.tiger.send,
					NETJET_DMA_TXSIZE, !bc, 0xff);
				cs->hw.njet.dmactrl = 1;
				byteout(cs->hw.njet.base + NETJET_DMACTRL,
					cs->hw.njet.dmactrl);
				byteout(cs->hw.njet.base + NETJET_IRQMASK0, 0x0f);
				/* was 0x3f now 0x0f for TJ300 and TJ320  GE 13/07/00 */
			}
			bcs->hw.tiger.sendp = bcs->hw.tiger.send;
			bcs->hw.tiger.free = NETJET_DMA_TXSIZE;
			test_and_set_bit(BC_FLG_EMPTY, &bcs->Flag);
			if (cs->typ == ISDN_CTYPE_NETJET_S)
			{
				// led on
				led = bc & 0x01;
				led = 0x01 << (6 + led); // convert to mask
				cs->hw.njet.auxd |= led;
				byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
			}
			break;
	}
	if (cs->debug & L1_DEB_HSCX)
		debugl1(cs, "tiger: set %x %x %x  %x/%x  pulse=%d",
			bytein(cs->hw.njet.base + NETJET_DMACTRL),
			bytein(cs->hw.njet.base + NETJET_IRQMASK0),
			bytein(cs->hw.njet.base + NETJET_IRQSTAT0),
			inl(cs->hw.njet.base + NETJET_DMA_READ_ADR),
			inl(cs->hw.njet.base + NETJET_DMA_WRITE_ADR),
			bytein(cs->hw.njet.base + NETJET_PULSE_CNT));
}

static void printframe(struct IsdnCardState *cs, u_char *buf, int count, char *s) {
	char tmp[128];
	char *t = tmp;
	int i = count, j;
	u_char *p = buf;

	t += sprintf(t, "tiger %s(%4d)", s, count);
	while (i > 0) {
		if (i > 16)
			j = 16;
		else
			j = i;
		QuickHex(t, p, j);
		debugl1(cs, tmp);
		p += j;
		i -= j;
		t = tmp;
		t += sprintf(t, "tiger %s      ", s);
	}
}

// macro for 64k

#define MAKE_RAW_BYTE for (j=0; j<8; j++) { \
			bitcnt++;\
			s_val >>= 1;\
			if (val & 1) {\
				s_one++;\
				s_val |= 0x80;\
			} else {\
				s_one = 0;\
				s_val &= 0x7f;\
			}\
			if (bitcnt==8) {\
				bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
				bitcnt = 0;\
			}\
			if (s_one == 5) {\
				s_val >>= 1;\
				s_val &= 0x7f;\
				bitcnt++;\
				s_one = 0;\
			}\
			if (bitcnt==8) {\
				bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
				bitcnt = 0;\
			}\
			val >>= 1;\
		}
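
/*
 * MAKE_RAW_BYTE performs HDLC bit stuffing for the 64k case: bits are
 * shifted into s_val LSB first, a 0 bit is inserted after five
 * consecutive 1 bits (s_one == 5), and every completed octet is
 * appended to hw.tiger.sendbuf.
 */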
static int make_raw_data(struct BCState *bcs) {
	// this make_raw is for 64k
	register u_int i, s_cnt = 0;
	register u_char j;
	register u_char val;
	register u_char s_one = 0;
	register u_char s_val = 0;
	register u_char bitcnt = 0;
	u_int fcs;

	if (!bcs->tx_skb) {
		debugl1(bcs->cs, "tiger make_raw: NULL skb");
		return(1);
	}
	bcs->hw.tiger.sendbuf[s_cnt++] = HDLC_FLAG_VALUE;
	fcs = PPP_INITFCS;
	for (i=0; i<bcs->tx_skb->len; i++) {
		val = bcs->tx_skb->data[i];
		fcs = PPP_FCS (fcs, val);
		MAKE_RAW_BYTE;
	}
	fcs ^= 0xffff;
	val = fcs & 0xff;
	MAKE_RAW_BYTE;
	val = (fcs>>8) & 0xff;
	MAKE_RAW_BYTE;
	val = HDLC_FLAG_VALUE;
	for (j=0; j<8; j++) {
		bitcnt++;
		s_val >>= 1;
		if (val & 1)
			s_val |= 0x80;
		else
			s_val &= 0x7f;
		if (bitcnt==8) {
			bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
			bitcnt = 0;
		}
		val >>= 1;
	}
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger make_raw: in %ld out %d.%d",
			bcs->tx_skb->len, s_cnt, bitcnt);
	if (bitcnt) {
		while (8 > bitcnt++) {
			s_val >>= 1;
			s_val |= 0x80;
		}
		bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
		bcs->hw.tiger.sendbuf[s_cnt++] = 0xff;	// NJ<->NJ throughput bug fix
	}
	bcs->hw.tiger.sendcnt = s_cnt;
	bcs->tx_cnt -= bcs->tx_skb->len;
	bcs->hw.tiger.sp = bcs->hw.tiger.sendbuf;
	return(0);
}

// macro for 56k

#define MAKE_RAW_BYTE_56K for (j=0; j<8; j++) { \
			bitcnt++;\
			s_val >>= 1;\
			if (val & 1) {\
				s_one++;\
				s_val |= 0x80;\
			} else {\
				s_one = 0;\
				s_val &= 0x7f;\
			}\
			if (bitcnt==7) {\
				s_val >>= 1;\
				s_val |= 0x80;\
				bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
				bitcnt = 0;\
			}\
			if (s_one == 5) {\
				s_val >>= 1;\
				s_val &= 0x7f;\
				bitcnt++;\
				s_one = 0;\
			}\
			if (bitcnt==7) {\
				s_val >>= 1;\
				s_val |= 0x80;\
				bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
				bitcnt = 0;\
			}\
			val >>= 1;\
		}
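
/*
 * MAKE_RAW_BYTE_56K is the 56k variant of the stuffing macro above:
 * only seven payload bits are packed per output octet, and the spare
 * MSB of every stored octet is forced to 1.
 */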
static int make_raw_data_56k(struct BCState *bcs) {
	// this make_raw is for 56k
	register u_int i, s_cnt = 0;
	register u_char j;
	register u_char val;
	register u_char s_one = 0;
	register u_char s_val = 0;
	register u_char bitcnt = 0;
	u_int fcs;

	if (!bcs->tx_skb) {
		debugl1(bcs->cs, "tiger make_raw_56k: NULL skb");
		return(1);
	}
	val = HDLC_FLAG_VALUE;
	for (j=0; j<8; j++) {
		bitcnt++;
		s_val >>= 1;
		if (val & 1)
			s_val |= 0x80;
		else
			s_val &= 0x7f;
		if (bitcnt==7) {
			s_val >>= 1;
			s_val |= 0x80;
			bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
			bitcnt = 0;
		}
		val >>= 1;
	}
	fcs = PPP_INITFCS;
	for (i=0; i<bcs->tx_skb->len; i++) {
		val = bcs->tx_skb->data[i];
		fcs = PPP_FCS (fcs, val);
		MAKE_RAW_BYTE_56K;
	}
	fcs ^= 0xffff;
	val = fcs & 0xff;
	MAKE_RAW_BYTE_56K;
	val = (fcs>>8) & 0xff;
	MAKE_RAW_BYTE_56K;
	val = HDLC_FLAG_VALUE;
	for (j=0; j<8; j++) {
		bitcnt++;
		s_val >>= 1;
		if (val & 1)
			s_val |= 0x80;
		else
			s_val &= 0x7f;
		if (bitcnt==7) {
			s_val >>= 1;
			s_val |= 0x80;
			bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
			bitcnt = 0;
		}
		val >>= 1;
	}
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger make_raw_56k: in %ld out %d.%d",
			bcs->tx_skb->len, s_cnt, bitcnt);
	if (bitcnt) {
		while (8 > bitcnt++) {
			s_val >>= 1;
			s_val |= 0x80;
		}
		bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
		bcs->hw.tiger.sendbuf[s_cnt++] = 0xff;	// NJ<->NJ throughput bug fix
	}
	bcs->hw.tiger.sendcnt = s_cnt;
	bcs->tx_cnt -= bcs->tx_skb->len;
	bcs->hw.tiger.sp = bcs->hw.tiger.sendbuf;
	return(0);
}

static void got_frame(struct BCState *bcs, int count) {
	struct sk_buff *skb;

	if (!(skb = dev_alloc_skb(count)))
		printk(KERN_WARNING "TIGER: receive out of memory\n");
	else {
		memcpy(skb_put(skb, count), bcs->hw.tiger.rcvbuf, count);
		skb_queue_tail(&bcs->rqueue, skb);
	}
	test_and_set_bit(B_RCVBUFREADY, &bcs->event);
	schedule_work(&bcs->tqueue);

	if (bcs->cs->debug & L1_DEB_RECEIVE_FRAME)
		printframe(bcs->cs, bcs->hw.tiger.rcvbuf, count, "rec");
}
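
/*
 * read_raw() undoes the stuffing on receive with a small state machine
 * kept in hw.tiger: HDLC_ZERO_SEARCH while the line is idle or after an
 * abort, HDLC_FLAG_SEARCH and HDLC_FLAG_FOUND around the opening flag,
 * and HDLC_FRAME_FOUND while payload bytes are collected into rcvbuf
 * and checked against the PPP FCS.
 */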
static void read_raw(struct BCState *bcs, u_int *buf, int cnt){
	int i;
	register u_char j;
	register u_char val;
	u_int *pend = bcs->hw.tiger.rec + NETJET_DMA_RXSIZE - 1;
	register u_char state = bcs->hw.tiger.r_state;
	register u_char r_one = bcs->hw.tiger.r_one;
	register u_char r_val = bcs->hw.tiger.r_val;
	register u_int bitcnt = bcs->hw.tiger.r_bitcnt;
	u_int *p = buf;
	int bits;
	u_char mask;

	if (bcs->mode == L1_MODE_HDLC) { // it's 64k
		mask = 0xff;
		bits = 8;
	}
	else { // it's 56K
		mask = 0x7f;
		bits = 7;
	}
	for (i=0;i<cnt;i++) {
		val = bcs->channel ? ((*p>>8) & 0xff) : (*p & 0xff);
		p++;
		if (p > pend)
			p = bcs->hw.tiger.rec;
		if ((val & mask) == mask) {
			state = HDLC_ZERO_SEARCH;
			bcs->hw.tiger.r_tot++;
			bitcnt = 0;
			r_one = 0;
			continue;
		}
		for (j=0;j<bits;j++) {
			if (state == HDLC_ZERO_SEARCH) {
				if (val & 1) {
					r_one++;
				} else {
					r_one=0;
					state= HDLC_FLAG_SEARCH;
					if (bcs->cs->debug & L1_DEB_HSCX)
						debugl1(bcs->cs,"tiger read_raw: zBit(%d,%d,%d) %x",
							bcs->hw.tiger.r_tot,i,j,val);
				}
			} else if (state == HDLC_FLAG_SEARCH) {
				if (val & 1) {
					r_one++;
					if (r_one>6) {
						state=HDLC_ZERO_SEARCH;
					}
				} else {
					if (r_one==6) {
						bitcnt=0;
						r_val=0;
						state=HDLC_FLAG_FOUND;
						if (bcs->cs->debug & L1_DEB_HSCX)
							debugl1(bcs->cs,"tiger read_raw: flag(%d,%d,%d) %x",
								bcs->hw.tiger.r_tot,i,j,val);
					}
					r_one=0;
				}
			} else if (state == HDLC_FLAG_FOUND) {
				if (val & 1) {
					r_one++;
					if (r_one>6) {
						state=HDLC_ZERO_SEARCH;
					} else {
						r_val >>= 1;
						r_val |= 0x80;
						bitcnt++;
					}
				} else {
					if (r_one==6) {
						bitcnt=0;
						r_val=0;
						r_one=0;
						val >>= 1;
						continue;
					} else if (r_one!=5) {
						r_val >>= 1;
						r_val &= 0x7f;
						bitcnt++;
					}
					r_one=0;
				}
				if ((state != HDLC_ZERO_SEARCH) &&
					!(bitcnt & 7)) {
					state=HDLC_FRAME_FOUND;
					bcs->hw.tiger.r_fcs = PPP_INITFCS;
					bcs->hw.tiger.rcvbuf[0] = r_val;
					bcs->hw.tiger.r_fcs = PPP_FCS (bcs->hw.tiger.r_fcs, r_val);
					if (bcs->cs->debug & L1_DEB_HSCX)
						debugl1(bcs->cs,"tiger read_raw: byte1(%d,%d,%d) rval %x val %x i %x",
							bcs->hw.tiger.r_tot,i,j,r_val,val,
							bcs->cs->hw.njet.irqstat0);
				}
			} else if (state == HDLC_FRAME_FOUND) {
				if (val & 1) {
					r_one++;
					if (r_one>6) {
						state=HDLC_ZERO_SEARCH;
						bitcnt=0;
					} else {
						r_val >>= 1;
						r_val |= 0x80;
						bitcnt++;
					}
				} else {
					if (r_one==6) {
						r_val=0;
						r_one=0;
						bitcnt++;
						if (bitcnt & 7) {
							debugl1(bcs->cs, "tiger: frame not byte aligned");
							state=HDLC_FLAG_SEARCH;
							bcs->hw.tiger.r_err++;
#ifdef ERROR_STATISTIC
							bcs->err_inv++;
#endif
						} else {
							if (bcs->cs->debug & L1_DEB_HSCX)
								debugl1(bcs->cs,"tiger frame end(%d,%d): fcs(%x) i %x",
									i,j,bcs->hw.tiger.r_fcs, bcs->cs->hw.njet.irqstat0);
							if (bcs->hw.tiger.r_fcs == PPP_GOODFCS) {
								got_frame(bcs, (bitcnt>>3)-3);
							} else {
								if (bcs->cs->debug) {
									debugl1(bcs->cs, "tiger FCS error");
									printframe(bcs->cs, bcs->hw.tiger.rcvbuf,
										(bitcnt>>3)-1, "rec");
									bcs->hw.tiger.r_err++;
								}
#ifdef ERROR_STATISTIC
								bcs->err_crc++;
#endif
							}
							state=HDLC_FLAG_FOUND;
						}
						bitcnt=0;
					} else if (r_one==5) {
						val >>= 1;
						r_one=0;
						continue;
					} else {
						r_val >>= 1;
						r_val &= 0x7f;
						bitcnt++;
					}
					r_one=0;
				}
				if ((state == HDLC_FRAME_FOUND) &&
					!(bitcnt & 7)) {
					if ((bitcnt>>3)>=HSCX_BUFMAX) {
						debugl1(bcs->cs, "tiger: frame too big");
						r_val=0;
						state=HDLC_FLAG_SEARCH;
						bcs->hw.tiger.r_err++;
#ifdef ERROR_STATISTIC
						bcs->err_inv++;
#endif
					} else {
						bcs->hw.tiger.rcvbuf[(bitcnt>>3)-1] = r_val;
						bcs->hw.tiger.r_fcs =
							PPP_FCS (bcs->hw.tiger.r_fcs, r_val);
					}
				}
			}
			val >>= 1;
		}
		bcs->hw.tiger.r_tot++;
	}
	bcs->hw.tiger.r_state = state;
	bcs->hw.tiger.r_one = r_one;
	bcs->hw.tiger.r_val = r_val;
	bcs->hw.tiger.r_bitcnt = bitcnt;
}

void read_tiger(struct IsdnCardState *cs) {
	u_int *p;
	int cnt = NETJET_DMA_RXSIZE/2;

	if ((cs->hw.njet.irqstat0 & cs->hw.njet.last_is0) & NETJET_IRQM0_READ) {
		debugl1(cs,"tiger warn read double dma %x/%x",
			cs->hw.njet.irqstat0, cs->hw.njet.last_is0);
#ifdef ERROR_STATISTIC
		if (cs->bcs[0].mode)
			cs->bcs[0].err_rdo++;
		if (cs->bcs[1].mode)
			cs->bcs[1].err_rdo++;
#endif
		return;
	} else {
		cs->hw.njet.last_is0 &= ~NETJET_IRQM0_READ;
		cs->hw.njet.last_is0 |= (cs->hw.njet.irqstat0 & NETJET_IRQM0_READ);
	}
	if (cs->hw.njet.irqstat0 & NETJET_IRQM0_READ_1)
		p = cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE - 1;
	else
		p = cs->bcs[0].hw.tiger.rec + cnt - 1;
	if ((cs->bcs[0].mode == L1_MODE_HDLC) || (cs->bcs[0].mode == L1_MODE_HDLC_56K))
		read_raw(cs->bcs, p, cnt);
	if ((cs->bcs[1].mode == L1_MODE_HDLC) || (cs->bcs[1].mode == L1_MODE_HDLC_56K))
		read_raw(cs->bcs + 1, p, cnt);
	cs->hw.njet.irqstat0 &= ~NETJET_IRQM0_READ;
}

static void write_raw(struct BCState *bcs, u_int *buf, int cnt);
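
/*
 * netjet_fill_dma() bit-stuffs the pending tx_skb (make_raw_data or
 * make_raw_data_56k) and hands it to write_raw(), picking the start
 * point in the send ring from the current DMA read address.  The
 * BC_FLG_EMPTY/HALF/NOFRAME bits track the ring's fill state: EMPTY is
 * set when the mode is (re)programmed, HALF or NOFRAME by write_raw()
 * depending on how much free space remains.
 */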
void netjet_fill_dma(struct BCState *bcs)
{
	register u_int *p, *sp;
	register int cnt;

	if (!bcs->tx_skb)
		return;
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger fill_dma1: c%d %4x", bcs->channel,
			bcs->Flag);
	if (test_and_set_bit(BC_FLG_BUSY, &bcs->Flag))
		return;
	if (bcs->mode == L1_MODE_HDLC) { // it's 64k
		if (make_raw_data(bcs))
			return;
	}
	else { // it's 56k
		if (make_raw_data_56k(bcs))
			return;
	}
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger fill_dma2: c%d %4x", bcs->channel,
			bcs->Flag);
	if (test_and_clear_bit(BC_FLG_NOFRAME, &bcs->Flag)) {
		write_raw(bcs, bcs->hw.tiger.sendp, bcs->hw.tiger.free);
	} else if (test_and_clear_bit(BC_FLG_HALF, &bcs->Flag)) {
		p = bus_to_virt(inl(bcs->cs->hw.njet.base + NETJET_DMA_READ_ADR));
		sp = bcs->hw.tiger.sendp;
		if (p == bcs->hw.tiger.s_end)
			p = bcs->hw.tiger.send - 1;
		if (sp == bcs->hw.tiger.s_end)
			sp = bcs->hw.tiger.send - 1;
		cnt = p - sp;
		if (cnt < 0) {
			write_raw(bcs, bcs->hw.tiger.sendp, bcs->hw.tiger.free);
		} else {
			p++;
			cnt++;
			if (p > bcs->hw.tiger.s_end)
				p = bcs->hw.tiger.send;
			p++;
			cnt++;
			if (p > bcs->hw.tiger.s_end)
				p = bcs->hw.tiger.send;
			write_raw(bcs, p, bcs->hw.tiger.free - cnt);
		}
	} else if (test_and_clear_bit(BC_FLG_EMPTY, &bcs->Flag)) {
		p = bus_to_virt(inl(bcs->cs->hw.njet.base + NETJET_DMA_READ_ADR));
		cnt = bcs->hw.tiger.s_end - p;
		if (cnt < 2) {
			p = bcs->hw.tiger.send + 1;
			cnt = NETJET_DMA_TXSIZE/2 - 2;
		} else {
			p++;
			p++;
			if (cnt <= (NETJET_DMA_TXSIZE/2))
				cnt += NETJET_DMA_TXSIZE/2;
			cnt--;
			cnt--;
		}
		write_raw(bcs, p, cnt);
	}
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger fill_dma3: c%d %4x", bcs->channel,
			bcs->Flag);
}

static void write_raw(struct BCState *bcs, u_int *buf, int cnt) {
	u_int mask, val, *p = buf;
	u_int i, s_cnt;

	if (cnt <= 0)
		return;
	if (test_bit(BC_FLG_BUSY, &bcs->Flag)) {
		if (bcs->hw.tiger.sendcnt > cnt) {
			s_cnt = cnt;
			bcs->hw.tiger.sendcnt -= cnt;
		} else {
			s_cnt = bcs->hw.tiger.sendcnt;
			bcs->hw.tiger.sendcnt = 0;
		}
		if (bcs->channel)
			mask = 0xffff00ff;
		else
			mask = 0xffffff00;
		for (i=0; i<s_cnt; i++) {
			val = bcs->channel ? ((bcs->hw.tiger.sp[i] <<8) & 0xff00) :
				(bcs->hw.tiger.sp[i]);
			*p &= mask;
			*p++ |= val;
			if (p > bcs->hw.tiger.s_end)
				p = bcs->hw.tiger.send;
		}
		bcs->hw.tiger.s_tot += s_cnt;
		if (bcs->cs->debug & L1_DEB_HSCX)
			debugl1(bcs->cs,"tiger write_raw: c%d %p-%p %d/%d %d %x", bcs->channel,
				buf, p, s_cnt, cnt,
				bcs->hw.tiger.sendcnt, bcs->cs->hw.njet.irqstat0);
		if (bcs->cs->debug & L1_DEB_HSCX_FIFO)
			printframe(bcs->cs, bcs->hw.tiger.sp, s_cnt, "snd");
		bcs->hw.tiger.sp += s_cnt;
		bcs->hw.tiger.sendp = p;
		if (!bcs->hw.tiger.sendcnt) {
			if (!bcs->tx_skb) {
				debugl1(bcs->cs,"tiger write_raw: NULL skb s_cnt %d", s_cnt);
			} else {
				if (test_bit(FLG_LLI_L1WAKEUP,&bcs->st->lli.flag) &&
					(PACKET_NOACK != bcs->tx_skb->pkt_type)) {
					u_long flags;
					spin_lock_irqsave(&bcs->aclock, flags);
					bcs->ackcnt += bcs->tx_skb->len;
					spin_unlock_irqrestore(&bcs->aclock, flags);
					schedule_event(bcs, B_ACKPENDING);
				}
				dev_kfree_skb_any(bcs->tx_skb);
				bcs->tx_skb = NULL;
			}
			test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
			bcs->hw.tiger.free = cnt - s_cnt;
			if (bcs->hw.tiger.free > (NETJET_DMA_TXSIZE/2))
				test_and_set_bit(BC_FLG_HALF, &bcs->Flag);
			else {
				test_and_clear_bit(BC_FLG_HALF, &bcs->Flag);
				test_and_set_bit(BC_FLG_NOFRAME, &bcs->Flag);
			}
			if ((bcs->tx_skb = skb_dequeue(&bcs->squeue))) {
				netjet_fill_dma(bcs);
			} else {
				mask ^= 0xffffffff;
				if (s_cnt < cnt) {
					for (i=s_cnt; i<cnt; i++) {
						*p++ |= mask;
						if (p > bcs->hw.tiger.s_end)
							p = bcs->hw.tiger.send;
					}
					if (bcs->cs->debug & L1_DEB_HSCX)
						debugl1(bcs->cs, "tiger write_raw: fill rest %d",
							cnt - s_cnt);
				}
				test_and_set_bit(B_XMTBUFREADY, &bcs->event);
				schedule_work(&bcs->tqueue);
			}
		}
	} else if (test_and_clear_bit(BC_FLG_NOFRAME, &bcs->Flag)) {
		test_and_set_bit(BC_FLG_HALF, &bcs->Flag);
		fill_mem(bcs, buf, cnt, bcs->channel, 0xff);
		bcs->hw.tiger.free += cnt;
		if (bcs->cs->debug & L1_DEB_HSCX)
			debugl1(bcs->cs,"tiger write_raw: fill half");
	} else if (test_and_clear_bit(BC_FLG_HALF, &bcs->Flag)) {
		test_and_set_bit(BC_FLG_EMPTY, &bcs->Flag);
		fill_mem(bcs, buf, cnt, bcs->channel, 0xff);
		if (bcs->cs->debug & L1_DEB_HSCX)
			debugl1(bcs->cs,"tiger write_raw: fill full");
	}
}

void write_tiger(struct IsdnCardState *cs) {
	u_int *p, cnt = NETJET_DMA_TXSIZE/2;

	if ((cs->hw.njet.irqstat0 & cs->hw.njet.last_is0) & NETJET_IRQM0_WRITE) {
		debugl1(cs,"tiger warn write double dma %x/%x",
			cs->hw.njet.irqstat0, cs->hw.njet.last_is0);
#ifdef ERROR_STATISTIC
		if (cs->bcs[0].mode)
			cs->bcs[0].err_tx++;
		if (cs->bcs[1].mode)
			cs->bcs[1].err_tx++;
#endif
		return;
	} else {
		cs->hw.njet.last_is0 &= ~NETJET_IRQM0_WRITE;
		cs->hw.njet.last_is0 |= (cs->hw.njet.irqstat0 & NETJET_IRQM0_WRITE);
	}
	if (cs->hw.njet.irqstat0 & NETJET_IRQM0_WRITE_1)
		p = cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE - 1;
	else
		p = cs->bcs[0].hw.tiger.send + cnt - 1;
	if ((cs->bcs[0].mode == L1_MODE_HDLC) || (cs->bcs[0].mode == L1_MODE_HDLC_56K))
		write_raw(cs->bcs, p, cnt);
	if ((cs->bcs[1].mode == L1_MODE_HDLC) || (cs->bcs[1].mode == L1_MODE_HDLC_56K))
		write_raw(cs->bcs + 1, p, cnt);
	cs->hw.njet.irqstat0 &= ~NETJET_IRQM0_WRITE;
}

static void
tiger_l2l1(struct PStack *st, int pr, void *arg)
{
	struct BCState *bcs = st->l1.bcs;
	struct sk_buff *skb = arg;
	u_long flags;

	switch (pr) {
		case (PH_DATA | REQUEST):
			spin_lock_irqsave(&bcs->cs->lock, flags);
			if (bcs->tx_skb) {
				skb_queue_tail(&bcs->squeue, skb);
			} else {
				bcs->tx_skb = skb;
				bcs->cs->BC_Send_Data(bcs);
			}
			spin_unlock_irqrestore(&bcs->cs->lock, flags);
			break;
		case (PH_PULL | INDICATION):
			spin_lock_irqsave(&bcs->cs->lock, flags);
			if (bcs->tx_skb) {
				printk(KERN_WARNING "tiger_l2l1: this shouldn't happen\n");
			} else {
				bcs->tx_skb = skb;
				bcs->cs->BC_Send_Data(bcs);
			}
			spin_unlock_irqrestore(&bcs->cs->lock, flags);
			break;
		case (PH_PULL | REQUEST):
			if (!bcs->tx_skb) {
				test_and_clear_bit(FLG_L1_PULL_REQ, &st->l1.Flags);
				st->l1.l1l2(st, PH_PULL | CONFIRM, NULL);
			} else
				test_and_set_bit(FLG_L1_PULL_REQ, &st->l1.Flags);
			break;
		case (PH_ACTIVATE | REQUEST):
			spin_lock_irqsave(&bcs->cs->lock, flags);
			test_and_set_bit(BC_FLG_ACTIV, &bcs->Flag);
			mode_tiger(bcs, st->l1.mode, st->l1.bc);
			/* 2001/10/04 Christoph Ersfeld, Formula-n Europe AG */
			spin_unlock_irqrestore(&bcs->cs->lock, flags);
			bcs->cs->cardmsg(bcs->cs, MDL_BC_ASSIGN, (void *)(&st->l1.bc));
			l1_msg_b(st, pr, arg);
			break;
		case (PH_DEACTIVATE | REQUEST):
			/* 2001/10/04 Christoph Ersfeld, Formula-n Europe AG */
			bcs->cs->cardmsg(bcs->cs, MDL_BC_RELEASE, (void *)(&st->l1.bc));
			l1_msg_b(st, pr, arg);
			break;
		case (PH_DEACTIVATE | CONFIRM):
			spin_lock_irqsave(&bcs->cs->lock, flags);
			test_and_clear_bit(BC_FLG_ACTIV, &bcs->Flag);
			test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
			mode_tiger(bcs, 0, st->l1.bc);
			spin_unlock_irqrestore(&bcs->cs->lock, flags);
			st->l1.l1l2(st, PH_DEACTIVATE | CONFIRM, NULL);
			break;
	}
}

static void
close_tigerstate(struct BCState *bcs)
{
	mode_tiger(bcs, 0, bcs->channel);
	if (test_and_clear_bit(BC_FLG_INIT, &bcs->Flag)) {
		if (bcs->hw.tiger.rcvbuf) {
			kfree(bcs->hw.tiger.rcvbuf);
			bcs->hw.tiger.rcvbuf = NULL;
		}
		if (bcs->hw.tiger.sendbuf) {
			kfree(bcs->hw.tiger.sendbuf);
			bcs->hw.tiger.sendbuf = NULL;
		}
		skb_queue_purge(&bcs->rqueue);
		skb_queue_purge(&bcs->squeue);
		if (bcs->tx_skb) {
			dev_kfree_skb_any(bcs->tx_skb);
			bcs->tx_skb = NULL;
			test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
		}
	}
}

static int
open_tigerstate(struct IsdnCardState *cs, struct BCState *bcs)
{
	if (!test_and_set_bit(BC_FLG_INIT, &bcs->Flag)) {
		if (!(bcs->hw.tiger.rcvbuf = kmalloc(HSCX_BUFMAX, GFP_ATOMIC))) {
			printk(KERN_WARNING
			       "HiSax: No memory for tiger.rcvbuf\n");
			return (1);
		}
		if (!(bcs->hw.tiger.sendbuf = kmalloc(RAW_BUFMAX, GFP_ATOMIC))) {
			printk(KERN_WARNING
			       "HiSax: No memory for tiger.sendbuf\n");
			return (1);
		}
		skb_queue_head_init(&bcs->rqueue);
		skb_queue_head_init(&bcs->squeue);
	}
	bcs->tx_skb = NULL;
	bcs->hw.tiger.sendcnt = 0;
	test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
	bcs->event = 0;
	bcs->tx_cnt = 0;
	return (0);
}

static int
setstack_tiger(struct PStack *st, struct BCState *bcs)
{
	bcs->channel = st->l1.bc;
	if (open_tigerstate(st->l1.hardware, bcs))
		return (-1);
	st->l1.bcs = bcs;
	st->l2.l2l1 = tiger_l2l1;
	setstack_manager(st);
	bcs->st = st;
	setstack_l1_B(st);
	return (0);
}
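
/*
 * inittiger() allocates the two DMA rings that both B channels share:
 * one TX ring (hw.tiger.send, NETJET_DMA_TXSIZE words) and one RX ring
 * (hw.tiger.rec, NETJET_DMA_RXSIZE words).  Start, half-way interrupt
 * and end addresses are programmed into the controller as bus
 * addresses via virt_to_bus().
 */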
void __init
inittiger(struct IsdnCardState *cs)
{
	if (!(cs->bcs[0].hw.tiger.send = kmalloc(NETJET_DMA_TXSIZE * sizeof(unsigned int),
		GFP_KERNEL | GFP_DMA))) {
		printk(KERN_WARNING
		       "HiSax: No memory for tiger.send\n");
		return;
	}
	cs->bcs[0].hw.tiger.s_irq = cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE/2 - 1;
	cs->bcs[0].hw.tiger.s_end = cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE - 1;
	cs->bcs[1].hw.tiger.send = cs->bcs[0].hw.tiger.send;
	cs->bcs[1].hw.tiger.s_irq = cs->bcs[0].hw.tiger.s_irq;
	cs->bcs[1].hw.tiger.s_end = cs->bcs[0].hw.tiger.s_end;

	memset(cs->bcs[0].hw.tiger.send, 0xff, NETJET_DMA_TXSIZE * sizeof(unsigned int));
	debugl1(cs, "tiger: send buf %p - %p", cs->bcs[0].hw.tiger.send,
		cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE - 1);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.send),
		cs->hw.njet.base + NETJET_DMA_READ_START);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.s_irq),
		cs->hw.njet.base + NETJET_DMA_READ_IRQ);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.s_end),
		cs->hw.njet.base + NETJET_DMA_READ_END);
	if (!(cs->bcs[0].hw.tiger.rec = kmalloc(NETJET_DMA_RXSIZE * sizeof(unsigned int),
		GFP_KERNEL | GFP_DMA))) {
		printk(KERN_WARNING
		       "HiSax: No memory for tiger.rec\n");
		return;
	}
	debugl1(cs, "tiger: rec buf %p - %p", cs->bcs[0].hw.tiger.rec,
		cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE - 1);
	cs->bcs[1].hw.tiger.rec = cs->bcs[0].hw.tiger.rec;
	memset(cs->bcs[0].hw.tiger.rec, 0xff, NETJET_DMA_RXSIZE * sizeof(unsigned int));
	outl(virt_to_bus(cs->bcs[0].hw.tiger.rec),
		cs->hw.njet.base + NETJET_DMA_WRITE_START);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE/2 - 1),
		cs->hw.njet.base + NETJET_DMA_WRITE_IRQ);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE - 1),
		cs->hw.njet.base + NETJET_DMA_WRITE_END);
	debugl1(cs, "tiger: dmacfg %x/%x pulse=%d",
		inl(cs->hw.njet.base + NETJET_DMA_WRITE_ADR),
		inl(cs->hw.njet.base + NETJET_DMA_READ_ADR),
		bytein(cs->hw.njet.base + NETJET_PULSE_CNT));
	cs->hw.njet.last_is0 = 0;
	cs->bcs[0].BC_SetStack = setstack_tiger;
	cs->bcs[1].BC_SetStack = setstack_tiger;
	cs->bcs[0].BC_Close = close_tigerstate;
	cs->bcs[1].BC_Close = close_tigerstate;
}

static void
releasetiger(struct IsdnCardState *cs)
{
	if (cs->bcs[0].hw.tiger.send) {
		kfree(cs->bcs[0].hw.tiger.send);
		cs->bcs[0].hw.tiger.send = NULL;
	}
	if (cs->bcs[1].hw.tiger.send) {
		cs->bcs[1].hw.tiger.send = NULL;
	}
	if (cs->bcs[0].hw.tiger.rec) {
		kfree(cs->bcs[0].hw.tiger.rec);
		cs->bcs[0].hw.tiger.rec = NULL;
	}
	if (cs->bcs[1].hw.tiger.rec) {
		cs->bcs[1].hw.tiger.rec = NULL;
	}
}

void
release_io_netjet(struct IsdnCardState *cs)
{
	byteout(cs->hw.njet.base + NETJET_IRQMASK0, 0);
	byteout(cs->hw.njet.base + NETJET_IRQMASK1, 0);
	releasetiger(cs);
	release_region(cs->hw.njet.base, 256);
}