author	Linus Torvalds <torvalds@ppc970.osdl.org>	2005-04-16 18:20:36 -0400
committer	Linus Torvalds <torvalds@ppc970.osdl.org>	2005-04-16 18:20:36 -0400
commit	1da177e4c3f41524e886b7f1b8a0c1fc7321cac2 (patch)
tree	0bba044c4ce775e45a88a51686b5d9f90697ea9d	/drivers/isdn/hisax/netjet.c
Linux-2.6.12-rc2 (v2.6.12-rc2)
Initial git repository build. I'm not bothering with the full history, even though we have it. We can create a separate "historical" git archive of that later if we want to, and in the meantime it's about 3.2GB when imported into git - space that would just make the early git days unnecessarily complicated, when we don't have a lot of good infrastructure for it.

Let it rip!
Diffstat (limited to 'drivers/isdn/hisax/netjet.c')
-rw-r--r--	drivers/isdn/hisax/netjet.c	996
1 file changed, 996 insertions, 0 deletions
diff --git a/drivers/isdn/hisax/netjet.c b/drivers/isdn/hisax/netjet.c
new file mode 100644
index 000000000000..fe61d26365d3
--- /dev/null
+++ b/drivers/isdn/hisax/netjet.c
@@ -0,0 +1,996 @@
1/* $Id: netjet.c,v 1.29.2.4 2004/02/11 13:21:34 keil Exp $
2 *
3 * low level stuff for Traverse Technologies NETJet ISDN cards
4 *
5 * Author Karsten Keil
6 * Copyright by Karsten Keil <keil@isdn4linux.de>
7 *
8 * This software may be used and distributed according to the terms
9 * of the GNU General Public License, incorporated herein by reference.
10 *
11 * Thanks to Traverse Technologies Australia for documents and information
12 *
13 * 16-Apr-2002 - led code added - Guy Ellis (guy@traverse.com.au)
14 *
15 */
16
17#include <linux/init.h>
18#include "hisax.h"
19#include "isac.h"
20#include "hscx.h"
21#include "isdnl1.h"
22#include <linux/pci.h>
23#include <linux/interrupt.h>
24#include <linux/ppp_defs.h>
25#include <asm/io.h>
26#include "netjet.h"
27
28const char *NETjet_revision = "$Revision: 1.29.2.4 $";
29
30/* Interface functions */
31
32u_char
33NETjet_ReadIC(struct IsdnCardState *cs, u_char offset)
34{
35 u_char ret;
36
37 cs->hw.njet.auxd &= 0xfc;
38 cs->hw.njet.auxd |= (offset>>4) & 3;
39 byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
40 ret = bytein(cs->hw.njet.isac + ((offset & 0xf)<<2));
41 return(ret);
42}
43
44void
45NETjet_WriteIC(struct IsdnCardState *cs, u_char offset, u_char value)
46{
47 cs->hw.njet.auxd &= 0xfc;
48 cs->hw.njet.auxd |= (offset>>4) & 3;
49 byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
50 byteout(cs->hw.njet.isac + ((offset & 0xf)<<2), value);
51}
52
53void
54NETjet_ReadICfifo(struct IsdnCardState *cs, u_char *data, int size)
55{
56 cs->hw.njet.auxd &= 0xfc;
57 byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
58 insb(cs->hw.njet.isac, data, size);
59}
60
61void
62NETjet_WriteICfifo(struct IsdnCardState *cs, u_char *data, int size)
63{
64 cs->hw.njet.auxd &= 0xfc;
65 byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
66 outsb(cs->hw.njet.isac, data, size);
67}
68
69void fill_mem(struct BCState *bcs, u_int *pos, u_int cnt, int chan, u_char fill)
70{
71 u_int mask=0x000000ff, val = 0, *p=pos;
72 u_int i;
73
74 val |= fill;
75 if (chan) {
76 val <<= 8;
77 mask <<= 8;
78 }
79 mask ^= 0xffffffff;
80 for (i=0; i<cnt; i++) {
81 *p &= mask;
82 *p++ |= val;
83 if (p > bcs->hw.tiger.s_end)
84 p = bcs->hw.tiger.send;
85 }
86}
87
88void
89mode_tiger(struct BCState *bcs, int mode, int bc)
90{
91 struct IsdnCardState *cs = bcs->cs;
92 u_char led;
93
94 if (cs->debug & L1_DEB_HSCX)
95 debugl1(cs, "Tiger mode %d bchan %d/%d",
96 mode, bc, bcs->channel);
97 bcs->mode = mode;
98 bcs->channel = bc;
99 switch (mode) {
100 case (L1_MODE_NULL):
101 fill_mem(bcs, bcs->hw.tiger.send,
102 NETJET_DMA_TXSIZE, bc, 0xff);
103 if (cs->debug & L1_DEB_HSCX)
104 debugl1(cs, "Tiger stat rec %d/%d send %d",
105 bcs->hw.tiger.r_tot, bcs->hw.tiger.r_err,
106 bcs->hw.tiger.s_tot);
107 if ((cs->bcs[0].mode == L1_MODE_NULL) &&
108 (cs->bcs[1].mode == L1_MODE_NULL)) {
109 cs->hw.njet.dmactrl = 0;
110 byteout(cs->hw.njet.base + NETJET_DMACTRL,
111 cs->hw.njet.dmactrl);
112 byteout(cs->hw.njet.base + NETJET_IRQMASK0, 0);
113 }
114 if (cs->typ == ISDN_CTYPE_NETJET_S)
115 {
116 // led off
117 led = bc & 0x01;
118 led = 0x01 << (6 + led); // convert to mask
119 led = ~led;
120 cs->hw.njet.auxd &= led;
121 byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
122 }
123 break;
124 case (L1_MODE_TRANS):
125 break;
126 case (L1_MODE_HDLC_56K):
127 case (L1_MODE_HDLC):
128 fill_mem(bcs, bcs->hw.tiger.send,
129 NETJET_DMA_TXSIZE, bc, 0xff);
130 bcs->hw.tiger.r_state = HDLC_ZERO_SEARCH;
131 bcs->hw.tiger.r_tot = 0;
132 bcs->hw.tiger.r_bitcnt = 0;
133 bcs->hw.tiger.r_one = 0;
134 bcs->hw.tiger.r_err = 0;
135 bcs->hw.tiger.s_tot = 0;
136 if (! cs->hw.njet.dmactrl) {
137 fill_mem(bcs, bcs->hw.tiger.send,
138 NETJET_DMA_TXSIZE, !bc, 0xff);
139 cs->hw.njet.dmactrl = 1;
140 byteout(cs->hw.njet.base + NETJET_DMACTRL,
141 cs->hw.njet.dmactrl);
142 byteout(cs->hw.njet.base + NETJET_IRQMASK0, 0x0f);
143 /* was 0x3f now 0x0f for TJ300 and TJ320 GE 13/07/00 */
144 }
145 bcs->hw.tiger.sendp = bcs->hw.tiger.send;
146 bcs->hw.tiger.free = NETJET_DMA_TXSIZE;
147 test_and_set_bit(BC_FLG_EMPTY, &bcs->Flag);
148 if (cs->typ == ISDN_CTYPE_NETJET_S)
149 {
150 // led on
151 led = bc & 0x01;
152 led = 0x01 << (6 + led); // convert to mask
153 cs->hw.njet.auxd |= led;
154 byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
155 }
156 break;
157 }
158 if (cs->debug & L1_DEB_HSCX)
159 debugl1(cs, "tiger: set %x %x %x %x/%x pulse=%d",
160 bytein(cs->hw.njet.base + NETJET_DMACTRL),
161 bytein(cs->hw.njet.base + NETJET_IRQMASK0),
162 bytein(cs->hw.njet.base + NETJET_IRQSTAT0),
163 inl(cs->hw.njet.base + NETJET_DMA_READ_ADR),
164 inl(cs->hw.njet.base + NETJET_DMA_WRITE_ADR),
165 bytein(cs->hw.njet.base + NETJET_PULSE_CNT));
166}
167
168static void printframe(struct IsdnCardState *cs, u_char *buf, int count, char *s) {
169 char tmp[128];
170 char *t = tmp;
171 int i=count,j;
172 u_char *p = buf;
173
174 t += sprintf(t, "tiger %s(%4d)", s, count);
175 while (i>0) {
176 if (i>16)
177 j=16;
178 else
179 j=i;
180 QuickHex(t, p, j);
181 debugl1(cs, tmp);
182 p += j;
183 i -= j;
184 t = tmp;
185 t += sprintf(t, "tiger %s ", s);
186 }
187}
188
189// macro for 64k
190
191#define MAKE_RAW_BYTE for (j=0; j<8; j++) { \
192 bitcnt++;\
193 s_val >>= 1;\
194 if (val & 1) {\
195 s_one++;\
196 s_val |= 0x80;\
197 } else {\
198 s_one = 0;\
199 s_val &= 0x7f;\
200 }\
201 if (bitcnt==8) {\
202 bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
203 bitcnt = 0;\
204 }\
205 if (s_one == 5) {\
206 s_val >>= 1;\
207 s_val &= 0x7f;\
208 bitcnt++;\
209 s_one = 0;\
210 }\
211 if (bitcnt==8) {\
212 bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
213 bitcnt = 0;\
214 }\
215 val >>= 1;\
216 }
217
218static int make_raw_data(struct BCState *bcs) {
219// this make_raw is for 64k
220 register u_int i,s_cnt=0;
221 register u_char j;
222 register u_char val;
223 register u_char s_one = 0;
224 register u_char s_val = 0;
225 register u_char bitcnt = 0;
226 u_int fcs;
227
228 if (!bcs->tx_skb) {
229 debugl1(bcs->cs, "tiger make_raw: NULL skb");
230 return(1);
231 }
232 bcs->hw.tiger.sendbuf[s_cnt++] = HDLC_FLAG_VALUE;
233 fcs = PPP_INITFCS;
234 for (i=0; i<bcs->tx_skb->len; i++) {
235 val = bcs->tx_skb->data[i];
236 fcs = PPP_FCS (fcs, val);
237 MAKE_RAW_BYTE;
238 }
239 fcs ^= 0xffff;
240 val = fcs & 0xff;
241 MAKE_RAW_BYTE;
242 val = (fcs>>8) & 0xff;
243 MAKE_RAW_BYTE;
244 val = HDLC_FLAG_VALUE;
245 for (j=0; j<8; j++) {
246 bitcnt++;
247 s_val >>= 1;
248 if (val & 1)
249 s_val |= 0x80;
250 else
251 s_val &= 0x7f;
252 if (bitcnt==8) {
253 bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
254 bitcnt = 0;
255 }
256 val >>= 1;
257 }
258 if (bcs->cs->debug & L1_DEB_HSCX)
259 debugl1(bcs->cs,"tiger make_raw: in %ld out %d.%d",
260 bcs->tx_skb->len, s_cnt, bitcnt);
261 if (bitcnt) {
262 while (8>bitcnt++) {
263 s_val >>= 1;
264 s_val |= 0x80;
265 }
266 bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
267 bcs->hw.tiger.sendbuf[s_cnt++] = 0xff; // NJ<->NJ throughput bug fix
268 }
269 bcs->hw.tiger.sendcnt = s_cnt;
270 bcs->tx_cnt -= bcs->tx_skb->len;
271 bcs->hw.tiger.sp = bcs->hw.tiger.sendbuf;
272 return(0);
273}
274
275// macro for 56k
276
277#define MAKE_RAW_BYTE_56K for (j=0; j<8; j++) { \
278 bitcnt++;\
279 s_val >>= 1;\
280 if (val & 1) {\
281 s_one++;\
282 s_val |= 0x80;\
283 } else {\
284 s_one = 0;\
285 s_val &= 0x7f;\
286 }\
287 if (bitcnt==7) {\
288 s_val >>= 1;\
289 s_val |= 0x80;\
290 bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
291 bitcnt = 0;\
292 }\
293 if (s_one == 5) {\
294 s_val >>= 1;\
295 s_val &= 0x7f;\
296 bitcnt++;\
297 s_one = 0;\
298 }\
299 if (bitcnt==7) {\
300 s_val >>= 1;\
301 s_val |= 0x80;\
302 bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
303 bitcnt = 0;\
304 }\
305 val >>= 1;\
306 }
307
308static int make_raw_data_56k(struct BCState *bcs) {
309// this make_raw is for 56k
310 register u_int i,s_cnt=0;
311 register u_char j;
312 register u_char val;
313 register u_char s_one = 0;
314 register u_char s_val = 0;
315 register u_char bitcnt = 0;
316 u_int fcs;
317
318 if (!bcs->tx_skb) {
319 debugl1(bcs->cs, "tiger make_raw_56k: NULL skb");
320 return(1);
321 }
322 val = HDLC_FLAG_VALUE;
323 for (j=0; j<8; j++) {
324 bitcnt++;
325 s_val >>= 1;
326 if (val & 1)
327 s_val |= 0x80;
328 else
329 s_val &= 0x7f;
330 if (bitcnt==7) {
331 s_val >>= 1;
332 s_val |= 0x80;
333 bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
334 bitcnt = 0;
335 }
336 val >>= 1;
337 };
338 fcs = PPP_INITFCS;
339 for (i=0; i<bcs->tx_skb->len; i++) {
340 val = bcs->tx_skb->data[i];
341 fcs = PPP_FCS (fcs, val);
342 MAKE_RAW_BYTE_56K;
343 }
344 fcs ^= 0xffff;
345 val = fcs & 0xff;
346 MAKE_RAW_BYTE_56K;
347 val = (fcs>>8) & 0xff;
348 MAKE_RAW_BYTE_56K;
349 val = HDLC_FLAG_VALUE;
350 for (j=0; j<8; j++) {
351 bitcnt++;
352 s_val >>= 1;
353 if (val & 1)
354 s_val |= 0x80;
355 else
356 s_val &= 0x7f;
357 if (bitcnt==7) {
358 s_val >>= 1;
359 s_val |= 0x80;
360 bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
361 bitcnt = 0;
362 }
363 val >>= 1;
364 }
365 if (bcs->cs->debug & L1_DEB_HSCX)
366 debugl1(bcs->cs,"tiger make_raw_56k: in %ld out %d.%d",
367 bcs->tx_skb->len, s_cnt, bitcnt);
368 if (bitcnt) {
369 while (8>bitcnt++) {
370 s_val >>= 1;
371 s_val |= 0x80;
372 }
373 bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
374 bcs->hw.tiger.sendbuf[s_cnt++] = 0xff; // NJ<->NJ throughput bug fix
375 }
376 bcs->hw.tiger.sendcnt = s_cnt;
377 bcs->tx_cnt -= bcs->tx_skb->len;
378 bcs->hw.tiger.sp = bcs->hw.tiger.sendbuf;
379 return(0);
380}
381
382static void got_frame(struct BCState *bcs, int count) {
383 struct sk_buff *skb;
384
385 if (!(skb = dev_alloc_skb(count)))
386 printk(KERN_WARNING "TIGER: receive out of memory\n");
387 else {
388 memcpy(skb_put(skb, count), bcs->hw.tiger.rcvbuf, count);
389 skb_queue_tail(&bcs->rqueue, skb);
390 }
391 test_and_set_bit(B_RCVBUFREADY, &bcs->event);
392 schedule_work(&bcs->tqueue);
393
394 if (bcs->cs->debug & L1_DEB_RECEIVE_FRAME)
395 printframe(bcs->cs, bcs->hw.tiger.rcvbuf, count, "rec");
396}
397
398
399
400static void read_raw(struct BCState *bcs, u_int *buf, int cnt){
401 int i;
402 register u_char j;
403 register u_char val;
404 u_int *pend = bcs->hw.tiger.rec +NETJET_DMA_RXSIZE -1;
405 register u_char state = bcs->hw.tiger.r_state;
406 register u_char r_one = bcs->hw.tiger.r_one;
407 register u_char r_val = bcs->hw.tiger.r_val;
408 register u_int bitcnt = bcs->hw.tiger.r_bitcnt;
409 u_int *p = buf;
410 int bits;
411 u_char mask;
412
413 if (bcs->mode == L1_MODE_HDLC) { // it's 64k
414 mask = 0xff;
415 bits = 8;
416 }
417 else { // it's 56K
418 mask = 0x7f;
419 bits = 7;
420 };
421 for (i=0;i<cnt;i++) {
422 val = bcs->channel ? ((*p>>8) & 0xff) : (*p & 0xff);
423 p++;
424 if (p > pend)
425 p = bcs->hw.tiger.rec;
426 if ((val & mask) == mask) {
427 state = HDLC_ZERO_SEARCH;
428 bcs->hw.tiger.r_tot++;
429 bitcnt = 0;
430 r_one = 0;
431 continue;
432 }
433 for (j=0;j<bits;j++) {
434 if (state == HDLC_ZERO_SEARCH) {
435 if (val & 1) {
436 r_one++;
437 } else {
438 r_one=0;
439 state= HDLC_FLAG_SEARCH;
440 if (bcs->cs->debug & L1_DEB_HSCX)
441 debugl1(bcs->cs,"tiger read_raw: zBit(%d,%d,%d) %x",
442 bcs->hw.tiger.r_tot,i,j,val);
443 }
444 } else if (state == HDLC_FLAG_SEARCH) {
445 if (val & 1) {
446 r_one++;
447 if (r_one>6) {
448 state=HDLC_ZERO_SEARCH;
449 }
450 } else {
451 if (r_one==6) {
452 bitcnt=0;
453 r_val=0;
454 state=HDLC_FLAG_FOUND;
455 if (bcs->cs->debug & L1_DEB_HSCX)
456 debugl1(bcs->cs,"tiger read_raw: flag(%d,%d,%d) %x",
457 bcs->hw.tiger.r_tot,i,j,val);
458 }
459 r_one=0;
460 }
461 } else if (state == HDLC_FLAG_FOUND) {
462 if (val & 1) {
463 r_one++;
464 if (r_one>6) {
465 state=HDLC_ZERO_SEARCH;
466 } else {
467 r_val >>= 1;
468 r_val |= 0x80;
469 bitcnt++;
470 }
471 } else {
472 if (r_one==6) {
473 bitcnt=0;
474 r_val=0;
475 r_one=0;
476 val >>= 1;
477 continue;
478 } else if (r_one!=5) {
479 r_val >>= 1;
480 r_val &= 0x7f;
481 bitcnt++;
482 }
483 r_one=0;
484 }
485 if ((state != HDLC_ZERO_SEARCH) &&
486 !(bitcnt & 7)) {
487 state=HDLC_FRAME_FOUND;
488 bcs->hw.tiger.r_fcs = PPP_INITFCS;
489 bcs->hw.tiger.rcvbuf[0] = r_val;
490 bcs->hw.tiger.r_fcs = PPP_FCS (bcs->hw.tiger.r_fcs, r_val);
491 if (bcs->cs->debug & L1_DEB_HSCX)
492 debugl1(bcs->cs,"tiger read_raw: byte1(%d,%d,%d) rval %x val %x i %x",
493 bcs->hw.tiger.r_tot,i,j,r_val,val,
494 bcs->cs->hw.njet.irqstat0);
495 }
496 } else if (state == HDLC_FRAME_FOUND) {
497 if (val & 1) {
498 r_one++;
499 if (r_one>6) {
500 state=HDLC_ZERO_SEARCH;
501 bitcnt=0;
502 } else {
503 r_val >>= 1;
504 r_val |= 0x80;
505 bitcnt++;
506 }
507 } else {
508 if (r_one==6) {
509 r_val=0;
510 r_one=0;
511 bitcnt++;
512 if (bitcnt & 7) {
513 debugl1(bcs->cs, "tiger: frame not byte aligned");
514 state=HDLC_FLAG_SEARCH;
515 bcs->hw.tiger.r_err++;
516#ifdef ERROR_STATISTIC
517 bcs->err_inv++;
518#endif
519 } else {
520 if (bcs->cs->debug & L1_DEB_HSCX)
521 debugl1(bcs->cs,"tiger frame end(%d,%d): fcs(%x) i %x",
522 i,j,bcs->hw.tiger.r_fcs, bcs->cs->hw.njet.irqstat0);
523 if (bcs->hw.tiger.r_fcs == PPP_GOODFCS) {
524 got_frame(bcs, (bitcnt>>3)-3);
525 } else {
526 if (bcs->cs->debug) {
527 debugl1(bcs->cs, "tiger FCS error");
528 printframe(bcs->cs, bcs->hw.tiger.rcvbuf,
529 (bitcnt>>3)-1, "rec");
530 bcs->hw.tiger.r_err++;
531 }
532#ifdef ERROR_STATISTIC
533 bcs->err_crc++;
534#endif
535 }
536 state=HDLC_FLAG_FOUND;
537 }
538 bitcnt=0;
539 } else if (r_one==5) {
540 val >>= 1;
541 r_one=0;
542 continue;
543 } else {
544 r_val >>= 1;
545 r_val &= 0x7f;
546 bitcnt++;
547 }
548 r_one=0;
549 }
550 if ((state == HDLC_FRAME_FOUND) &&
551 !(bitcnt & 7)) {
552 if ((bitcnt>>3)>=HSCX_BUFMAX) {
553 debugl1(bcs->cs, "tiger: frame too big");
554 r_val=0;
555 state=HDLC_FLAG_SEARCH;
556 bcs->hw.tiger.r_err++;
557#ifdef ERROR_STATISTIC
558 bcs->err_inv++;
559#endif
560 } else {
561 bcs->hw.tiger.rcvbuf[(bitcnt>>3)-1] = r_val;
562 bcs->hw.tiger.r_fcs =
563 PPP_FCS (bcs->hw.tiger.r_fcs, r_val);
564 }
565 }
566 }
567 val >>= 1;
568 }
569 bcs->hw.tiger.r_tot++;
570 }
571 bcs->hw.tiger.r_state = state;
572 bcs->hw.tiger.r_one = r_one;
573 bcs->hw.tiger.r_val = r_val;
574 bcs->hw.tiger.r_bitcnt = bitcnt;
575}
576
577void read_tiger(struct IsdnCardState *cs) {
578 u_int *p;
579 int cnt = NETJET_DMA_RXSIZE/2;
580
581 if ((cs->hw.njet.irqstat0 & cs->hw.njet.last_is0) & NETJET_IRQM0_READ) {
582 debugl1(cs,"tiger warn read double dma %x/%x",
583 cs->hw.njet.irqstat0, cs->hw.njet.last_is0);
584#ifdef ERROR_STATISTIC
585 if (cs->bcs[0].mode)
586 cs->bcs[0].err_rdo++;
587 if (cs->bcs[1].mode)
588 cs->bcs[1].err_rdo++;
589#endif
590 return;
591 } else {
592 cs->hw.njet.last_is0 &= ~NETJET_IRQM0_READ;
593 cs->hw.njet.last_is0 |= (cs->hw.njet.irqstat0 & NETJET_IRQM0_READ);
594 }
595 if (cs->hw.njet.irqstat0 & NETJET_IRQM0_READ_1)
596 p = cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE - 1;
597 else
598 p = cs->bcs[0].hw.tiger.rec + cnt - 1;
599 if ((cs->bcs[0].mode == L1_MODE_HDLC) || (cs->bcs[0].mode == L1_MODE_HDLC_56K))
600 read_raw(cs->bcs, p, cnt);
601
602 if ((cs->bcs[1].mode == L1_MODE_HDLC) || (cs->bcs[1].mode == L1_MODE_HDLC_56K))
603 read_raw(cs->bcs + 1, p, cnt);
604 cs->hw.njet.irqstat0 &= ~NETJET_IRQM0_READ;
605}
606
607static void write_raw(struct BCState *bcs, u_int *buf, int cnt);
608
609void netjet_fill_dma(struct BCState *bcs)
610{
611 register u_int *p, *sp;
612 register int cnt;
613
614 if (!bcs->tx_skb)
615 return;
616 if (bcs->cs->debug & L1_DEB_HSCX)
617 debugl1(bcs->cs,"tiger fill_dma1: c%d %4x", bcs->channel,
618 bcs->Flag);
619 if (test_and_set_bit(BC_FLG_BUSY, &bcs->Flag))
620 return;
621 if (bcs->mode == L1_MODE_HDLC) { // it's 64k
622 if (make_raw_data(bcs))
623 return;
624 }
625 else { // it's 56k
626 if (make_raw_data_56k(bcs))
627 return;
628 };
629 if (bcs->cs->debug & L1_DEB_HSCX)
630 debugl1(bcs->cs,"tiger fill_dma2: c%d %4x", bcs->channel,
631 bcs->Flag);
632 if (test_and_clear_bit(BC_FLG_NOFRAME, &bcs->Flag)) {
633 write_raw(bcs, bcs->hw.tiger.sendp, bcs->hw.tiger.free);
634 } else if (test_and_clear_bit(BC_FLG_HALF, &bcs->Flag)) {
635 p = bus_to_virt(inl(bcs->cs->hw.njet.base + NETJET_DMA_READ_ADR));
636 sp = bcs->hw.tiger.sendp;
637 if (p == bcs->hw.tiger.s_end)
638 p = bcs->hw.tiger.send -1;
639 if (sp == bcs->hw.tiger.s_end)
640 sp = bcs->hw.tiger.send -1;
641 cnt = p - sp;
642 if (cnt <0) {
643 write_raw(bcs, bcs->hw.tiger.sendp, bcs->hw.tiger.free);
644 } else {
645 p++;
646 cnt++;
647 if (p > bcs->hw.tiger.s_end)
648 p = bcs->hw.tiger.send;
649 p++;
650 cnt++;
651 if (p > bcs->hw.tiger.s_end)
652 p = bcs->hw.tiger.send;
653 write_raw(bcs, p, bcs->hw.tiger.free - cnt);
654 }
655 } else if (test_and_clear_bit(BC_FLG_EMPTY, &bcs->Flag)) {
656 p = bus_to_virt(inl(bcs->cs->hw.njet.base + NETJET_DMA_READ_ADR));
657 cnt = bcs->hw.tiger.s_end - p;
658 if (cnt < 2) {
659 p = bcs->hw.tiger.send + 1;
660 cnt = NETJET_DMA_TXSIZE/2 - 2;
661 } else {
662 p++;
663 p++;
664 if (cnt <= (NETJET_DMA_TXSIZE/2))
665 cnt += NETJET_DMA_TXSIZE/2;
666 cnt--;
667 cnt--;
668 }
669 write_raw(bcs, p, cnt);
670 }
671 if (bcs->cs->debug & L1_DEB_HSCX)
672 debugl1(bcs->cs,"tiger fill_dma3: c%d %4x", bcs->channel,
673 bcs->Flag);
674}
675
676static void write_raw(struct BCState *bcs, u_int *buf, int cnt) {
677 u_int mask, val, *p=buf;
678 u_int i, s_cnt;
679
680 if (cnt <= 0)
681 return;
682 if (test_bit(BC_FLG_BUSY, &bcs->Flag)) {
683 if (bcs->hw.tiger.sendcnt> cnt) {
684 s_cnt = cnt;
685 bcs->hw.tiger.sendcnt -= cnt;
686 } else {
687 s_cnt = bcs->hw.tiger.sendcnt;
688 bcs->hw.tiger.sendcnt = 0;
689 }
690 if (bcs->channel)
691 mask = 0xffff00ff;
692 else
693 mask = 0xffffff00;
694 for (i=0; i<s_cnt; i++) {
695 val = bcs->channel ? ((bcs->hw.tiger.sp[i] <<8) & 0xff00) :
696 (bcs->hw.tiger.sp[i]);
697 *p &= mask;
698 *p++ |= val;
699 if (p>bcs->hw.tiger.s_end)
700 p = bcs->hw.tiger.send;
701 }
702 bcs->hw.tiger.s_tot += s_cnt;
703 if (bcs->cs->debug & L1_DEB_HSCX)
704 debugl1(bcs->cs,"tiger write_raw: c%d %p-%p %d/%d %d %x", bcs->channel,
705 buf, p, s_cnt, cnt,
706 bcs->hw.tiger.sendcnt, bcs->cs->hw.njet.irqstat0);
707 if (bcs->cs->debug & L1_DEB_HSCX_FIFO)
708 printframe(bcs->cs, bcs->hw.tiger.sp, s_cnt, "snd");
709 bcs->hw.tiger.sp += s_cnt;
710 bcs->hw.tiger.sendp = p;
711 if (!bcs->hw.tiger.sendcnt) {
712 if (!bcs->tx_skb) {
713 debugl1(bcs->cs,"tiger write_raw: NULL skb s_cnt %d", s_cnt);
714 } else {
715 if (test_bit(FLG_LLI_L1WAKEUP,&bcs->st->lli.flag) &&
716 (PACKET_NOACK != bcs->tx_skb->pkt_type)) {
717 u_long flags;
718 spin_lock_irqsave(&bcs->aclock, flags);
719 bcs->ackcnt += bcs->tx_skb->len;
720 spin_unlock_irqrestore(&bcs->aclock, flags);
721 schedule_event(bcs, B_ACKPENDING);
722 }
723 dev_kfree_skb_any(bcs->tx_skb);
724 bcs->tx_skb = NULL;
725 }
726 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
727 bcs->hw.tiger.free = cnt - s_cnt;
728 if (bcs->hw.tiger.free > (NETJET_DMA_TXSIZE/2))
729 test_and_set_bit(BC_FLG_HALF, &bcs->Flag);
730 else {
731 test_and_clear_bit(BC_FLG_HALF, &bcs->Flag);
732 test_and_set_bit(BC_FLG_NOFRAME, &bcs->Flag);
733 }
734 if ((bcs->tx_skb = skb_dequeue(&bcs->squeue))) {
735 netjet_fill_dma(bcs);
736 } else {
737 mask ^= 0xffffffff;
738 if (s_cnt < cnt) {
739 for (i=s_cnt; i<cnt;i++) {
740 *p++ |= mask;
741 if (p>bcs->hw.tiger.s_end)
742 p = bcs->hw.tiger.send;
743 }
744 if (bcs->cs->debug & L1_DEB_HSCX)
745 debugl1(bcs->cs, "tiger write_raw: fill rest %d",
746 cnt - s_cnt);
747 }
748 test_and_set_bit(B_XMTBUFREADY, &bcs->event);
749 schedule_work(&bcs->tqueue);
750 }
751 }
752 } else if (test_and_clear_bit(BC_FLG_NOFRAME, &bcs->Flag)) {
753 test_and_set_bit(BC_FLG_HALF, &bcs->Flag);
754 fill_mem(bcs, buf, cnt, bcs->channel, 0xff);
755 bcs->hw.tiger.free += cnt;
756 if (bcs->cs->debug & L1_DEB_HSCX)
757 debugl1(bcs->cs,"tiger write_raw: fill half");
758 } else if (test_and_clear_bit(BC_FLG_HALF, &bcs->Flag)) {
759 test_and_set_bit(BC_FLG_EMPTY, &bcs->Flag);
760 fill_mem(bcs, buf, cnt, bcs->channel, 0xff);
761 if (bcs->cs->debug & L1_DEB_HSCX)
762 debugl1(bcs->cs,"tiger write_raw: fill full");
763 }
764}
765
766void write_tiger(struct IsdnCardState *cs) {
767 u_int *p, cnt = NETJET_DMA_TXSIZE/2;
768
769 if ((cs->hw.njet.irqstat0 & cs->hw.njet.last_is0) & NETJET_IRQM0_WRITE) {
770 debugl1(cs,"tiger warn write double dma %x/%x",
771 cs->hw.njet.irqstat0, cs->hw.njet.last_is0);
772#ifdef ERROR_STATISTIC
773 if (cs->bcs[0].mode)
774 cs->bcs[0].err_tx++;
775 if (cs->bcs[1].mode)
776 cs->bcs[1].err_tx++;
777#endif
778 return;
779 } else {
780 cs->hw.njet.last_is0 &= ~NETJET_IRQM0_WRITE;
781 cs->hw.njet.last_is0 |= (cs->hw.njet.irqstat0 & NETJET_IRQM0_WRITE);
782 }
783 if (cs->hw.njet.irqstat0 & NETJET_IRQM0_WRITE_1)
784 p = cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE - 1;
785 else
786 p = cs->bcs[0].hw.tiger.send + cnt - 1;
787 if ((cs->bcs[0].mode == L1_MODE_HDLC) || (cs->bcs[0].mode == L1_MODE_HDLC_56K))
788 write_raw(cs->bcs, p, cnt);
789 if ((cs->bcs[1].mode == L1_MODE_HDLC) || (cs->bcs[1].mode == L1_MODE_HDLC_56K))
790 write_raw(cs->bcs + 1, p, cnt);
791 cs->hw.njet.irqstat0 &= ~NETJET_IRQM0_WRITE;
792}
793
794static void
795tiger_l2l1(struct PStack *st, int pr, void *arg)
796{
797 struct BCState *bcs = st->l1.bcs;
798 struct sk_buff *skb = arg;
799 u_long flags;
800
801 switch (pr) {
802 case (PH_DATA | REQUEST):
803 spin_lock_irqsave(&bcs->cs->lock, flags);
804 if (bcs->tx_skb) {
805 skb_queue_tail(&bcs->squeue, skb);
806 } else {
807 bcs->tx_skb = skb;
808 bcs->cs->BC_Send_Data(bcs);
809 }
810 spin_unlock_irqrestore(&bcs->cs->lock, flags);
811 break;
812 case (PH_PULL | INDICATION):
813 spin_lock_irqsave(&bcs->cs->lock, flags);
814 if (bcs->tx_skb) {
815 printk(KERN_WARNING "tiger_l2l1: this shouldn't happen\n");
816 } else {
817 bcs->tx_skb = skb;
818 bcs->cs->BC_Send_Data(bcs);
819 }
820 spin_unlock_irqrestore(&bcs->cs->lock, flags);
821 break;
822 case (PH_PULL | REQUEST):
823 if (!bcs->tx_skb) {
824 test_and_clear_bit(FLG_L1_PULL_REQ, &st->l1.Flags);
825 st->l1.l1l2(st, PH_PULL | CONFIRM, NULL);
826 } else
827 test_and_set_bit(FLG_L1_PULL_REQ, &st->l1.Flags);
828 break;
829 case (PH_ACTIVATE | REQUEST):
830 spin_lock_irqsave(&bcs->cs->lock, flags);
831 test_and_set_bit(BC_FLG_ACTIV, &bcs->Flag);
832 mode_tiger(bcs, st->l1.mode, st->l1.bc);
833 /* 2001/10/04 Christoph Ersfeld, Formula-n Europe AG */
834 spin_unlock_irqrestore(&bcs->cs->lock, flags);
835 bcs->cs->cardmsg(bcs->cs, MDL_BC_ASSIGN, (void *)(&st->l1.bc));
836 l1_msg_b(st, pr, arg);
837 break;
838 case (PH_DEACTIVATE | REQUEST):
839 /* 2001/10/04 Christoph Ersfeld, Formula-n Europe AG */
840 bcs->cs->cardmsg(bcs->cs, MDL_BC_RELEASE, (void *)(&st->l1.bc));
841 l1_msg_b(st, pr, arg);
842 break;
843 case (PH_DEACTIVATE | CONFIRM):
844 spin_lock_irqsave(&bcs->cs->lock, flags);
845 test_and_clear_bit(BC_FLG_ACTIV, &bcs->Flag);
846 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
847 mode_tiger(bcs, 0, st->l1.bc);
848 spin_unlock_irqrestore(&bcs->cs->lock, flags);
849 st->l1.l1l2(st, PH_DEACTIVATE | CONFIRM, NULL);
850 break;
851 }
852}
853
854
855void
856close_tigerstate(struct BCState *bcs)
857{
858 mode_tiger(bcs, 0, bcs->channel);
859 if (test_and_clear_bit(BC_FLG_INIT, &bcs->Flag)) {
860 if (bcs->hw.tiger.rcvbuf) {
861 kfree(bcs->hw.tiger.rcvbuf);
862 bcs->hw.tiger.rcvbuf = NULL;
863 }
864 if (bcs->hw.tiger.sendbuf) {
865 kfree(bcs->hw.tiger.sendbuf);
866 bcs->hw.tiger.sendbuf = NULL;
867 }
868 skb_queue_purge(&bcs->rqueue);
869 skb_queue_purge(&bcs->squeue);
870 if (bcs->tx_skb) {
871 dev_kfree_skb_any(bcs->tx_skb);
872 bcs->tx_skb = NULL;
873 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
874 }
875 }
876}
877
878static int
879open_tigerstate(struct IsdnCardState *cs, struct BCState *bcs)
880{
881 if (!test_and_set_bit(BC_FLG_INIT, &bcs->Flag)) {
882 if (!(bcs->hw.tiger.rcvbuf = kmalloc(HSCX_BUFMAX, GFP_ATOMIC))) {
883 printk(KERN_WARNING
884 "HiSax: No memory for tiger.rcvbuf\n");
885 return (1);
886 }
887 if (!(bcs->hw.tiger.sendbuf = kmalloc(RAW_BUFMAX, GFP_ATOMIC))) {
888 printk(KERN_WARNING
889 "HiSax: No memory for tiger.sendbuf\n");
890 return (1);
891 }
892 skb_queue_head_init(&bcs->rqueue);
893 skb_queue_head_init(&bcs->squeue);
894 }
895 bcs->tx_skb = NULL;
896 bcs->hw.tiger.sendcnt = 0;
897 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
898 bcs->event = 0;
899 bcs->tx_cnt = 0;
900 return (0);
901}
902
903int
904setstack_tiger(struct PStack *st, struct BCState *bcs)
905{
906 bcs->channel = st->l1.bc;
907 if (open_tigerstate(st->l1.hardware, bcs))
908 return (-1);
909 st->l1.bcs = bcs;
910 st->l2.l2l1 = tiger_l2l1;
911 setstack_manager(st);
912 bcs->st = st;
913 setstack_l1_B(st);
914 return (0);
915}
916
917
918void __init
919inittiger(struct IsdnCardState *cs)
920{
921 if (!(cs->bcs[0].hw.tiger.send = kmalloc(NETJET_DMA_TXSIZE * sizeof(unsigned int),
922 GFP_KERNEL | GFP_DMA))) {
923 printk(KERN_WARNING
924 "HiSax: No memory for tiger.send\n");
925 return;
926 }
927 cs->bcs[0].hw.tiger.s_irq = cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE/2 - 1;
928 cs->bcs[0].hw.tiger.s_end = cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE - 1;
929 cs->bcs[1].hw.tiger.send = cs->bcs[0].hw.tiger.send;
930 cs->bcs[1].hw.tiger.s_irq = cs->bcs[0].hw.tiger.s_irq;
931 cs->bcs[1].hw.tiger.s_end = cs->bcs[0].hw.tiger.s_end;
932
933 memset(cs->bcs[0].hw.tiger.send, 0xff, NETJET_DMA_TXSIZE * sizeof(unsigned int));
934 debugl1(cs, "tiger: send buf %p - %p", cs->bcs[0].hw.tiger.send,
935 cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE - 1);
936 outl(virt_to_bus(cs->bcs[0].hw.tiger.send),
937 cs->hw.njet.base + NETJET_DMA_READ_START);
938 outl(virt_to_bus(cs->bcs[0].hw.tiger.s_irq),
939 cs->hw.njet.base + NETJET_DMA_READ_IRQ);
940 outl(virt_to_bus(cs->bcs[0].hw.tiger.s_end),
941 cs->hw.njet.base + NETJET_DMA_READ_END);
942 if (!(cs->bcs[0].hw.tiger.rec = kmalloc(NETJET_DMA_RXSIZE * sizeof(unsigned int),
943 GFP_KERNEL | GFP_DMA))) {
944 printk(KERN_WARNING
945 "HiSax: No memory for tiger.rec\n");
946 return;
947 }
948 debugl1(cs, "tiger: rec buf %p - %p", cs->bcs[0].hw.tiger.rec,
949 cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE - 1);
950 cs->bcs[1].hw.tiger.rec = cs->bcs[0].hw.tiger.rec;
951 memset(cs->bcs[0].hw.tiger.rec, 0xff, NETJET_DMA_RXSIZE * sizeof(unsigned int));
952 outl(virt_to_bus(cs->bcs[0].hw.tiger.rec),
953 cs->hw.njet.base + NETJET_DMA_WRITE_START);
954 outl(virt_to_bus(cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE/2 - 1),
955 cs->hw.njet.base + NETJET_DMA_WRITE_IRQ);
956 outl(virt_to_bus(cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE - 1),
957 cs->hw.njet.base + NETJET_DMA_WRITE_END);
958 debugl1(cs, "tiger: dmacfg %x/%x pulse=%d",
959 inl(cs->hw.njet.base + NETJET_DMA_WRITE_ADR),
960 inl(cs->hw.njet.base + NETJET_DMA_READ_ADR),
961 bytein(cs->hw.njet.base + NETJET_PULSE_CNT));
962 cs->hw.njet.last_is0 = 0;
963 cs->bcs[0].BC_SetStack = setstack_tiger;
964 cs->bcs[1].BC_SetStack = setstack_tiger;
965 cs->bcs[0].BC_Close = close_tigerstate;
966 cs->bcs[1].BC_Close = close_tigerstate;
967}
968
969void
970releasetiger(struct IsdnCardState *cs)
971{
972 if (cs->bcs[0].hw.tiger.send) {
973 kfree(cs->bcs[0].hw.tiger.send);
974 cs->bcs[0].hw.tiger.send = NULL;
975 }
976 if (cs->bcs[1].hw.tiger.send) {
977 cs->bcs[1].hw.tiger.send = NULL;
978 }
979 if (cs->bcs[0].hw.tiger.rec) {
980 kfree(cs->bcs[0].hw.tiger.rec);
981 cs->bcs[0].hw.tiger.rec = NULL;
982 }
983 if (cs->bcs[1].hw.tiger.rec) {
984 cs->bcs[1].hw.tiger.rec = NULL;
985 }
986}
987
988void
989release_io_netjet(struct IsdnCardState *cs)
990{
991 byteout(cs->hw.njet.base + NETJET_IRQMASK0, 0);
992 byteout(cs->hw.njet.base + NETJET_IRQMASK1, 0);
993 releasetiger(cs);
994 release_region(cs->hw.njet.base, 256);
995}
996