nodemask_t node_online_neighbour_map = NODE_MASK_NONE;
EXPORT_SYMBOL(node_online_neighbour_map);

int node_neighbour_num = 0;
void intr_channel(unsigned int es, unsigned int tcs, unsigned int mcs,
		  unsigned int instance);
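
/*
 * Top-level RDMA interrupt handler: for each IO link of the local node it
 * reads the event status (ES) register, records neighbours that have just
 * started or are already active in node_online_neighbour_map, and passes
 * every still-pending masked event word to intr_channel().
 */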
void rdma_interrupt(struct pt_regs *regs)
{
	unsigned int es, tcs, mcs;
	unsigned int node_neighbour_num_add = 0;
	rdma_addr_struct_t p_xxb;
	/* ... */
	unsigned int link = NODE_NUMIOLINKS, i, inst;
	/* ... */
	cpu = raw_smp_processor_id();
	unsigned int node_id = e90s_cpu_to_node(cpu);
	/* ... */
	cpu = raw_smp_processor_id();
	unsigned int node_id = numa_node_id();
	/* ... */
	/* Temporary, until link is defined correctly */
	for (i = 0; i < link; i++) {
		node = node_id * NODE_NUMIOLINKS + i;
		for_each_online_rdma(inst)
			if (node == inst) goto next;
		/* ... */
		fix_event(node, RDMA_INTR, START_EVENT, cpu);
		es = RDR_rdma(SHIFT_CS, node);
		es = RDR_rdma(SHIFT_ES, node);
		if (es & ES_RIRM_Ev) {
			/* A neighbor has started */
			/* ... */
			WRR_rdma(SHIFT_ES, node, ES_RIRM_Ev); /* for CAM */
			/* ... */
			node_neighbour_num_add = 0;
			if (!node_test_and_set(node, node_online_neighbour_map))
				node_neighbour_num_add = 1;
			/* ... */
			if (node_neighbour_num_add)
				/* ... */
				*((unsigned long *)&node_online_neighbour_map);
			fix_event(node, RDMA_INTR, RIRM_EVENT,
				  ((node_neighbour_num & 0xf) << 28) |
				  (p_xxb.fields.laddr & 0x0fffffff));
		/* ... */
		if (es & ES_RIAM_Ev) {
			/* Neighbor is already active */
			/* ... */
			WRR_rdma(SHIFT_ES, node, ES_RIAM_Ev); /* for CAM */
			/* ... */
			node_neighbour_num_add = 0;
			if (!node_test_and_set(node, node_online_neighbour_map))
				node_neighbour_num_add = 1;
			if (node_neighbour_num_add)
				/* ... */
				*((unsigned long *)&node_online_neighbour_map);
			fix_event(node, RDMA_INTR, RIAM_EVENT,
				  ((node_neighbour_num & 0xf) << 28) |
				  (p_xxb.fields.laddr & 0x0fffffff));
		/* ... */
		while ((es = RDR_rdma(SHIFT_ES, node)) & irq_mc) {
			tcs = RDR_rdma(SHIFT_DMA_TCS, node);
			/* ... */
			WRR_rdma(SHIFT_ES, node, es & ~ES_SM_Ev & ~ES_DSF_Ev);
			/* ... */
			WRR_rdma(SHIFT_ES, node, es & ~ES_SM_Ev);
			/* ... */
			mcs = RDR_rdma(SHIFT_MSG_CS, node);
			intr_channel(es, tcs, mcs, node);
		/* ... */
		fix_event(node, RDMA_INTR, RETURN_EVENT, 0);
	/* ... */
EXPORT_SYMBOL(rdma_interrupt);
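
/*
 * Per-link event dispatcher: decodes the event-status word read by
 * rdma_interrupt() and handles each pending bit in turn -- alive/CAM
 * housekeeping, receive and transmit DMA completion, DMA send failure,
 * incoming messages (the READY/TRWD handshake) and the ID request/answer
 * events.
 */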
void intr_channel(unsigned int evs, unsigned int tcs, unsigned int mcs,
		  unsigned int instance)
{
	struct stat_rdma *pst;
	/* ... */
	dev_rdma_sem_t *dev_sem;
	/* ... */
	register volatile unsigned int tbc;
	unsigned int int_cnt = 0;
	rdma_state_inst_t *xspi = &rdma_state->rdma_sti[instance];
	rdma_addr_struct_t p_xxb, p_xxb_pa;
	/* ... */
	fix_event(instance, INTR_START_EVENT, evs, tcs);
	pst = &xspi->stat_rdma;
	/* ... */
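	/*
	 * ES_RGP3M_Ev: remote general-purpose message 3. Depending on the
	 * current CAM state, drop or re-arm the alive timeout (SHIFT_CAM,
	 * tr_atl) and wake any waiter on the talive/ralive semaphores.
	 */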
	if (evs & ES_RGP3M_Ev) {
		dev_rdma_sem_t *dev_sem;
		/* ... */
		if (RDR_rdma(SHIFT_CAM, instance)) {
			WRR_rdma(SHIFT_CAM, instance, 0);
			pcam = &xspi->talive;
			dev_sem = &pcam->dev_rdma_sem;
			raw_spin_lock(&dev_sem->lock);
			if (pcam->stat == 1) {
				pcam->clkr = join_curr_clock();
				pcam->int_cnt = int_cnt;
				rdma_cv_broadcast_rdma(&pcam->dev_rdma_sem,
						       /* ... */
			raw_spin_unlock(&dev_sem->lock);
		/* ... */
		if (state_cam == RDMA_UNSET_CAM) {
			pcam = &xspi->talive;
			dev_sem = &pcam->dev_rdma_sem;
			raw_spin_lock(&dev_sem->lock);
			if (pcam->stat == 1) {
				pcam->clkr = join_curr_clock();
				pcam->int_cnt = int_cnt;
				rdma_cv_broadcast_rdma(
					/* ... */
			raw_spin_unlock(&dev_sem->lock);
			/* ... */
			WRR_rdma(SHIFT_CAM, instance, tr_atl);
			pcam = &xspi->ralive;
			dev_sem = &pcam->dev_rdma_sem;
			raw_spin_lock(&dev_sem->lock);
			/* ... */
			rdma_cv_broadcast_rdma(
				/* ... */
			raw_spin_unlock(&dev_sem->lock);
	/* ... */
	cur_clock = (unsigned long)jiffies;
	if (evs & ES_CMIE_Ev) {
		WRR_rdma(SHIFT_MSG_CS, instance, MSG_CS_Msg_Rst);
		fix_event(instance, INTR_CMIE_EVENT, 0, 0);
	/* ... */
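	/*
	 * ES_RDC_Ev: receive-DMA complete. Snapshot the RX DMA registers,
	 * clear the receive-enable and RFSM bits, account the received
	 * bytes and wake the reader waiting on the channel's dev_rdma_sem.
	 */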
	if (evs & ES_RDC_Ev) {
		pst->rcs = RDR_rdma(SHIFT_DMA_RCS, instance);
		pst->rbc = RDR_rdma(SHIFT_DMA_RBC, instance);
		pst->rsa = RDR_rdma(SHIFT_DMA_RSA, instance);
		WRR_rdma(SHIFT_DMA_RCS, instance, pst->rcs & (~DMA_RCS_RE));
		pst->rcs = RDR_rdma(SHIFT_DMA_RCS, instance);
		/* ... */
		WRR_rdma(SHIFT_DMA_RCS, instance,
			 pst->rcs & (~DMA_RCS_RFSM));
		WRR_rdma(SHIFT_DMA_RBC, instance, CLEAR_RFSM);
		/* ... */
		pd = xspi->rw_states_rd;
		p_xxb.addr = (unsigned long)pd;
		fix_event(instance, INTR_RDC_EVENT, p_xxb.fields.haddr,
			  /* ... */
		xspi->rw_states_rd = 0;
		/* ... */
			fix_event(instance, INTR_RDC_PD_NULL_EVENT,
				  intr_rdc_count[instance], tcs);
			/* ... */
			goto ES_RDC_Ev_label;
		/* ... */
		dev_sem = &pd->dev_rdma_sem;
		p_xxb.addr = (unsigned long)dev_sem;
		fix_event(instance, INTR_RDC_EVENT, pd->int_ac,
			  intr_rdc_count[instance]);
		fix_event(instance, INTR_RDC_EVENT, p_xxb.fields.haddr,
			  /* ... */
		raw_spin_lock(&dev_sem->lock);
		intr_rdc_count[instance]++;
		pd->clock_rdc = cur_clock;
		switch (pd->int_ac) {
		/* ... */
			fix_event(instance, INTR_SIGN2_READ_EVENT,
				  pd->int_ac, dev_sem->num_obmen);
			dev_sem->time_broadcast = join_curr_clock();
			rdma_cv_broadcast_rdma(&pd->dev_rdma_sem, instance);
			/* ... */
			fix_event(instance, INTR_UNEXP2_READ_EVENT,
				  pd->int_ac, dev_sem->num_obmen);
			/* ... */
		raw_spin_unlock(&dev_sem->lock);
		/* ... */
		rdc_byte += allign_dma(pd->size_trb);
		if (rdc_byte >> 10) {
			pst->rdc_kbyte += (rdc_byte >> 10);
		/* ... */
		evs = evs & ~ES_RDC_Ev;
	/* ... */
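	/*
	 * ES_TDC_Ev / ES_DSF_Ev: transmit-DMA complete or data-send failure.
	 * Snapshot the TX DMA registers; on DSF apparently reset and
	 * re-enable the transmitter and count the individual TCS error bits,
	 * then wake the writer waiting on dev_rdma_sem.
	 */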
	if (evs & (ES_TDC_Ev | ES_DSF_Ev)) {
		pst->tcs = RDR_rdma(SHIFT_DMA_TCS, instance);
		pst->tbc = RDR_rdma(SHIFT_DMA_TBC, instance);
		pst->tsa = RDR_rdma(SHIFT_DMA_TSA, instance);
		if (evs & ES_TDC_Ev)
			WRR_rdma(SHIFT_DMA_TCS, instance, pst->tcs & (~DMA_TCS_TE));
		pst->tcs = RDR_rdma(SHIFT_DMA_TCS, instance);
		pd = xspi->rw_states_wr;
		/* ... */
			fix_event(instance, INTR_TDC_DSF_PD_NULL_EVENT,
				  intr_rdc_count[instance], tcs);
			goto ES_TDC_Ev_label;
		/* ... */
		xspi->rw_states_wr = 0;
		dev_sem = &pd->dev_rdma_sem;
		raw_spin_lock(&dev_sem->lock);
		/* ... */
		pd->clock_tdc = cur_clock;
		if (evs & ES_DSF_Ev) {
			tbc = RDR_rdma(SHIFT_DMA_TBC, instance);
			/* ... */
			WRR_rdma(SIC_rdma_irq_mc, instance, irq_mc & ~IRQ_DSF);
			for (count_reset_tcs = 0; count_reset_tcs < 10;
			     /* ... */
				WRR_rdma(SHIFT_DMA_TCS, instance,
					 /* ... */
			WRR_rdma(SIC_rdma_irq_mc, instance, irq_mc);
			WRR_rdma(SHIFT_DMA_TCS, instance,
				 RCode_64 | DMA_TCS_DRCL);
			/* ... */
			fix_event(instance, INTR_DSF_EVENT, pd->int_ac, tcs);
			fix_event(instance, INTR_DSF_EVENT, pd->int_ac,
				  /* ... */
			fix_event(instance, INTR_TDC_EVENT, pd->int_ac,
				  /* ... */
		switch (pd->int_ac) {
		/* ... */
			fix_event(instance, INTR_SIGN1_WRITE_EVENT,
				  pd->int_ac, dev_sem->num_obmen);
			rdma_cv_broadcast_rdma(&pd->dev_rdma_sem, instance);
			/* ... */
			fix_event(instance, INTR_TDC_UNXP_EVENT, pd->int_ac,
				  /* ... */
		raw_spin_unlock(&dev_sem->lock);
		/* ... */
		if (evs & ES_DSF_Ev) {
			/* ... */
			if (tcs & DMA_TCS_DPS_Err)
				pst->dma_tcs_dps_err++;
			/* ... */
			if (tcs & DMA_TCS_DPCRC_Err)
				pst->dma_tcs_dpcrc_err++;
			/* ... */
			if (tcs & DMA_TCS_DPTO_Err)
				pst->dma_tcs_dpto_err++;
			/* ... */
			if (tcs & DMA_TCS_DPID_Err)
				pst->dma_tcs_dpid_err++;
		if (evs & ES_TDC_Ev) {
			/* ... */
		evs = evs & (~(ES_TDC_Ev | ES_DSF_Ev));
	/* ... */
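	/*
	 * ES_RDM_Ev: an incoming message arrived. MSG_READY from the peer
	 * starts the transmit DMA prepared for it; MSG_TRWD announces data
	 * to be read and wakes the reader; anything else is handed to the
	 * rw_states_m[0] message waiter.
	 */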
	if (evs & ES_RDM_Ev) {
		int rdmc = (evs & ES_RDMC) >> 27;
		/* ... */
		msg = RDR_rdma(SHIFT_RDMSG, instance);
		/* ... */
		if ((msg & MSG_OPER) == MSG_READY) {
			/* ... */
			switch ((msg & MSG_ABONENT) >> SHIFT_ABONENT) {
			/* ... */
				pd = &xspi->rw_states_d[WRITER];
				/* ... */
				pd = &xspi->rw_states_d[WRITER];
			/* ... */
			dev_sem = &pd->dev_rdma_sem;
			p_xxb.addr = (unsigned long)pd;
			fix_event(instance, INTR_READY_EVENT,
				  pd->int_ac, dev_sem->num_obmen);
			fix_event(instance, INTR_READY_EVENT,
				  p_xxb.fields.haddr, p_xxb.fields.laddr);
			raw_spin_lock(&dev_sem->lock);
			switch (pd->int_ac) {
			/* ... */
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
				raw_spin_unlock(&dev_sem->lock);
			/* ... */
			pd->clock_receive_ready = cur_clock;
			/* ... */
			fix_event(instance, INTR_TDMA_EVENT,
				  pd->real_size, pd->dma);
			xspi->rw_states_wr = pd;
			if (RDR_rdma(SHIFT_DMA_TBC, instance)) {
				/* ... */
				rdma_cv_broadcast_rdma(
					/* ... */
				raw_spin_unlock(&dev_sem->lock);
			/* ... */
			if (RDR_rdma(SHIFT_DMA_TCS, instance) &
			    /* ... */
				rdma_cv_broadcast_rdma(
					/* ... */
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
				rdma_cv_broadcast_rdma(
					/* ... */
				raw_spin_unlock(&dev_sem->lock);
			/* ... */
			p_xxb_pa.addr = (unsigned long)pd->dma;
			WRR_rdma(SHIFT_DMA_HTSA, instance,
				 p_xxb_pa.fields.haddr);
			WRR_rdma(SHIFT_DMA_TSA, instance,
				 p_xxb_pa.fields.laddr);
			/* ... */
			WRR_rdma(SHIFT_DMA_TBC,
				 /* ... */
			WRR_rdma(SHIFT_DMA_TBC,
				 /* ... */
				 PAGE_ALIGN(pd->real_size));
			/* ... */
			WRR_rdma(SHIFT_DMA_TBC, instance,
				 PAGE_ALIGN(pd->real_size));
			/* ... */
			WRR_rdma(SHIFT_DMA_TBC, instance,
				 /* ... */
			WRR_rdma(SHIFT_DMA_TCS, instance,
				 RCode_64 | DMA_TCS_DRCL |
				 /* ... */
				 (pd->tm ? DMA_TCS_TTM : 0));
			pst->tcs = RDR_rdma(SHIFT_DMA_TCS,
					    /* ... */
			pd->tm ? pst->try_TDMA_tm++ : pst->try_TDMA++;
			raw_spin_unlock(&dev_sem->lock);
		/* ... */
		if ((msg & MSG_OPER) == MSG_TRWD) {
			/* ... */
			switch ((msg & MSG_ABONENT) >> SHIFT_ABONENT) {
			/* ... */
				chann = msg & MSG_ABONENT;
				pd = &xspi->rw_states_d[READER];
				/* ... */
				chann = msg & MSG_ABONENT;
				pd = &xspi->rw_states_d[READER];
			/* ... */
			p_xxb.addr = (unsigned long)pd;
			dev_sem = &pd->dev_rdma_sem;
			pd->clock_receive_trwd = cur_clock;
			raw_spin_lock(&dev_sem->lock);
			fix_event(instance, INTR_TRWD_EVENT,
				  pd->int_ac, dev_sem->num_obmen);
			fix_event(instance, INTR_TRWD_EVENT,
				  p_xxb.fields.haddr, p_xxb.fields.laddr);
			switch (pd->int_ac) {
			/* ... */
					INTR_SIGN1_READ_EVENT,
					pd->int_ac, dev_sem->num_obmen);
				rdma_cv_broadcast_rdma(
					/* ... */
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
					INTR_TRWD_UNXP_EVENT,
					pd->int_ac, dev_sem->num_obmen);
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
					INTR_TRWD_UNXP_EVENT,
					pd->int_ac, dev_sem->num_obmen);
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
					INTR_TRWD_UNXP_EVENT,
					pd->int_ac, dev_sem->num_obmen);
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
					INTR_TRWD_UNXP_EVENT,
					pd->int_ac, dev_sem->num_obmen);
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
					INTR_TRWD_UNXP_EVENT,
					pd->int_ac, dev_sem->num_obmen);
				raw_spin_unlock(&dev_sem->lock);
			/* ... */
		} else { /* if (msg & MSG_TRWD) { */
			pm = &xspi->rw_states_m[0];
			dev_sem = &pm->dev_rdma_sem;
			raw_spin_lock(&dev_sem->lock);
			if (pm->stat == RDMA_IOC_DR) {
				fix_event(instance, INTR_RMSG_EVENT,
					  /* ... */
				rdma_cv_broadcast_rdma(
					/* ... */
				raw_spin_unlock(&dev_sem->lock);
				/* ... */
					INTR_RMSG_UNXP_EVENT,
					/* ... */
				raw_spin_unlock(&dev_sem->lock);
		/* ... */
		evs = evs & ~ES_RDM_Ev;
	/* ... */
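	/*
	 * ES_MSF_Ev: message send failure. Clear CAM, reset the message
	 * engine via MSG_CS_Msg_Rst and wake a possible talive waiter.
	 */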
	if (evs & ES_MSF_Ev) {
		dev_rdma_sem_t *dev_sem;
		/* ... */
		WRR_rdma(SHIFT_CAM, instance, 0);
		WRR_rdma(SHIFT_MSG_CS, instance, msg_cs_dmrcl | MSG_CS_Msg_Rst);
		fix_event(instance, INTR_MSF_EVENT, 1, 0);
		pcam = &xspi->talive;
		dev_sem = &pcam->dev_rdma_sem;
		raw_spin_lock(&dev_sem->lock);
		if (pcam->stat == 1) {
			pcam->clkr = join_curr_clock();
			pcam->int_cnt = int_cnt;
			rdma_cv_broadcast_rdma(&pcam->dev_rdma_sem, instance);
		/* ... */
		raw_spin_unlock(&dev_sem->lock);
	/* ... */
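	/*
	 * ES_RGP2M_Ev / ES_RGP1M_Ev / ES_RGP0M_Ev: remote general-purpose
	 * messages; GP0 is used, when enable_exit_gp0 is set, apparently to
	 * let a pending reader exit by waking it up.
	 */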
	if (evs & ES_RGP2M_Ev) {
		/* ... */
	if (evs & ES_RGP1M_Ev) {
		/* ... */
	if (evs & ES_RGP0M_Ev) {
		/* ... */
		if (enable_exit_gp0) {
			pd = &xspi->rw_states_d[READER];
			/* ... */
			dev_sem = &pd->dev_rdma_sem;
			fix_event(instance, INTR_GP0_EVENT, pd->int_ac,
				  /* ... */
			raw_spin_lock(&dev_sem->lock);
			/* ... */
			switch (pd->int_ac) {
			/* ... */
				rdma_cv_broadcast_rdma(&pd->dev_rdma_sem,
						       /* ... */
			raw_spin_unlock(&dev_sem->lock);
	/* ... */
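	/*
	 * ES_RLM_Ev / ES_RULM_Ev follow (bodies elided here); ES_RIAM_Ev
	 * ("neighbor is already active", the ID answer) re-arms CAM with
	 * tr_atl, records time_ID_ANS and wakes a ralive waiter.
	 */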
	if (evs & ES_RLM_Ev) {
		/* ... */
	if (evs & ES_RULM_Ev) {
		/* ... */
	if (evs & ES_RIAM_Ev) {
		dev_rdma_sem_t *dev_sem;
		/* ... */
		WRR_rdma(SHIFT_CAM, instance, tr_atl);
		time_ID_ANS = join_curr_clock();
		pcam = &xspi->ralive;
		dev_sem = &pcam->dev_rdma_sem;
		raw_spin_lock(&dev_sem->lock);
		if (pcam->stat == 1) {
			pcam->clkr = join_curr_clock();
			pcam->int_cnt = int_cnt;
			rdma_cv_broadcast_rdma(&pcam->dev_rdma_sem, instance);
		/* ... */
		raw_spin_unlock(&dev_sem->lock);
		/* ... */
		fix_event(instance, INTR_RIAM_EVENT, 0, pst->es_riam);
	/* ... */
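	/*
	 * ES_RIRM_Ev ("a neighbor has started", the ID request): same
	 * pattern as ES_RIAM_Ev, but records time_ID_REQ.
	 */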
	if (evs & ES_RIRM_Ev) {
		dev_rdma_sem_t *dev_sem;
		/* ... */
		WRR_rdma(SHIFT_CAM, instance, tr_atl);
		time_ID_REQ = join_curr_clock();
		pcam = &xspi->ralive;
		dev_sem = &pcam->dev_rdma_sem;
		raw_spin_lock(&dev_sem->lock);
		if (pcam->stat == 1) {
			pcam->clkr = join_curr_clock();
			pcam->int_cnt = int_cnt;
			rdma_cv_broadcast_rdma(&pcam->dev_rdma_sem, instance);
		/* ... */
		raw_spin_unlock(&dev_sem->lock);
		/* ... */
		fix_event(instance, INTR_RIRM_EVENT, 0, pst->es_rirm);
	/* ... */
	fix_event(instance, INTR_EXIT_EVENT, 0, 0);