39 #define OP(x) IB_OPCODE_RC_##x
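/*
 * Token-pasting shorthand: OP(SEND_FIRST) expands to
 * IB_OPCODE_RC_SEND_FIRST, keeping the long RC opcode switches
 * below readable.
 */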
41 static void rc_timeout(unsigned long arg);
57 static void start_timer(struct qib_qp *qp)
60 qp->s_timer.function = rc_timeout;
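/*
 * rc_timeout() is the RC retransmit-timeout handler. The
 * 'unsigned long arg' signature is the classic pre-4.15 kernel
 * timer API, so arming it presumably looks like the sketch below;
 * the .data assignment and the timeout value are assumptions, not
 * lines from this file:
 *
 *	qp->s_timer.data = (unsigned long) qp;
 *	mod_timer(&qp->s_timer, jiffies + timeout_jiffies);
 */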
94 case OP(RDMA_READ_RESPONSE_LAST):
95 case OP(RDMA_READ_RESPONSE_ONLY):
96 e = &qp->s_ack_queue[qp->s_tail_ack_queue];
102 case OP(ATOMIC_ACKNOWLEDGE):
109 qp->s_tail_ack_queue = 0;	/* wrap the circular s_ack_queue index */
112 case OP(ACKNOWLEDGE):
114 if (qp->r_head_ack_queue == qp->s_tail_ack_queue) {
121 if (e->opcode == OP(RDMA_READ_REQUEST)) {
157 ohdr->u.at.atomic_ack_eth[0] =
159 ohdr->u.at.atomic_ack_eth[1] =
161 hwords += sizeof(ohdr->u.at) / sizeof(u32);
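/*
 * The 64-bit atomic result travels as two big-endian 32-bit words:
 * atomic_ack_eth[0] presumably takes the high half and [1] the low
 * half (the right-hand sides fall on elided lines), and hwords
 * grows by the ATOMIC ACK ETH size in header dwords.
 */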
168 case OP(RDMA_READ_RESPONSE_FIRST):
169 qp->s_ack_state = OP(RDMA_READ_RESPONSE_MIDDLE);
171 case OP(RDMA_READ_RESPONSE_MIDDLE):
172 qp->s_cur_sge = &qp->s_ack_rdma_sge;
210 bth0 = OP(ACKNOWLEDGE) << 24;
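/* The opcode occupies bits 31:24 of the first BTH dword. */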
247 ohdr = &qp->s_hdr->u.oth;
249 ohdr = &qp->s_hdr->u.l.oth;
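/*
 * Header selection: without a GRH the BTH and its extensions
 * (u.oth) directly follow the LRH; with a GRH they sit behind the
 * 40-byte global route header (u.l.oth).
 */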
259 qib_make_rc_ack(dev, qp, ohdr, pmtu))
273 wqe = get_swqe_ptr(qp, qp->s_last);
297 wqe = get_swqe_ptr(qp, qp->s_cur);
334 switch (wqe->wr.opcode) {
339 qib_cmp24(wqe->ssn, qp->s_lsn + 1) > 0) {
345 wqe->lpsn += (len - 1) / pmtu;
353 qp->s_state = OP(SEND_ONLY_WITH_IMMEDIATE);
372 qib_cmp24(wqe->ssn, qp->s_lsn + 1) > 0) {
376 ohdr->u.rc.reth.vaddr =
378 ohdr->u.rc.reth.rkey =
381 hwords += sizeof(struct ib_reth) / sizeof(u32);
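/*
 * The RETH describes the remote buffer for an RDMA request:
 * 64-bit vaddr, rkey and DMA length, i.e. sizeof(struct ib_reth)
 * is 16 bytes, adding 4 dwords to the header word count.
 */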
384 wqe->lpsn += (len - 1) / pmtu;
393 OP(RDMA_WRITE_ONLY_WITH_IMMEDIATE);
395 ohdr->u.rc.imm_data = wqe->wr.ex.imm_data;
427 ohdr->u.rc.reth.vaddr =
429 ohdr->u.rc.reth.rkey =
433 hwords += sizeof(ohdr->u.rc.reth) / sizeof(u32);
461 wqe->wr.wr.atomic.swap);	/* swap operand (COMPARE_SWAP path) */
463 wqe->wr.wr.atomic.compare_add);	/* compare operand */
467 wqe->wr.wr.atomic.compare_add);	/* add operand (FETCH_ADD path) */
471 wqe->wr.wr.atomic.remote_addr >> 32);	/* vaddr high word */
473 wqe->wr.wr.atomic.remote_addr);	/* vaddr low word */
475 wqe->wr.wr.atomic.rkey);	/* remote rkey */
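/*
 * Atomic ETH setup: the old struct ib_atomic_eth keeps the 64-bit
 * remote address as two be32 halves (vaddr[0] high, vaddr[1] low)
 * so the header needs no 64-bit alignment; the elided left-hand
 * sides presumably wrap these operands in cpu_to_be64()/cpu_to_be32().
 */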
489 qp->s_sge.num_sge = wqe->wr.num_sge;
506 case OP(RDMA_READ_RESPONSE_FIRST):
521 case OP(SEND_MIDDLE):
534 qp->s_state = OP(SEND_LAST_WITH_IMMEDIATE);
547 case OP(RDMA_READ_RESPONSE_LAST):
559 case OP(RDMA_WRITE_FIRST):
562 case OP(RDMA_WRITE_MIDDLE):
575 qp->s_state = OP(RDMA_WRITE_LAST_WITH_IMMEDIATE);
588 case OP(RDMA_READ_RESPONSE_MIDDLE):
599 ohdr->u.rc.reth.vaddr =
601 ohdr->u.rc.reth.rkey =
605 hwords += sizeof(ohdr->u.rc.reth) / sizeof(u32);
616 delta = (((int) bth2 - (int) wqe->psn) << 8) >> 8;
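/*
 * (x << 8) >> 8 on a 32-bit int sign-extends a 24-bit value, so
 * 'delta' is the circular distance between two PSNs modulo 2^24;
 * qib_cmp24(), used throughout this file, presumably relies on
 * the same trick. Worked example: comparing PSN 0x000001 against
 * 0xffffff this way yields +2, i.e. "two packets ahead" across
 * the wrap.
 */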
636 spin_unlock_irqrestore(&qp->s_lock, flags);
704 spin_unlock_irqrestore(&qp->s_lock, flags);
713 pbc = ((u64) control << 32) | (hwords + 1);
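/*
 * Per-buffer control word for the send buffer: chip control flags
 * in the high 32 bits, packet length in dwords in the low bits;
 * the "+ 1" is presumably the trailing ICRC dword of this
 * header-only ACK packet.
 */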
769 spin_unlock_irqrestore(&qp->s_lock, flags);
783 static void reset_psn(struct qib_qp *qp, u32 psn)
786 struct qib_swqe *wqe = get_swqe_ptr(qp, n);
795 if (qib_cmp24(psn, wqe->psn) <= 0) {
801 opcode = wqe->wr.opcode;
809 wqe = get_swqe_ptr(qp, n);
810 diff = qib_cmp24(psn, wqe->psn);
822 opcode = wqe->wr.opcode;
833 qp->s_state = OP(RDMA_READ_RESPONSE_FIRST);
838 qp->s_state = OP(RDMA_READ_RESPONSE_LAST);
842 qp->s_state = OP(RDMA_READ_RESPONSE_MIDDLE);
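/*
 * Note the trick: s_state is set to a *response* opcode, which the
 * request builder never produces for fresh work, so the send
 * engine knows to resume the interrupted request mid-stream rather
 * than starting a new one.
 */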
868 static void qib_restart_rc(struct qib_qp *qp, u32 psn, int wait)
903 static void rc_timeout(unsigned long arg)
910 spin_lock(&qp->s_lock);
919 spin_unlock(&qp->s_lock);
920 spin_unlock_irqrestore(&qp->r_lock, flags);
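/*
 * Lock ordering in the timeout handler: r_lock is taken first with
 * IRQs saved (on an elided line), s_lock is nested inside with a
 * plain spin_lock, and the two are released in reverse order
 * (lines 919-920).
 */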
937 spin_unlock_irqrestore(&qp->s_lock, flags);
944 static void reset_sending_psn(struct qib_qp *qp, u32 psn)
951 wqe = get_swqe_ptr(qp, n);
952 if (qib_cmp24(psn, wqe->lpsn) <= 0) {
985 ohdr = &hdr->u.l.oth;
988 if (opcode >= OP(RDMA_READ_RESPONSE_FIRST) &&
989 opcode <= OP(ATOMIC_ACKNOWLEDGE)) {
996 reset_sending_psn(qp, psn);
1008 wqe = get_swqe_ptr(qp, qp->s_last);
1012 for (i = 0; i < wqe->wr.num_sge; i++) {
1015 qib_put_mr(sge->mr);
1020 memset(&wc, 0, sizeof wc);
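/* Zero the ib_wc so any field not set below is cleared. */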
1044 static inline void update_last_psn(struct qib_qp *qp, u32 psn)
1068 for (i = 0; i < wqe->wr.num_sge; i++) {
1071 qib_put_mr(sge->mr);
1077 wc.wr_id = wqe->wr.wr_id;
1090 update_last_psn(qp, wqe->lpsn);
1101 wqe = get_swqe_ptr(qp, qp->s_cur);
1111 wqe = get_swqe_ptr(qp, qp->s_acked);
1152 wqe = get_swqe_ptr(qp, qp->s_acked);
1159 while ((diff = qib_cmp24(ack_psn, wqe->lpsn)) >= 0) {
1167 opcode == OP(RDMA_READ_RESPONSE_ONLY) &&
1182 (opcode != OP(RDMA_READ_RESPONSE_LAST) || diff != 0)) ||
1185 (opcode != OP(ATOMIC_ACKNOWLEDGE) || diff != 0))) {
1190 if (list_empty(&qp->rspwait)) {
1225 wqe = do_rc_completion(qp, wqe, ibp);
1230 switch (aeth >> 29) {
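/*
 * AETH bits 31:29 encode the ACK type: 0 = ACK, 1 = RNR NAK,
 * 3 = NAK; the remaining bits carry the credit count or the
 * NAK/RNR code.
 */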
1243 if (qib_cmp24(qp->s_psn, psn) <= 0)
1244 reset_psn(qp, psn + 1);
1245 } else if (qib_cmp24(qp->s_psn, psn) <= 0) {
1247 qp->s_psn = psn + 1;
1256 update_last_psn(qp, psn);
1274 update_last_psn(qp, psn - 1);
1293 update_last_psn(qp, psn - 1);
1304 qib_restart_rc(qp, psn, 0);
1361 wqe = get_swqe_ptr(qp, qp->s_acked);
1363 while (qib_cmp24(psn, wqe->lpsn) > 0) {
1368 wqe = do_rc_completion(qp, wqe, ibp);
1374 if (list_empty(&qp->rspwait)) {
1397 static void qib_rc_rcv_resp(struct qib_ibport *ibp,
1408 unsigned long flags;
1414 if (opcode != OP(RDMA_READ_RESPONSE_MIDDLE)) {
1432 spin_unlock_irqrestore(&ppd->sdma_lock, flags);
1449 if (diff == 0 && opcode == OP(ACKNOWLEDGE)) {
1451 if ((aeth >> 29) == 0)
1469 wqe = get_swqe_ptr(qp, qp->s_acked);
1473 case OP(ACKNOWLEDGE):
1474 case OP(ATOMIC_ACKNOWLEDGE):
1475 case OP(RDMA_READ_RESPONSE_FIRST):
1477 if (opcode == OP(ATOMIC_ACKNOWLEDGE)) {
1484 if (!do_rc_ack(qp, aeth, psn, opcode, val, rcd) ||
1485 opcode != OP(RDMA_READ_RESPONSE_FIRST))
1488 wqe = get_swqe_ptr(qp, qp->s_acked);
1500 case OP(RDMA_READ_RESPONSE_MIDDLE):
1502 if (unlikely(qib_cmp24(psn, qp->s_last_psn + 1)))
1507 if (unlikely(tlen != (hdrsize + pmtu + 4)))
1523 if (opcode == OP(RDMA_READ_RESPONSE_MIDDLE))
1531 update_last_psn(qp, psn);
1532 spin_unlock_irqrestore(&qp->s_lock, flags);
1536 case OP(RDMA_READ_RESPONSE_ONLY):
1538 if (!do_rc_ack(qp, aeth, psn, opcode, 0, rcd))
1547 if (unlikely(tlen < (hdrsize + pad + 8)))
1554 wqe = get_swqe_ptr(qp, qp->s_acked);
1559 case OP(RDMA_READ_RESPONSE_LAST):
1561 if (unlikely(qib_cmp24(psn, qp->s_last_psn + 1)))
1572 if (unlikely(tlen <= (hdrsize + pad + 8)))
1575 tlen -= hdrsize + pad + 8;
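/*
 * Payload of the last read-response packet: total length minus
 * transport header, pad bytes, and 8 more for the AETH (4) plus
 * ICRC (4).
 */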
1581 (void) do_rc_ack(qp, aeth, psn,
1582 OP(RDMA_READ_RESPONSE_LAST), 0, rcd);
1591 rdma_seq_err(qp, ibp, psn, rcd);
1602 spin_unlock_irqrestore(&qp->s_lock, flags);
1632 unsigned long flags;
1652 if (list_empty(&qp->rspwait)) {
1694 e = &qp->s_ack_queue[prev];
1699 if (qib_cmp24(psn, e->psn) >= 0) {
1701 qib_cmp24(psn, e->lpsn) <= 0)
1707 case OP(RDMA_READ_REQUEST): {
1716 if (!e || e->opcode != OP(RDMA_READ_REQUEST))
1719 reth = &ohdr->u.rc.reth;
1757 case OP(COMPARE_SWAP):
1758 case OP(FETCH_ADD): {
1764 if (!e || e->opcode != (u8) opcode || old_req)
1782 spin_unlock_irqrestore(&qp->s_lock, flags);
1793 spin_unlock_irqrestore(&qp->s_lock, flags);
1811 spin_unlock_irqrestore(&qp->s_lock, flags);
1821 unsigned long flags;
1826 spin_unlock_irqrestore(&qp->s_lock, flags);
1834 qp->ibqp.event_handler(&ev, qp->ibqp.qp_context);
1838 static inline void qib_update_ack_queue(struct qib_qp *qp, unsigned n)
1863 int has_grh, void *data, u32 tlen, struct qib_qp *qp)
1875 unsigned long flags;
1883 ohdr = &hdr->u.l.oth;
1884 hdrsize = 8 + 40 + 12;
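/* LRH (8 bytes) + GRH (40 bytes) + BTH (12 bytes) */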
1900 if (opcode >= OP(RDMA_READ_RESPONSE_FIRST) &&
1901 opcode <= OP(ATOMIC_ACKNOWLEDGE)) {
1902 qib_rc_rcv_resp(ibp, ohdr, data, tlen, qp, opcode, psn,
1903 hdrsize, pmtu, rcd);
1908 diff = qib_cmp24(psn, qp->r_psn);
1910 if (qib_rc_rcv_error(ohdr, data, qp, opcode, psn, diff, rcd))
1917 case OP(SEND_FIRST):
1918 case OP(SEND_MIDDLE):
1919 if (opcode == OP(SEND_MIDDLE) ||
1920 opcode == OP(SEND_LAST) ||
1921 opcode == OP(SEND_LAST_WITH_IMMEDIATE))
1925 case OP(RDMA_WRITE_FIRST):
1926 case OP(RDMA_WRITE_MIDDLE):
1927 if (opcode == OP(RDMA_WRITE_MIDDLE) ||
1928 opcode == OP(RDMA_WRITE_LAST) ||
1929 opcode == OP(RDMA_WRITE_LAST_WITH_IMMEDIATE))
1934 if (opcode == OP(SEND_MIDDLE) ||
1935 opcode == OP(SEND_LAST) ||
1936 opcode == OP(SEND_LAST_WITH_IMMEDIATE) ||
1937 opcode == OP(RDMA_WRITE_MIDDLE) ||
1938 opcode == OP(RDMA_WRITE_LAST) ||
1939 opcode == OP(RDMA_WRITE_LAST_WITH_IMMEDIATE))
1951 if (qp->ibqp.event_handler) {
1957 qp->ibqp.event_handler(&ev, qp->ibqp.qp_context);
1963 case OP(SEND_FIRST):
1971 case OP(SEND_MIDDLE):
1972 case OP(RDMA_WRITE_MIDDLE):
1975 if (unlikely(tlen != (hdrsize + pmtu + 4)))
1983 case OP(RDMA_WRITE_LAST_WITH_IMMEDIATE):
1993 case OP(SEND_ONLY_WITH_IMMEDIATE):
2000 if (opcode == OP(SEND_ONLY))
2001 goto no_immediate_data;
2003 case OP(SEND_LAST_WITH_IMMEDIATE):
2010 case OP(RDMA_WRITE_LAST):
2019 if (unlikely(tlen < (hdrsize + pad + 4)))
2022 tlen -= (hdrsize + pad + 4);
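/* Strip the header, the pad bytes and the 4-byte ICRC. */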
2027 qib_put_ss(&qp->r_sge);
2033 if (opcode == OP(RDMA_WRITE_LAST_WITH_IMMEDIATE) ||
2034 opcode == OP(RDMA_WRITE_ONLY_WITH_IMMEDIATE))
2053 case OP(RDMA_WRITE_FIRST):
2054 case OP(RDMA_WRITE_ONLY):
2055 case OP(RDMA_WRITE_ONLY_WITH_IMMEDIATE):
2059 reth = &ohdr->u.rc.reth;
2060 hdrsize += sizeof(*reth);
2064 if (qp->r_len != 0) {
2074 qp->r_sge.num_sge = 1;
2076 qp->r_sge.num_sge = 0;
2079 qp->r_sge.sge.length = 0;
2080 qp->r_sge.sge.sge_length = 0;
2082 if (opcode == OP(RDMA_WRITE_FIRST))
2084 else if (opcode == OP(RDMA_WRITE_ONLY))
2085 goto no_immediate_data;
2096 case OP(RDMA_READ_REQUEST): {
2109 if (!qp->s_ack_queue[next].sent)
2110 goto nack_inv_unlck;
2111 qib_update_ack_queue(qp, next);
2118 reth = &ohdr->u.rc.reth;
2129 goto nack_acc_unlck;
2135 qp->r_psn += (len - 1) / pmtu;
2164 case OP(COMPARE_SWAP):
2165 case OP(FETCH_ADD): {
2181 if (!qp->s_ack_queue[next].sent)
2182 goto nack_inv_unlck;
2183 qib_update_ack_queue(qp, next);
2194 goto nack_inv_unlck;
2200 goto nack_acc_unlck;
2209 qib_put_mr(qp->r_sge.sge.mr);
2210 qp->r_sge.num_sge = 0;
2237 if (psn & (1 << 31))
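/*
 * Bit 31 of the BTH PSN dword is the AckReq bit: the requester
 * explicitly asked for an acknowledge to this packet.
 */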
2245 if (list_empty(&qp->rspwait)) {
2257 if (list_empty(&qp->rspwait)) {
2265 spin_unlock_irqrestore(&qp->s_lock, flags);
2271 if (list_empty(&qp->rspwait)) {
2279 spin_unlock_irqrestore(&qp->s_lock, flags);
2289 spin_unlock_irqrestore(&qp->s_lock, flags);