40 #define OP(x) IB_OPCODE_RC_##x
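/* e.g. OP(SEND_FIRST) expands to IB_OPCODE_RC_SEND_FIRST via ## token pasting */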
68 dev = to_idev(qp->ibqp.device);
103 case OP(RDMA_READ_RESPONSE_LAST):
104 case OP(RDMA_READ_RESPONSE_ONLY):
105 case OP(ATOMIC_ACKNOWLEDGE):
112 qp->s_tail_ack_queue = 0;
115 case OP(ACKNOWLEDGE):
117 if (qp->r_head_ack_queue == qp->s_tail_ack_queue) {
125 if (e->opcode == OP(RDMA_READ_REQUEST)) {
147 ohdr->u.at.atomic_ack_eth[0] =
149 ohdr->u.at.atomic_ack_eth[1] =
151 hwords += sizeof(ohdr->u.at) / sizeof(u32);
158 case OP(RDMA_READ_RESPONSE_FIRST):
159 qp->s_ack_state = OP(RDMA_READ_RESPONSE_MIDDLE);
161 case OP(RDMA_READ_RESPONSE_MIDDLE):
162 len = qp->s_ack_rdma_sge.sge.sge_length;
195 bth0 = OP(ACKNOWLEDGE) << 24;
228 ohdr = &qp->s_hdr.u.oth;
230 ohdr = &qp->s_hdr.u.l.oth;
242 ipath_make_rc_ack(dev, qp, ohdr, pmtu))
256 wqe = get_swqe_ptr(qp, qp->s_last);
272 wqe = get_swqe_ptr(qp, qp->s_cur);
310 switch (wqe->wr.opcode) {
315 ipath_cmp24(wqe->ssn, qp->s_lsn + 1) > 0) {
321 wqe->lpsn += (len - 1) / pmtu;
329 qp->s_state = OP(SEND_ONLY_WITH_IMMEDIATE);
342 if (newreq && qp->s_lsn != (u32) -1)
348 ipath_cmp24(wqe->ssn, qp->s_lsn + 1) > 0) {
352 ohdr->u.rc.reth.vaddr =
354 ohdr->u.rc.reth.rkey =
357 hwords += sizeof(struct ib_reth) / sizeof(u32);
360 wqe->lpsn += (len - 1) / pmtu;
369 OP(RDMA_WRITE_ONLY_WITH_IMMEDIATE);
371 ohdr->u.rc.imm_data = wqe->wr.ex.imm_data;
393 if (qp->s_lsn != (u32) -1)
403 ohdr->u.rc.reth.vaddr =
405 ohdr->u.rc.reth.rkey =
409 hwords += sizeof(ohdr->u.rc.reth) / sizeof(u32);
429 if (qp->s_lsn != (u32) -1)
436 wqe->wr.wr.atomic.swap);
438 wqe->wr.wr.atomic.compare_add);
442 wqe->wr.wr.atomic.compare_add);
446 wqe->wr.wr.atomic.remote_addr >> 32);
448 wqe->wr.wr.atomic.remote_addr);
450 wqe->wr.wr.atomic.rkey);
463 qp->s_sge.num_sge = wqe->wr.num_sge;
490 case OP(RDMA_READ_RESPONSE_FIRST):
495 ipath_init_restart(qp, wqe);
500 case OP(SEND_MIDDLE):
513 qp->s_state = OP(SEND_LAST_WITH_IMMEDIATE);
526 case OP(RDMA_READ_RESPONSE_LAST):
531 ipath_init_restart(qp, wqe);
533 case OP(RDMA_WRITE_FIRST):
536 case OP(RDMA_WRITE_MIDDLE):
549 qp->s_state = OP(RDMA_WRITE_LAST_WITH_IMMEDIATE);
562 case OP(RDMA_READ_RESPONSE_MIDDLE):
567 ipath_init_restart(qp, wqe);
569 ohdr->u.rc.reth.vaddr =
571 ohdr->u.rc.reth.rkey =
575 hwords += sizeof(ohdr->u.rc.reth) / sizeof(u32);
599 spin_unlock_irqrestore(&qp->s_lock, flags);
612 static void send_rc_ack(struct ipath_qp *qp)
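/* appears to build an explicit ACK and write it straight into a PIO send buffer (see writeq() at 683) rather than going through the send queue */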
632 spin_unlock_irqrestore(&qp->s_lock, flags);
666 (OP(ACKNOWLEDGE) << 24) | (1 << 22);
683 writeq(hwords + 1, piobuf);
708 ipath_schedule_send(qp);
710 spin_unlock_irqrestore(&qp->s_lock, flags);
724 static void reset_psn(struct ipath_qp *qp, u32 psn)
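/* appears to back the send state (qp->s_cur/s_state/s_psn) up so transmission resumes from the given PSN */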
736 if (ipath_cmp24(psn, wqe->psn) <= 0) {
742 opcode = wqe->wr.opcode;
750 wqe = get_swqe_ptr(qp, n);
751 diff = ipath_cmp24(psn, wqe->psn);
763 opcode = wqe->wr.opcode;
774 qp->s_state = OP(RDMA_READ_RESPONSE_FIRST);
779 qp->s_state = OP(RDMA_READ_RESPONSE_LAST);
783 qp->s_state = OP(RDMA_READ_RESPONSE_MIDDLE);
821 dev = to_idev(qp->ibqp.device);
835 ipath_schedule_send(qp);
841 static inline void update_last_psn(struct ipath_qp *qp, u32 psn)
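/* presumably records psn as qp->s_last_psn (compare the qp->s_last_psn + 1 checks at 1204 and 1268) */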
888 wqe = get_swqe_ptr(qp, qp->s_last);
894 while ((diff = ipath_cmp24(ack_psn, wqe->lpsn)) >= 0) {
902 opcode == OP(RDMA_READ_RESPONSE_ONLY) &&
917 (opcode != OP(RDMA_READ_RESPONSE_LAST) || diff != 0)) ||
920 (opcode != OP(ATOMIC_ACKNOWLEDGE) || diff != 0))) {
925 update_last_psn(qp, wqe->psn - 1);
946 ipath_schedule_send(qp);
952 wc.wr_id = wqe->wr.wr_id;
974 wqe = get_swqe_ptr(qp, qp->s_cur);
984 wqe = get_swqe_ptr(qp, qp->s_last);
988 switch (aeth >> 29) {
1003 if (ipath_cmp24(qp->s_psn, psn) <= 0) {
1004 reset_psn(qp, psn + 1);
1005 ipath_schedule_send(qp);
1007 } else if (ipath_cmp24(qp->s_psn, psn) <= 0) {
1009 qp->s_psn = psn + 1;
1014 update_last_psn(qp, psn);
1030 update_last_psn(qp, psn - 1);
1044 ipath_schedule_send(qp);
1051 update_last_psn(qp, psn - 1);
1117 static inline void ipath_rc_rcv_resp(struct ipath_ibdev *dev,
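/* handles RC response packets (ACKs and RDMA read responses); dispatched from the receive path for opcodes between RDMA_READ_RESPONSE_FIRST and ATOMIC_ACKNOWLEDGE (see 1618-1621) */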
1127 unsigned long flags;
1147 if (diff == 0 && opcode == OP(ACKNOWLEDGE)) {
1148 if (!header_in_data)
1154 if ((aeth >> 29) == 0)
1162 wqe = get_swqe_ptr(qp, qp->s_last);
1166 case OP(ACKNOWLEDGE):
1167 case OP(ATOMIC_ACKNOWLEDGE):
1168 case OP(RDMA_READ_RESPONSE_FIRST):
1169 if (!header_in_data)
1175 if (opcode == OP(ATOMIC_ACKNOWLEDGE)) {
1176 if (!header_in_data) {
1185 if (!do_rc_ack(qp, aeth, psn, opcode, val) ||
1186 opcode != OP(RDMA_READ_RESPONSE_FIRST))
1189 wqe = get_swqe_ptr(qp, qp->s_last);
1202 case OP(RDMA_READ_RESPONSE_MIDDLE):
1204 if (unlikely(ipath_cmp24(psn, qp->s_last_psn + 1))) {
1215 if (unlikely(tlen != (hdrsize + pmtu + 4)))
1227 if (opcode == OP(RDMA_READ_RESPONSE_MIDDLE))
1235 update_last_psn(qp, psn);
1236 spin_unlock_irqrestore(&qp->s_lock, flags);
1240 case OP(RDMA_READ_RESPONSE_ONLY):
1241 if (!header_in_data)
1245 if (!do_rc_ack(qp, aeth, psn, opcode, 0))
1254 if (unlikely(tlen < (hdrsize + pad + 8)))
1261 wqe = get_swqe_ptr(qp, qp->s_last);
1266 case OP(RDMA_READ_RESPONSE_LAST):
1268 if (unlikely(ipath_cmp24(psn, qp->s_last_psn + 1))) {
1285 if (unlikely(tlen <= (hdrsize + pad + 8)))
1288 tlen -= hdrsize + pad + 8;
1291 if (!header_in_data)
1298 (void) do_rc_ack(qp, aeth, psn,
1299 OP(RDMA_READ_RESPONSE_LAST), 0);
1313 spin_unlock_irqrestore(&qp->s_lock, flags);
1335 static inline int ipath_rc_rcv_error(struct ipath_ibdev *dev,
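/* appears to handle duplicate or out-of-sequence request PSNs; called with the PSN difference computed at 1626 (see 1628-1629) */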
1347 unsigned long flags;
1403 if (ipath_cmp24(psn, e->psn) >= 0) {
1410 case OP(RDMA_READ_REQUEST): {
1420 if (!e || e->opcode != OP(RDMA_READ_REQUEST) || old_req)
1423 if (!header_in_data)
1424 reth = &ohdr->u.rc.reth;
1426 reth = (struct ib_reth *)data;
1427 data += sizeof(*reth);
1465 case OP(COMPARE_SWAP):
1466 case OP(FETCH_ADD): {
1472 if (!e || e->opcode != (u8) opcode || old_req)
1487 spin_unlock_irqrestore(&qp->s_lock, flags);
1500 spin_unlock_irqrestore(&qp->s_lock, flags);
1514 ipath_schedule_send(qp);
1517 spin_unlock_irqrestore(&qp->s_lock, flags);
1527 unsigned long flags;
1532 spin_unlock_irqrestore(&qp->s_lock, flags);
1540 qp->ibqp.event_handler(&ev, qp->ibqp.qp_context);
1544 static inline void ipath_update_ack_queue(struct ipath_qp *qp, unsigned n)
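/* presumably makes ACK queue entry n reusable; called at 1811 (RDMA read request path) and 1889 (atomic path) before queuing a new response */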
1571 int has_grh, void *data, u32 tlen, struct ipath_qp *qp)
1583 unsigned long flags;
1596 ohdr = &hdr->u.l.oth;
1597 hdrsize = 8 + 40 + 12;
1603 header_in_data = dev->dd->ipath_rcvhdrentsize == 16;
1604 if (header_in_data) {
1618 if (opcode >= OP(RDMA_READ_RESPONSE_FIRST) &&
1619 opcode <= OP(ATOMIC_ACKNOWLEDGE)) {
1620 ipath_rc_rcv_resp(dev, ohdr, data, tlen, qp, opcode, psn,
1621 hdrsize, pmtu, header_in_data);
1626 diff = ipath_cmp24(psn, qp->r_psn);
1628 if (ipath_rc_rcv_error(dev, ohdr, data, qp, opcode,
1629 psn, diff, header_in_data))
1636 case OP(SEND_FIRST):
1637 case OP(SEND_MIDDLE):
1638 if (opcode == OP(SEND_MIDDLE) ||
1639 opcode == OP(SEND_LAST) ||
1640 opcode == OP(SEND_LAST_WITH_IMMEDIATE))
1644 case OP(RDMA_WRITE_FIRST):
1645 case OP(RDMA_WRITE_MIDDLE):
1646 if (opcode == OP(RDMA_WRITE_MIDDLE) ||
1647 opcode == OP(RDMA_WRITE_LAST) ||
1648 opcode == OP(RDMA_WRITE_LAST_WITH_IMMEDIATE))
1653 if (opcode == OP(SEND_MIDDLE) ||
1654 opcode == OP(SEND_LAST) ||
1655 opcode == OP(SEND_LAST_WITH_IMMEDIATE) ||
1656 opcode == OP(RDMA_WRITE_MIDDLE) ||
1657 opcode == OP(RDMA_WRITE_LAST) ||
1658 opcode == OP(RDMA_WRITE_LAST_WITH_IMMEDIATE))
1668 memset(&wc, 0, sizeof wc);
1672 case OP(SEND_FIRST):
1677 case OP(SEND_MIDDLE):
1678 case OP(RDMA_WRITE_MIDDLE):
1681 if (unlikely(tlen != (hdrsize + pmtu + 4)))
1689 case OP(RDMA_WRITE_LAST_WITH_IMMEDIATE):
1696 case OP(SEND_ONLY_WITH_IMMEDIATE):
1700 if (opcode == OP(SEND_ONLY))
1703 case OP(SEND_LAST_WITH_IMMEDIATE):
1705 if (header_in_data) {
1716 case OP(RDMA_WRITE_LAST):
1722 if (unlikely(tlen < (hdrsize + pad + 4)))
1725 tlen -= (hdrsize + pad + 4);
1735 if (opcode == OP(RDMA_WRITE_LAST_WITH_IMMEDIATE) ||
1736 opcode == OP(RDMA_WRITE_ONLY_WITH_IMMEDIATE))
1750 case OP(RDMA_WRITE_FIRST):
1751 case OP(RDMA_WRITE_ONLY):
1752 case OP(RDMA_WRITE_ONLY_WITH_IMMEDIATE):
1758 if (!header_in_data)
1759 reth = &ohdr->u.rc.reth;
1761 reth = (struct ib_reth *)data;
1762 data += sizeof(*reth);
1764 hdrsize += sizeof(*reth);
1767 if (qp->r_len != 0) {
1774 qp->r_len, vaddr, rkey,
1782 qp->r_sge.sge.length = 0;
1783 qp->r_sge.sge.sge_length = 0;
1785 if (opcode == OP(RDMA_WRITE_FIRST))
1787 else if (opcode == OP(RDMA_WRITE_ONLY))
1793 case OP(RDMA_READ_REQUEST): {
1810 goto nack_inv_unlck;
1811 ipath_update_ack_queue(qp, next);
1815 if (!header_in_data)
1816 reth = &ohdr->u.rc.reth;
1818 reth = (struct ib_reth *)data;
1819 data += sizeof(*reth);
1831 goto nack_acc_unlck;
1837 qp->r_psn += (len - 1) / pmtu;
1861 ipath_schedule_send(qp);
1866 case OP(COMPARE_SWAP):
1867 case OP(FETCH_ADD): {
1888 goto nack_inv_unlck;
1889 ipath_update_ack_queue(qp, next);
1891 if (!header_in_data)
1898 goto nack_inv_unlck;
1902 sizeof(u64), vaddr, rkey,
1904 goto nack_acc_unlck;
1924 ipath_schedule_send(qp);
1938 if (psn & (1 << 31))
1948 spin_unlock_irqrestore(&qp->s_lock, flags);
1956 spin_unlock_irqrestore(&qp->s_lock, flags);
1966 spin_unlock_irqrestore(&qp->s_lock, flags);