ca_attr_p->modify_srq_depth = hca_info_p->device_cap_flags & IB_DEVICE_SRQ_RESIZE;\r
ca_attr_p->system_image_guid_support = hca_info_p->device_cap_flags & IB_DEVICE_SYS_IMAGE_GUID;\r
ca_attr_p->hw_agents = FALSE; // in the context of IBAL then agent is implemented on the host\r
-\r
+ ca_attr_p->ipoib_csum = hca_info_p->device_cap_flags & IB_DEVICE_IPOIB_CSUM;\r
+ \r
ca_attr_p->num_page_sizes = 1;\r
ca_attr_p->p_page_size[0] = PAGE_SIZE; // TBD: extract an array of page sizes from HCA cap\r
\r
IB_DEVICE_RC_RNR_NAK_GEN = (1<<12),\r
IB_DEVICE_SRQ_RESIZE = (1<<13),\r
IB_DEVICE_N_NOTIFY_CQ = (1<<14),\r
+ IB_DEVICE_IPOIB_CSUM = (1<<18)\r
};\r
\r
struct ib_device_attr {\r
/* Check port for UD address vector: */
*(inbox + INIT_HCA_FLAGS_OFFSET / 4) |= cl_hton32(1);
+ /* Enable IPoIB checksumming if we can: */
+ if (dev->device_cap_flags & IB_DEVICE_IPOIB_CSUM)
+ *(inbox + INIT_HCA_FLAGS_OFFSET / 4) |= cl_hton32(7 << 3);
+
/* We leave wqe_quota, responder_exu, etc as 0 (default) */
/* QPC/EEC/CQC/EQC/RDB attributes */
DEV_LIM_FLAG_RAW_IPV6 = 1 << 4,
DEV_LIM_FLAG_RAW_ETHER = 1 << 5,
DEV_LIM_FLAG_SRQ = 1 << 6,
+ DEV_LIM_FLAG_IPOIB_CSUM = 1 << 7,
DEV_LIM_FLAG_BAD_PKEY_CNTR = 1 << 8,
DEV_LIM_FLAG_BAD_QKEY_CNTR = 1 << 9,
DEV_LIM_FLAG_MW = 1 << 16,
SYNDROME_INVAL_EEC_STATE_ERR = 0x24\r
};\r
\r
+/* Per-packet receive checksum-offload result flags, reported to the ULP
+ * via entry->recv.ud.csum_ok (set from mthca_ib_ipoib_csum_ok()).
+ * NOTE(review): bit positions presumably mirror the NDIS per-packet
+ * TCP_IP_CHECKSUM_PACKET_INFO flags — confirm against the NDIS headers.
+ */
+enum { 
+	MTHCA_NdisPacketTcpChecksumFailed = 1 << 1,
+	MTHCA_NdisPacketUdpChecksumFailed = 1 << 2,
+	MTHCA_NdisPacketIpChecksumFailed = 1 << 3,
+	MTHCA_NdisPacketTcpChecksumSucceeded = 1 << 4,
+	MTHCA_NdisPacketUdpChecksumSucceeded = 1 << 5,
+	MTHCA_NdisPacketIpChecksumSucceeded = 1 << 6
+};
+
struct mthca_cqe {\r
__be32 my_qpn;\r
__be32 my_ee;\r
__be32 rqpn;\r
- __be16 sl_g_mlpath;\r
+ u8 sl_ipok;\r
+ u8 g_mlpath;\r
__be16 rlid;\r
__be32 imm_etype_pkey_eec;\r
__be32 byte_cnt;\r
*free_cqe = 0;\r
}\r
\r
+/* Translate the HW checksum indications of a received UD packet into the
+ * MTHCA_NdisPacket* flag set.
+ *
+ * checksum - 16-bit checksum field reassembled from the CQE
+ * ip_ok    - IP_OK bit taken from the CQE (nonzero when the HCA validated
+ *            the IP header checksum)
+ *
+ * Returns a bitmask of MTHCA_NdisPacket*ChecksumSucceeded flags, or 0 when
+ * the IP_OK bit is clear (no offload result is reported in that case).
+ */
+static inline uint8_t mthca_ib_ipoib_csum_ok(u16 checksum, u8 ip_ok)
+{
+	#define CSUM_VALID_NUM 0xffff	/* checksum value meaning "L4 csum matched" */
+	uint8_t flags = 0;
+
+	if (!ip_ok)
+		return flags;
+
+	/* IP header checksum computed by the HCA matched the one in the packet */
+	flags = MTHCA_NdisPacketIpChecksumSucceeded;
+
+	/* CSUM_VALID_NUM indicates the TCP/UDP checksum matched as well */
+	if (checksum == CSUM_VALID_NUM)
+		flags |= (uint8_t)(MTHCA_NdisPacketTcpChecksumSucceeded |
+				   MTHCA_NdisPacketUdpChecksumSucceeded);
+
+	return flags;
+}
+\r
static inline int mthca_poll_one(struct mthca_dev *dev,\r
struct mthca_cq *cq,\r
struct mthca_qp **cur_qp,\r
int is_send;\r
int free_cqe = 1;\r
int err = 0;\r
+ u16 checksum;\r
\r
HCA_ENTER(HCA_DBG_CQ);\r
cqe = next_cqe_sw(cq);\r
entry->recv.ud.remote_lid = cqe->rlid;\r
entry->recv.ud.remote_qp = cqe->rqpn & 0xffffff00;\r
entry->recv.ud.pkey_index = (u16)(cl_ntoh32(cqe->imm_etype_pkey_eec) >> 16);\r
- entry->recv.ud.remote_sl = (uint8_t)(cl_ntoh16(cqe->sl_g_mlpath) >> 12);\r
- entry->recv.ud.path_bits = (uint8_t)(cl_ntoh16(cqe->sl_g_mlpath) & 0x7f);\r
- entry->recv.ud.recv_opt |= cl_ntoh16(cqe->sl_g_mlpath) & 0x80 ?\r
- IB_RECV_OPT_GRH_VALID : 0;\r
+ entry->recv.ud.remote_sl = cqe->sl_ipok >> 4;\r
+ entry->recv.ud.path_bits = (uint8_t)(cqe->g_mlpath & 0x7f);\r
+ entry->recv.ud.recv_opt |= cqe->g_mlpath & 0x80 ? IB_RECV_OPT_GRH_VALID : 0;\r
+ checksum = (u16)(cl_ntoh32(cqe->rqpn) >> 24) |\r
+ ((cl_ntoh32(cqe->my_ee) >> 16) & 0xff00);\r
+ entry->recv.ud.csum_ok = mthca_ib_ipoib_csum_ok(checksum, cqe->sl_ipok & 1);\r
}\r
if (!is_send && cqe->rlid == 0){\r
HCA_PRINT(TRACE_LEVEL_INFORMATION,HCA_DBG_CQ,("found rlid == 0 \n "));\r
\r
if (dev_lim->flags & DEV_LIM_FLAG_SRQ)\r
mdev->mthca_flags |= MTHCA_FLAG_SRQ;\r
+ \r
+ if (mthca_is_memfree(mdev)) {\r
+ if (dev_lim->flags & DEV_LIM_FLAG_IPOIB_CSUM)\r
+ mdev->device_cap_flags |= IB_DEVICE_IPOIB_CSUM; //IB_DEVICE_UD_IP_CSUM;\r
+ }\r
\r
return 0;\r
}\r
cl_hton32(MTHCA_NEXT_CQ_UPDATE) : 0) |\r
((wr->send_opt & IB_SEND_OPT_SOLICITED) ?\r
cl_hton32(MTHCA_NEXT_SOLICIT) : 0) |\r
- cl_hton32(1);\r
+ ((wr->send_opt & IB_SEND_OPT_TX_IP_CSUM) ?\r
+ cl_hton32(MTHCA_NEXT_IP_CSUM) : 0) |\r
+ ((wr->send_opt & IB_SEND_OPT_TX_TCP_UDP_CSUM) ?\r
+ cl_hton32(MTHCA_NEXT_TCP_UDP_CSUM) : 0) |\r
+ cl_hton32(1);\r
if (opcode == MTHCA_OPCODE_SEND_IMM||\r
opcode == MTHCA_OPCODE_RDMA_WRITE_IMM)\r
((struct mthca_next_seg *) wqe)->imm = wr->immediate_data;\r
};\r
\r
enum {
-	MTHCA_NEXT_DBD = 1 << 7,
-	MTHCA_NEXT_FENCE = 1 << 6,
-	MTHCA_NEXT_CQ_UPDATE = 1 << 3,
-	MTHCA_NEXT_EVENT_GEN = 1 << 2,
-	MTHCA_NEXT_SOLICIT = 1 << 1,
-
-	MTHCA_MLX_VL15 = 1 << 17,
-	MTHCA_MLX_SLR = 1 << 16
+	MTHCA_NEXT_DBD = 1 << 7,
+	MTHCA_NEXT_FENCE = 1 << 6,
+	MTHCA_NEXT_CQ_UPDATE = 1 << 3,
+	MTHCA_NEXT_EVENT_GEN = 1 << 2,
+	MTHCA_NEXT_SOLICIT = 1 << 1,
+	/* Send-WQE next-segment flags requesting HW checksum generation;
+	 * set when the caller passes IB_SEND_OPT_TX_IP_CSUM /
+	 * IB_SEND_OPT_TX_TCP_UDP_CSUM in wr->send_opt. */
+	MTHCA_NEXT_IP_CSUM = 1 << 4,
+	MTHCA_NEXT_TCP_UDP_CSUM = 1 << 5,
+
+	MTHCA_MLX_VL15 = 1 << 17,
+	MTHCA_MLX_SLR = 1 << 16
};
\r
enum {\r