tbl_idx = 1;
addr_idx = 1;
while (addr_idx < nchunks) {
-    if ((tbl_idx == (TARGET_PAGE_SIZE / sizeof(uint64_t)))) {
+    if (tbl_idx == TARGET_PAGE_SIZE / sizeof(uint64_t)) {
        tbl_idx = 0;
        dir_idx++;
        pr_dbg("Mapping to table %d\n", dir_idx);

rsp->hdr.err = rdma_rm_modify_qp(&dev->rdma_dev_res, &dev->backend_dev,
cmd->qp_handle, cmd->attr_mask,
(union ibv_gid *)&cmd->attrs.ah_attr.grh.dgid,
- cmd->attrs.dest_qp_num, cmd->attrs.qp_state,
+ cmd->attrs.dest_qp_num,
+ (enum ibv_qp_state)cmd->attrs.qp_state,
cmd->attrs.qkey, cmd->attrs.rq_psn,
cmd->attrs.sq_psn);
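The explicit conversion added above turns the raw qp_state value carried in the guest command structure into the enum ibv_qp_state that the resource-manager call expects, so the conversion happens once, visibly, at the guest/backend boundary. A minimal sketch of that pattern, assuming only libibverbs; guest_qp_attrs, backend_modify_qp and handle_modify are illustrative names, not the QEMU API:

/*
 * Sketch only: the struct and helpers below are stand-ins.  The point is
 * the single explicit cast from the guest's raw integer to the verbs enum.
 */
#include <stdint.h>
#include <infiniband/verbs.h>

struct guest_qp_attrs {
    uint32_t qp_state;        /* raw value taken from the guest command buffer */
    uint32_t dest_qp_num;
};

static int backend_modify_qp(struct ibv_qp *qp, enum ibv_qp_state state,
                             uint32_t dest_qp_num)
{
    struct ibv_qp_attr attr = {
        .qp_state    = state,
        .dest_qp_num = dest_qp_num,
    };

    return ibv_modify_qp(qp, &attr, IBV_QP_STATE | IBV_QP_DEST_QPN);
}

int handle_modify(struct ibv_qp *qp, const struct guest_qp_attrs *a)
{
    /* Convert once, where guest data enters the backend. */
    return backend_modify_qp(qp, (enum ibv_qp_state)a->qp_state,
                             a->dest_qp_num);
}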
goto out_free_tbl;
}
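The tbl_idx/dir_idx loop at the start of this excerpt walks a two-level page directory: each page table holds TARGET_PAGE_SIZE / sizeof(uint64_t) 64-bit entries (512 with 4 KiB pages), and once a table is full the loop resets tbl_idx and moves on to the next directory slot. Both indices start at 1 because entry 0 is presumably filled before the loop. A standalone sketch of just that wrap logic; PAGE_SIZE, walk_pdir and the dir[] layout are illustrative, not the device's real interface:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE       4096u
#define ENTRIES_PER_TBL (PAGE_SIZE / sizeof(uint64_t))    /* 512 on 4 KiB pages */

/* dir[] holds pointers to page tables, each one page long. */
static void walk_pdir(uint64_t *dir[], uint32_t nchunks, const uint64_t *addrs)
{
    uint32_t dir_idx = 0;
    uint32_t tbl_idx = 1;     /* entry 0 assumed written before the loop */
    uint32_t addr_idx = 1;
    uint64_t *tbl = dir[0];

    while (addr_idx < nchunks) {
        if (tbl_idx == ENTRIES_PER_TBL) {
            /* Current page table is full: wrap to the next directory slot. */
            tbl_idx = 0;
            dir_idx++;
            tbl = dir[dir_idx];
            printf("Mapping to table %u\n", dir_idx);
        }
        tbl[tbl_idx++] = addrs[addr_idx++];
    }
}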
/* RX ring is the second */
- (struct pvrdma_ring *)(*ring_state)++;
+ (*ring_state)++;
rc = pvrdma_ring_init(ring, name, pci_dev,
(struct pvrdma_ring *)*ring_state,
(num_pages - 1) * TARGET_PAGE_SIZE /
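In the ring hunk above, the dropped cast had no effect on the increment: postfix ++ binds tighter than a cast, so "(struct pvrdma_ring *)(*ring_state)++" only casts the already-discarded result and does not change how far the stored pointer advances. The plain (*ring_state)++ steps the cursor by one whole element, which is what the "RX ring is the second" comment relies on. A tiny self-contained illustration, with demo_ring standing in for the real struct pvrdma_ring:

#include <assert.h>

/* demo_ring is a stand-in for struct pvrdma_ring in this sketch. */
struct demo_ring { unsigned prod_tail; unsigned cons_head; };

int main(void)
{
    struct demo_ring state[2] = { { 0, 0 }, { 0, 0 } };   /* [0] = TX, [1] = RX */
    struct demo_ring *cursor = &state[0];
    struct demo_ring **ring_state = &cursor;

    /* Same shape as the fixed line: advance the stored pointer by one
     * element so it now refers to the second (RX) ring state. */
    (*ring_state)++;

    assert(*ring_state == &state[1]);
    return 0;
}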
memset(dev->rdma_dev_res.ports, 0, sizeof(dev->rdma_dev_res.ports));
for (i = 0; i < MAX_PORTS; i++) {
- dev->rdma_dev_res.ports[i].state = PVRDMA_PORT_DOWN;
+ dev->rdma_dev_res.ports[i].state = IBV_PORT_DOWN;
dev->rdma_dev_res.ports[i].pkey_tbl =
g_malloc0(sizeof(*dev->rdma_dev_res.ports[i].pkey_tbl) *