/**
 * Resolve a remote IB address into an mlx5 UD address vector (AV).
 *
 * Creates a temporary verbs address handle only to let the mlx5 driver format
 * the AV, copies the relevant base fields out, and destroys the handle.
 *
 * @param [in]  iface            IB interface used to create the AH.
 * @param [in]  ud_common_iface  UD-mlx5 common state (compact-AV capability).
 * @param [in]  ib_addr          Remote IB address to resolve.
 * @param [in]  path_bits        Path bits to mix into the AH.
 * @param [out] base_av          Filled with the base (compact) part of the AV.
 * @param [out] grh_av           Filled with the GRH part iff the route is
 *                               global; must be non-NULL in that case.
 * @param [out] is_global        Set by the AH-creation helper; nonzero when a
 *                               GRH is required.
 *
 * @return UCS_OK on success, or UCS_ERR_INVALID_ADDR if the address handle
 *         could not be created (the underlying status is deliberately
 *         collapsed into "invalid address").
 */
ucs_status_t uct_ud_mlx5_iface_get_av(uct_ib_iface_t *iface,
                                      uct_ud_mlx5_iface_common_t *ud_common_iface,
                                      const uct_ib_address_t *ib_addr,
                                      uint8_t path_bits,
                                      uct_ib_mlx5_base_av_t *base_av,
                                      struct mlx5_grh_av *grh_av,
                                      int *is_global)
{
    ucs_status_t status;
    struct ibv_ah *ah;
    struct mlx5_wqe_av mlx5_av;

    status = uct_ib_iface_create_ah(iface, ib_addr, path_bits, &ah, is_global);
    if (status != UCS_OK) {
        /* Any creation failure is reported to the caller as a bad address */
        return UCS_ERR_INVALID_ADDR;
    }

    /* Let the driver format the AV, then the AH itself is no longer needed */
    uct_ib_mlx5_get_av(ah, &mlx5_av);
    ibv_destroy_ah(ah);

    base_av->stat_rate_sl = mlx5_av_base(&mlx5_av)->stat_rate_sl;
    base_av->fl_mlid      = mlx5_av_base(&mlx5_av)->fl_mlid;
    base_av->rlid         = mlx5_av_base(&mlx5_av)->rlid;

    /* copy MLX5_EXTENDED_UD_AV from the driver, if the flag is not present
     * then the device supports compact address vector. */
    if (ud_common_iface->config.compact_av) {
        /* Preserve only the extended-AV flag bit from the driver's dqp_dct */
        base_av->dqp_dct = mlx5_av_base(&mlx5_av)->dqp_dct & UCT_IB_MLX5_EXTENDED_UD_AV;
    } else {
        /* Device cannot use compact AVs: always force the extended format */
        base_av->dqp_dct = UCT_IB_MLX5_EXTENDED_UD_AV;
    }

    /* Sanity: if the AV sizes are equal (only extended format exists), the
     * extended flag must be set, otherwise posted WQEs would be malformed */
    ucs_assertv_always((UCT_IB_MLX5_AV_FULL_SIZE > UCT_IB_MLX5_AV_BASE_SIZE) ||
                       (base_av->dqp_dct & UCT_IB_MLX5_EXTENDED_UD_AV),
                       "compact address vector not supported, and EXTENDED_AV flag is missing");

    if (*is_global) {
        /* Global route: the caller must have supplied storage for the GRH */
        ucs_assert_always(grh_av != NULL);
        memcpy(grh_av, mlx5_av_grh(&mlx5_av), sizeof(*grh_av));
    }
    return UCS_OK;
}
/**
 * Initialize the endpoint's mlx5 UD address vector from a sockaddr-style
 * remote address (lid + qp_num).
 *
 * A verbs address handle is created only so the driver can produce the AV
 * layout; it is destroyed as soon as the AV has been copied into @a ep.
 *
 * @param [in] iface    UD-mlx5 interface owning the endpoint.
 * @param [in] ep       Endpoint whose AV is filled in.
 * @param [in] if_addr  Remote address (lid and destination QP number).
 *
 * @return UCS_OK, or UCS_ERR_INVALID_ADDR if the AH could not be created.
 */
static ucs_status_t uct_ud_mlx5_ep_create_ah(uct_ud_mlx5_iface_t *iface,
                                             uct_ud_mlx5_ep_t *ep,
                                             const uct_sockaddr_ib_t *if_addr)
{
    struct ibv_ah *addr_handle;

    addr_handle = uct_ib_create_ah(&iface->super.super, if_addr->lid);
    if (addr_handle == NULL) {
        ucs_error("failed to create address handle: %m");
        return UCS_ERR_INVALID_ADDR;
    }

    /* Copy the driver-formatted AV into the endpoint; once copied, the
     * temporary address handle can be released immediately. */
    uct_ib_mlx5_get_av(addr_handle, &ep->av);
    ibv_destroy_ah(addr_handle);

    /* Destination QP, tagged with the extended-AV flag (network order) */
    mlx5_av_base(&ep->av)->dqp_dct =
        htonl(if_addr->qp_num | UCT_IB_MLX5_EXTENDED_UD_AV);

    /* Well-known UD qkey; the reserved qkey word carries our own QP number */
    mlx5_av_base(&ep->av)->key.qkey.qkey     = htonl(UCT_IB_QKEY);
    mlx5_av_base(&ep->av)->key.qkey.reserved = iface->super.qp->qp_num;

    return UCS_OK;
}
/**
 * Send a flow-control (FC) control message on a DC endpoint.
 *
 * Two cases:
 *  - UCT_RC_EP_FC_PURE_GRANT: reply to a pending grant request @a req. The
 *    reply AV is rebuilt from the data carried in the request (lid, dct_num,
 *    and optionally a GID for global routes), since the original sender may
 *    not have a connected endpoint anymore.
 *  - otherwise (must be UCT_RC_EP_FC_FLAG_HARD_REQ): send a hard credit
 *    request over the endpoint's own AV, embedding our GID and ep pointer so
 *    the peer can grant back without a connection.
 *
 * @param [in] tl_ep  Transport-level endpoint (a uct_dc_mlx5_ep_t).
 * @param [in] op     FC opcode (pure grant or hard request).
 * @param [in] req    Pending FC request; only used for pure grants.
 *
 * @return UCS_OK, UCS_ERR_NO_RESOURCE (via UCT_DC_MLX5_CHECK_RES), or an AH
 *         creation error for global-route grants.
 */
ucs_status_t uct_dc_mlx5_ep_fc_ctrl(uct_ep_t *tl_ep, unsigned op,
                                    uct_rc_fc_request_t *req)
{
    uct_dc_mlx5_ep_t *dc_ep = ucs_derived_of(tl_ep, uct_dc_mlx5_ep_t);
    uct_dc_mlx5_iface_t *iface = ucs_derived_of(tl_ep->iface,
                                                uct_dc_mlx5_iface_t);
    uct_ib_iface_t *ib_iface = &iface->super.super.super;
    struct ibv_ah_attr ah_attr = {.is_global = 0};
    uct_dc_fc_sender_data_t sender;
    uct_dc_fc_request_t *dc_req;
    struct mlx5_wqe_av mlx5_av;
    uct_ib_mlx5_base_av_t av;
    ucs_status_t status;
    uintptr_t sender_ep;
    struct ibv_ah *ah;

    UCT_DC_MLX5_TXQP_DECL(txqp, txwq);

    /* The hard-request payload (flag byte + ep pointer) must fit in the
     * inline space reserved for a full AV */
    ucs_assert((sizeof(uint8_t) + sizeof(sender_ep)) <= UCT_IB_MLX5_AV_FULL_SIZE);

    UCT_DC_MLX5_CHECK_RES(iface, dc_ep);
    UCT_DC_MLX5_IFACE_TXQP_GET(iface, dc_ep, txqp, txwq);

    dc_req = ucs_derived_of(req, uct_dc_fc_request_t);

    if (op == UCT_RC_EP_FC_PURE_GRANT) {
        ucs_assert(req != NULL);

        sender_ep = (uintptr_t)dc_req->sender.ep;

        /* TODO: look at common code with uct_ud_mlx5_iface_get_av */
        if (dc_req->sender.global.is_global) {
            /* Global route: build a temporary AH from the requester's GID to
             * obtain the driver-formatted GRH. mlx5_av is only initialized
             * (and only read, below) on this path. */
            uct_ib_iface_fill_ah_attr_from_gid_lid(
                ib_iface, dc_req->lid,
                ucs_unaligned_ptr(&dc_req->sender.global.gid),
                ib_iface->path_bits[0], &ah_attr);

            status = uct_ib_iface_create_ah(ib_iface, &ah_attr, &ah);
            if (status != UCS_OK) {
                return status;
            }

            uct_ib_mlx5_get_av(ah, &mlx5_av);
            /* NOTE(review): unlike uct_ud_mlx5_iface_get_av, the AH does not
             * appear to be destroyed here — confirm whether it is cached or
             * leaked by uct_ib_iface_create_ah */
        }

        /* Note av initialization is copied from exp verbs */
        av.stat_rate_sl = ib_iface->config.sl; /* (attr->static_rate << 4) | attr->sl */
        av.fl_mlid      = ib_iface->path_bits[0] & 0x7f;

        /* lid in dc_req is in BE already; RoCE addressing ignores the lid */
        av.rlid = uct_ib_iface_is_roce(ib_iface) ?
                  0 : (dc_req->lid | htons(ib_iface->path_bits[0]));
        av.dqp_dct = htonl(dc_req->dct_num);
        uct_dc_mlx5_iface_set_av_sport(iface, &av, dc_req->dct_num);

        /* Extended AV format is required when the device has no compact-AV
         * support, or whenever a GRH follows the base AV */
        if (!iface->ud_common.config.compact_av || ah_attr.is_global) {
            av.dqp_dct |= UCT_IB_MLX5_EXTENDED_UD_AV;
        }

        /* Post the grant inline; the GRH pointer is only dereferenced when
         * ah_attr.is_global was set above (mlx5_av valid on that path) */
        uct_rc_mlx5_txqp_inline_post(&iface->super, UCT_IB_QPT_DCI, txqp, txwq,
                                     MLX5_OPCODE_SEND, &av /*dummy*/, 0, op,
                                     sender_ep, 0, 0, 0, &av,
                                     ah_attr.is_global ? mlx5_av_grh(&mlx5_av) : NULL,
                                     uct_ib_mlx5_wqe_av_size(&av), 0, INT_MAX);
    } else {
        ucs_assert(op == UCT_RC_EP_FC_FLAG_HARD_REQ);

        /* Embed our ep pointer and GID so the peer can send a pure grant
         * back without holding a connected endpoint to us */
        sender.ep               = (uint64_t)dc_ep;
        sender.global.gid       = ib_iface->gid;
        sender.global.is_global = dc_ep->flags & UCT_DC_MLX5_EP_FLAG_GRH;

        UCS_STATS_UPDATE_COUNTER(dc_ep->fc.stats, UCT_RC_FC_STAT_TX_HARD_REQ, 1);

        uct_rc_mlx5_txqp_inline_post(&iface->super, UCT_IB_QPT_DCI, txqp, txwq,
                                     MLX5_OPCODE_SEND_IMM, &sender.global,
                                     sizeof(sender.global), op, sender.ep,
                                     uct_dc_mlx5_get_dct_num(iface), 0, 0,
                                     &dc_ep->av, uct_dc_mlx5_ep_get_grh(dc_ep),
                                     uct_ib_mlx5_wqe_av_size(&dc_ep->av),
                                     MLX5_WQE_CTRL_SOLICITED, INT_MAX);
    }
    return UCS_OK;
}

/**
 * DC endpoint constructor: copy the interface-resolved base AV and merge the
 * remote DCT number into its dqp_dct field.
 *
 * @param iface    DC-mlx5 interface the endpoint belongs to.
 * @param if_addr  Remote interface address (packed DCT number, atomic mr id).
 * @param av       Base address vector already resolved for the remote peer.
 */
UCS_CLASS_INIT_FUNC(uct_dc_mlx5_ep_t, uct_dc_mlx5_iface_t *iface,
                    const uct_dc_mlx5_iface_addr_t *if_addr,
                    uct_ib_mlx5_base_av_t *av)
{
    uint32_t remote_dctn;

    ucs_trace_func("");

    UCS_CLASS_CALL_SUPER_INIT(uct_base_ep_t, &iface->super.super.super.super);

    self->atomic_mr_offset = uct_ib_md_atomic_offset(if_addr->atomic_mr_id);
    remote_dctn = uct_ib_unpack_uint24(if_addr->qp_num);

    /* OR the BE dct number into dqp_dct: the template av carries only the
     * flag bits (e.g. EXTENDED_UD_AV), so no dct bits are clobbered */
    memcpy(&self->av, av, sizeof(*av));
    self->av.dqp_dct |= htonl(remote_dctn);
    uct_dc_mlx5_iface_set_av_sport(iface, &self->av, remote_dctn);

    return uct_dc_mlx5_ep_basic_init(iface, self);
}