rdma-core/0015-libhns-Refactor-process-of-setting-extended-sge.patch
zhengfeng luo 1725c90650 Backport bugfix for hns SRQ and SGE from rdma-core V36.
Bugfixes for hns SRQ and SGE, along with all related cleanups and
refactorings.

Signed-off-by: zhengfeng luo <luozhengfeng@h-partners.com>
Signed-off-by: Chengchang Tang <tangchengchang@huawei.com>
2022-07-14 09:13:04 +08:00

From 11c81d0e3a987f95b74e03b5e592a45029302f1d Mon Sep 17 00:00:00 2001
From: Weihang Li <liweihang@huawei.com>
Date: Fri, 14 May 2021 10:02:56 +0800
Subject: libhns: Refactor process of setting extended sge

Refactor and encapsulate the logic that gets the number of extended sge
a WQE can use, to make it easier to understand.

Signed-off-by: Weihang Li <liweihang@huawei.com>
---
 providers/hns/hns_roce_u_verbs.c | 45 ++++++++++++++++++++------------
 1 file changed, 29 insertions(+), 16 deletions(-)

diff --git a/providers/hns/hns_roce_u_verbs.c b/providers/hns/hns_roce_u_verbs.c
index 30ab072a..a8508fc5 100644
--- a/providers/hns/hns_roce_u_verbs.c
+++ b/providers/hns/hns_roce_u_verbs.c
@@ -920,31 +920,44 @@ err_alloc:
         return -ENOMEM;
 }

-static void set_extend_sge_param(struct hns_roce_device *hr_dev,
-                                 struct ibv_qp_init_attr_ex *attr,
-                                 struct hns_roce_qp *qp, unsigned int wr_cnt)
+static unsigned int get_wqe_ext_sge_cnt(struct hns_roce_qp *qp)
 {
-        int cnt = 0;
+        if (qp->verbs_qp.qp.qp_type == IBV_QPT_UD)
+                return qp->sq.max_gs;
+
+        if (qp->sq.max_gs > HNS_ROCE_SGE_IN_WQE)
+                return qp->sq.max_gs - HNS_ROCE_SGE_IN_WQE;
+
+        return 0;
+}
+
+static void set_ext_sge_param(struct hns_roce_device *hr_dev,
+                              struct ibv_qp_init_attr_ex *attr,
+                              struct hns_roce_qp *qp, unsigned int wr_cnt)
+{
+        unsigned int total_sge_cnt;
+        unsigned int wqe_sge_cnt;
+
+        qp->ex_sge.sge_shift = HNS_ROCE_SGE_SHIFT;

         if (hr_dev->hw_version == HNS_ROCE_HW_VER1) {
                 qp->sq.max_gs = HNS_ROCE_SGE_IN_WQE;
-        } else {
-                qp->sq.max_gs = attr->cap.max_send_sge;
-                if (attr->qp_type == IBV_QPT_UD)
-                        cnt = roundup_pow_of_two(wr_cnt * qp->sq.max_gs);
-                else if (qp->sq.max_gs > HNS_ROCE_SGE_IN_WQE)
-                        cnt = roundup_pow_of_two(wr_cnt *
-                                                 (qp->sq.max_gs -
-                                                  HNS_ROCE_SGE_IN_WQE));
+                return;
         }

-        qp->ex_sge.sge_shift = HNS_ROCE_SGE_SHIFT;
+        qp->sq.max_gs = attr->cap.max_send_sge;
+
+        wqe_sge_cnt = get_wqe_ext_sge_cnt(qp);

         /* If the number of extended sge is not zero, they MUST use the
          * space of HNS_HW_PAGE_SIZE at least.
          */
-        qp->ex_sge.sge_cnt = cnt ?
-                             max(cnt, HNS_HW_PAGE_SIZE / HNS_ROCE_SGE_SIZE) : 0;
+        if (wqe_sge_cnt) {
+                total_sge_cnt = roundup_pow_of_two(wr_cnt * wqe_sge_cnt);
+                qp->ex_sge.sge_cnt =
+                        max(total_sge_cnt,
+                            (unsigned int)HNS_HW_PAGE_SIZE / HNS_ROCE_SGE_SIZE);
+        }
 }

 static void hns_roce_set_qp_params(struct ibv_qp_init_attr_ex *attr,
@@ -988,7 +1001,7 @@ static void hns_roce_set_qp_params(struct ibv_qp_init_attr_ex *attr,
         qp->sq.wqe_cnt = cnt;
         qp->sq.shift = hr_ilog32(cnt);

-        set_extend_sge_param(hr_dev, attr, qp, cnt);
+        set_ext_sge_param(hr_dev, attr, qp, cnt);

         qp->sq.max_post = min(ctx->max_qp_wr, cnt);
         qp->sq.max_gs = min(ctx->max_sge, qp->sq.max_gs);
--
2.30.0
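
For illustration, the following standalone sketch mirrors the sizing rule that the two refactored helpers implement. It is not code from the patch: the constants (HNS_ROCE_SGE_IN_WQE = 2, HNS_ROCE_SGE_SIZE = 16, HNS_HW_PAGE_SIZE = 4096), the local roundup_pow_of_two() and the helper names are assumptions standing in for the driver's own definitions, and the QP is reduced to its type, WQE count and per-WR SGE count.

#include <stdio.h>

/* Assumed values; the real driver takes these from its own headers. */
#define HNS_ROCE_SGE_IN_WQE     2       /* SGEs carried in the base WQE */
#define HNS_ROCE_SGE_SIZE       16      /* bytes per extended SGE */
#define HNS_HW_PAGE_SIZE        4096    /* minimum hardware page size */

static unsigned int roundup_pow_of_two(unsigned int n)
{
        unsigned int r = 1;

        while (r < n)
                r <<= 1;
        return r;
}

/* Mirrors get_wqe_ext_sge_cnt(): extended SGEs needed by a single WQE. */
static unsigned int wqe_ext_sge_cnt(int is_ud, unsigned int max_gs)
{
        if (is_ud)
                return max_gs;  /* UD WQEs put all their SGEs in the extended area */
        if (max_gs > HNS_ROCE_SGE_IN_WQE)
                return max_gs - HNS_ROCE_SGE_IN_WQE;
        return 0;
}

/* Mirrors the sizing in set_ext_sge_param(): total extended SGEs for the SQ. */
static unsigned int ext_sge_cnt(int is_ud, unsigned int wr_cnt, unsigned int max_gs)
{
        unsigned int per_wqe = wqe_ext_sge_cnt(is_ud, max_gs);
        unsigned int total;

        if (!per_wqe)
                return 0;

        total = roundup_pow_of_two(wr_cnt * per_wqe);
        /* A non-empty extended SGE area must cover at least one hardware page. */
        if (total < HNS_HW_PAGE_SIZE / HNS_ROCE_SGE_SIZE)
                total = HNS_HW_PAGE_SIZE / HNS_ROCE_SGE_SIZE;
        return total;
}

int main(void)
{
        /* RC QP, 32 WQEs, 4 SGEs per WR: 32 * (4 - 2) = 64, already a power
         * of two, then raised to 4096 / 16 = 256 extended SGEs. */
        printf("RC: %u\n", ext_sge_cnt(0, 32, 4));
        /* UD QP, same shape: 32 * 4 = 128, rounded to 128, raised to 256. */
        printf("UD: %u\n", ext_sge_cnt(1, 32, 4));
        return 0;
}

Rounding the total up to a power of two keeps the extended SGE area sized like the other queues (presumably so it can be indexed with a simple mask), and the final max() enforces the rule stated in the comment in the patch: if any extended SGEs are used at all, they occupy at least HNS_HW_PAGE_SIZE of space.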