/* SPDX-License-Identifier: GPL-2.0 */
/* Copyright (C) 2018-2025, Advanced Micro Devices, Inc. */

#ifndef _IONIC_FW_H_
#define _IONIC_FW_H_

#include <linux/kernel.h>
#include <rdma/ib_verbs.h>

/* common for ib spec */

#define IONIC_EXP_DBELL_SZ 8

enum ionic_mrid_bits {
	IONIC_MRID_INDEX_SHIFT = 8,
};

static inline u32 ionic_mrid(u32 index, u8 key)
{
	return (index << IONIC_MRID_INDEX_SHIFT) | key;
}

static inline u32 ionic_mrid_index(u32 lrkey)
{
	return lrkey >> IONIC_MRID_INDEX_SHIFT;
}
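
/*
 * Note on the l/rkey layout implied by the helpers above: the MR table
 * index occupies the upper 24 bits and the 8-bit key the low byte, so
 * for example ionic_mrid(0x12345, 0xab) yields 0x12345ab and
 * ionic_mrid_index(0x12345ab) recovers 0x12345.
 */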

/* common to all versions */

/* wqe scatter gather element */
struct ionic_sge {
	__be64 va;
	__be32 len;
	__be32 lkey;
};

/* admin queue mr type */
enum ionic_mr_flags {
	/* bits that determine mr access */
	IONIC_MRF_LOCAL_WRITE = BIT(0),
	IONIC_MRF_REMOTE_WRITE = BIT(1),
	IONIC_MRF_REMOTE_READ = BIT(2),
	IONIC_MRF_REMOTE_ATOMIC = BIT(3),
	IONIC_MRF_MW_BIND = BIT(4),
	IONIC_MRF_ZERO_BASED = BIT(5),
	IONIC_MRF_ON_DEMAND = BIT(6),
	IONIC_MRF_PB = BIT(7),
	IONIC_MRF_ACCESS_MASK = BIT(12) - 1,

	/* bits that determine mr type */
	IONIC_MRF_UKEY_EN = BIT(13),
	IONIC_MRF_IS_MW = BIT(14),
	IONIC_MRF_INV_EN = BIT(15),

	/* base flags combinations for mr types */
	IONIC_MRF_USER_MR = 0,
	IONIC_MRF_PHYS_MR = (IONIC_MRF_UKEY_EN |
			     IONIC_MRF_INV_EN),
	IONIC_MRF_MW_1 = (IONIC_MRF_UKEY_EN |
			  IONIC_MRF_IS_MW),
	IONIC_MRF_MW_2 = (IONIC_MRF_UKEY_EN |
			  IONIC_MRF_IS_MW |
			  IONIC_MRF_INV_EN),
};

static inline int to_ionic_mr_flags(int access)
{
	int flags = 0;

	if (access & IB_ACCESS_LOCAL_WRITE)
		flags |= IONIC_MRF_LOCAL_WRITE;

	if (access & IB_ACCESS_REMOTE_READ)
		flags |= IONIC_MRF_REMOTE_READ;

	if (access & IB_ACCESS_REMOTE_WRITE)
		flags |= IONIC_MRF_REMOTE_WRITE;

	if (access & IB_ACCESS_REMOTE_ATOMIC)
		flags |= IONIC_MRF_REMOTE_ATOMIC;

	if (access & IB_ACCESS_MW_BIND)
		flags |= IONIC_MRF_MW_BIND;

	if (access & IB_ZERO_BASED)
		flags |= IONIC_MRF_ZERO_BASED;

	return flags;
}

enum ionic_qp_flags {
	/* bits that determine qp access */
	IONIC_QPF_REMOTE_WRITE = BIT(0),
	IONIC_QPF_REMOTE_READ = BIT(1),
	IONIC_QPF_REMOTE_ATOMIC = BIT(2),

	/* bits that determine other qp behavior */
	IONIC_QPF_SQ_PB = BIT(6),
	IONIC_QPF_RQ_PB = BIT(7),
	IONIC_QPF_SQ_SPEC = BIT(8),
	IONIC_QPF_RQ_SPEC = BIT(9),
	IONIC_QPF_REMOTE_PRIVILEGED = BIT(10),
	IONIC_QPF_SQ_DRAINING = BIT(11),
	IONIC_QPF_SQD_NOTIFY = BIT(12),
	IONIC_QPF_SQ_CMB = BIT(13),
	IONIC_QPF_RQ_CMB = BIT(14),
	IONIC_QPF_PRIVILEGED = BIT(15),
};

static inline int from_ionic_qp_flags(int flags)
{
	int access_flags = 0;

	if (flags & IONIC_QPF_REMOTE_WRITE)
		access_flags |= IB_ACCESS_REMOTE_WRITE;

	if (flags & IONIC_QPF_REMOTE_READ)
		access_flags |= IB_ACCESS_REMOTE_READ;

	if (flags & IONIC_QPF_REMOTE_ATOMIC)
		access_flags |= IB_ACCESS_REMOTE_ATOMIC;

	return access_flags;
}

static inline int to_ionic_qp_flags(int access, bool sqd_notify,
				    bool sq_is_cmb, bool rq_is_cmb,
				    bool sq_spec, bool rq_spec,
				    bool privileged, bool remote_privileged)
{
	int flags = 0;

	if (access & IB_ACCESS_REMOTE_WRITE)
		flags |= IONIC_QPF_REMOTE_WRITE;

	if (access & IB_ACCESS_REMOTE_READ)
		flags |= IONIC_QPF_REMOTE_READ;

	if (access & IB_ACCESS_REMOTE_ATOMIC)
		flags |= IONIC_QPF_REMOTE_ATOMIC;

	if (sqd_notify)
		flags |= IONIC_QPF_SQD_NOTIFY;

	if (sq_is_cmb)
		flags |= IONIC_QPF_SQ_CMB;

	if (rq_is_cmb)
		flags |= IONIC_QPF_RQ_CMB;

	if (sq_spec)
		flags |= IONIC_QPF_SQ_SPEC;

	if (rq_spec)
		flags |= IONIC_QPF_RQ_SPEC;

	if (privileged)
		flags |= IONIC_QPF_PRIVILEGED;

	if (remote_privileged)
		flags |= IONIC_QPF_REMOTE_PRIVILEGED;

	return flags;
}

/* cqe non-admin status indicated in status_length field when err bit is set */
enum ionic_status {
	IONIC_STS_OK,
	IONIC_STS_LOCAL_LEN_ERR,
	IONIC_STS_LOCAL_QP_OPER_ERR,
	IONIC_STS_LOCAL_PROT_ERR,
	IONIC_STS_WQE_FLUSHED_ERR,
	IONIC_STS_MEM_MGMT_OPER_ERR,
	IONIC_STS_BAD_RESP_ERR,
	IONIC_STS_LOCAL_ACC_ERR,
	IONIC_STS_REMOTE_INV_REQ_ERR,
	IONIC_STS_REMOTE_ACC_ERR,
	IONIC_STS_REMOTE_OPER_ERR,
	IONIC_STS_RETRY_EXCEEDED,
	IONIC_STS_RNR_RETRY_EXCEEDED,
	IONIC_STS_XRC_VIO_ERR,
	IONIC_STS_LOCAL_SGL_INV_ERR,
};

static inline int ionic_to_ib_status(int sts)
{
	switch (sts) {
	case IONIC_STS_OK:
		return IB_WC_SUCCESS;
	case IONIC_STS_LOCAL_LEN_ERR:
		return IB_WC_LOC_LEN_ERR;
	case IONIC_STS_LOCAL_QP_OPER_ERR:
	case IONIC_STS_LOCAL_SGL_INV_ERR:
		return IB_WC_LOC_QP_OP_ERR;
	case IONIC_STS_LOCAL_PROT_ERR:
		return IB_WC_LOC_PROT_ERR;
	case IONIC_STS_WQE_FLUSHED_ERR:
		return IB_WC_WR_FLUSH_ERR;
	case IONIC_STS_MEM_MGMT_OPER_ERR:
		return IB_WC_MW_BIND_ERR;
	case IONIC_STS_BAD_RESP_ERR:
		return IB_WC_BAD_RESP_ERR;
	case IONIC_STS_LOCAL_ACC_ERR:
		return IB_WC_LOC_ACCESS_ERR;
	case IONIC_STS_REMOTE_INV_REQ_ERR:
		return IB_WC_REM_INV_REQ_ERR;
	case IONIC_STS_REMOTE_ACC_ERR:
		return IB_WC_REM_ACCESS_ERR;
	case IONIC_STS_REMOTE_OPER_ERR:
		return IB_WC_REM_OP_ERR;
	case IONIC_STS_RETRY_EXCEEDED:
		return IB_WC_RETRY_EXC_ERR;
	case IONIC_STS_RNR_RETRY_EXCEEDED:
		return IB_WC_RNR_RETRY_EXC_ERR;
	case IONIC_STS_XRC_VIO_ERR:
	default:
		return IB_WC_GENERAL_ERR;
	}
}

/* admin queue qp type */
enum ionic_qp_type {
	IONIC_QPT_RC,
	IONIC_QPT_UC,
	IONIC_QPT_RD,
	IONIC_QPT_UD,
	IONIC_QPT_SRQ,
	IONIC_QPT_XRC_INI,
	IONIC_QPT_XRC_TGT,
	IONIC_QPT_XRC_SRQ,
};

static inline int to_ionic_qp_type(enum ib_qp_type type)
{
	switch (type) {
	case IB_QPT_GSI:
	case IB_QPT_UD:
		return IONIC_QPT_UD;
	case IB_QPT_RC:
		return IONIC_QPT_RC;
	case IB_QPT_UC:
		return IONIC_QPT_UC;
	case IB_QPT_XRC_INI:
		return IONIC_QPT_XRC_INI;
	case IB_QPT_XRC_TGT:
		return IONIC_QPT_XRC_TGT;
	default:
		return -EINVAL;
	}
}

/* admin queue qp state */
enum ionic_qp_state {
	IONIC_QPS_RESET,
	IONIC_QPS_INIT,
	IONIC_QPS_RTR,
	IONIC_QPS_RTS,
	IONIC_QPS_SQD,
	IONIC_QPS_SQE,
	IONIC_QPS_ERR,
};

static inline int from_ionic_qp_state(enum ionic_qp_state state)
{
	switch (state) {
	case IONIC_QPS_RESET:
		return IB_QPS_RESET;
	case IONIC_QPS_INIT:
		return IB_QPS_INIT;
	case IONIC_QPS_RTR:
		return IB_QPS_RTR;
	case IONIC_QPS_RTS:
		return IB_QPS_RTS;
	case IONIC_QPS_SQD:
		return IB_QPS_SQD;
	case IONIC_QPS_SQE:
		return IB_QPS_SQE;
	case IONIC_QPS_ERR:
		return IB_QPS_ERR;
	default:
		return -EINVAL;
	}
}

static inline int to_ionic_qp_state(enum ib_qp_state state)
{
	switch (state) {
	case IB_QPS_RESET:
		return IONIC_QPS_RESET;
	case IB_QPS_INIT:
		return IONIC_QPS_INIT;
	case IB_QPS_RTR:
		return IONIC_QPS_RTR;
	case IB_QPS_RTS:
		return IONIC_QPS_RTS;
	case IB_QPS_SQD:
		return IONIC_QPS_SQD;
	case IB_QPS_SQE:
		return IONIC_QPS_SQE;
	case IB_QPS_ERR:
		return IONIC_QPS_ERR;
	default:
		return 0;
	}
}

static inline int to_ionic_qp_modify_state(enum ib_qp_state to_state,
					   enum ib_qp_state from_state)
{
	return to_ionic_qp_state(to_state) |
		(to_ionic_qp_state(from_state) << 4);
}
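
/*
 * The modify-state byte packs the destination state in the low nibble
 * and the current state in the high nibble.  For example, an INIT->RTR
 * transition encodes as (IONIC_QPS_INIT << 4) | IONIC_QPS_RTR == 0x12.
 */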

/* fw abi v1 */

/* data payload part of v1 wqe */
union ionic_v1_pld {
	struct ionic_sge sgl[2];
	__be32 spec32[8];
	__be16 spec16[16];
	__u8 data[32];
};

/* completion queue v1 cqe */
struct ionic_v1_cqe {
	union {
		struct {
			__be16 cmd_idx;
			__u8 cmd_op;
			__u8 rsvd[17];
			__le16 old_sq_cindex;
			__le16 old_rq_cq_cindex;
		} admin;
		struct {
			__u64 wqe_id;
			__be32 src_qpn_op;
			__u8 src_mac[6];
			__be16 vlan_tag;
			__be32 imm_data_rkey;
		} recv;
		struct {
			__u8 rsvd[4];
			__be32 msg_msn;
			__u8 rsvd2[8];
			__u64 npg_wqe_id;
		} send;
	};
	__be32 status_length;
	__be32 qid_type_flags;
};

/* bits for cqe recv */
enum ionic_v1_cqe_src_qpn_bits {
	IONIC_V1_CQE_RECV_QPN_MASK = 0xffffff,
	IONIC_V1_CQE_RECV_OP_SHIFT = 24,

	/* MASK could be 0x3, but need 0x1f for makeshift values:
	 * OP_TYPE_RDMA_OPER_WITH_IMM, OP_TYPE_SEND_RCVD
	 */
	IONIC_V1_CQE_RECV_OP_MASK = 0x1f,
	IONIC_V1_CQE_RECV_OP_SEND = 0,
	IONIC_V1_CQE_RECV_OP_SEND_INV = 1,
	IONIC_V1_CQE_RECV_OP_SEND_IMM = 2,
	IONIC_V1_CQE_RECV_OP_RDMA_IMM = 3,

	IONIC_V1_CQE_RECV_IS_IPV4 = BIT(7 + IONIC_V1_CQE_RECV_OP_SHIFT),
	IONIC_V1_CQE_RECV_IS_VLAN = BIT(6 + IONIC_V1_CQE_RECV_OP_SHIFT),
};
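
/*
 * Per the bits above, the recv cqe src_qpn_op word carries the source
 * QPN in its low 24 bits, the receive opcode starting at bit 24 (masked
 * with 0x1f), and the IS_VLAN/IS_IPV4 flags in bits 30 and 31.
 */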

/* bits for cqe qid_type_flags */
enum ionic_v1_cqe_qtf_bits {
	IONIC_V1_CQE_COLOR = BIT(0),
	IONIC_V1_CQE_ERROR = BIT(1),
	IONIC_V1_CQE_TYPE_SHIFT = 5,
	IONIC_V1_CQE_TYPE_MASK = 0x7,
	IONIC_V1_CQE_QID_SHIFT = 8,

	IONIC_V1_CQE_TYPE_ADMIN = 0,
	IONIC_V1_CQE_TYPE_RECV = 1,
	IONIC_V1_CQE_TYPE_SEND_MSN = 2,
	IONIC_V1_CQE_TYPE_SEND_NPG = 3,
};

static inline bool ionic_v1_cqe_color(struct ionic_v1_cqe *cqe)
{
	return cqe->qid_type_flags & cpu_to_be32(IONIC_V1_CQE_COLOR);
}

static inline bool ionic_v1_cqe_error(struct ionic_v1_cqe *cqe)
{
	return cqe->qid_type_flags & cpu_to_be32(IONIC_V1_CQE_ERROR);
}

static inline bool ionic_v1_cqe_recv_is_ipv4(struct ionic_v1_cqe *cqe)
{
	return cqe->recv.src_qpn_op & cpu_to_be32(IONIC_V1_CQE_RECV_IS_IPV4);
}

static inline bool ionic_v1_cqe_recv_is_vlan(struct ionic_v1_cqe *cqe)
{
	return cqe->recv.src_qpn_op & cpu_to_be32(IONIC_V1_CQE_RECV_IS_VLAN);
}

static inline void ionic_v1_cqe_clean(struct ionic_v1_cqe *cqe)
{
	cqe->qid_type_flags |= cpu_to_be32(~0u << IONIC_V1_CQE_QID_SHIFT);
}

static inline u32 ionic_v1_cqe_qtf(struct ionic_v1_cqe *cqe)
{
	return be32_to_cpu(cqe->qid_type_flags);
}

static inline u8 ionic_v1_cqe_qtf_type(u32 qtf)
{
	return (qtf >> IONIC_V1_CQE_TYPE_SHIFT) & IONIC_V1_CQE_TYPE_MASK;
}

static inline u32 ionic_v1_cqe_qtf_qid(u32 qtf)
{
	return qtf >> IONIC_V1_CQE_QID_SHIFT;
}
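
/*
 * Per the bits above, the cqe qid_type_flags word (host order after
 * be32_to_cpu) carries the color in bit 0, the error flag in bit 1, the
 * cqe type in bits 5..7, and the queue id in bits 8 and up.
 */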

/* v1 base wqe header */
struct ionic_v1_base_hdr {
	__u64 wqe_id;
	__u8 op;
	__u8 num_sge_key;
	__be16 flags;
	__be32 imm_data_key;
};

/* v1 receive wqe body */
struct ionic_v1_recv_bdy {
	__u8 rsvd[16];
	union ionic_v1_pld pld;
};

/* v1 send/rdma wqe body (common, has sgl) */
struct ionic_v1_common_bdy {
	union {
		struct {
			__be32 ah_id;
			__be32 dest_qpn;
			__be32 dest_qkey;
		} send;
		struct {
			__be32 remote_va_high;
			__be32 remote_va_low;
			__be32 remote_rkey;
		} rdma;
	};
	__be32 length;
	union ionic_v1_pld pld;
};

/* v1 atomic wqe body */
struct ionic_v1_atomic_bdy {
	__be32 remote_va_high;
	__be32 remote_va_low;
	__be32 remote_rkey;
	__be32 swap_add_high;
	__be32 swap_add_low;
	__be32 compare_high;
	__be32 compare_low;
	__u8 rsvd[4];
	struct ionic_sge sge;
};

/* v1 reg mr wqe body */
struct ionic_v1_reg_mr_bdy {
	__be64 va;
	__be64 length;
	__be64 offset;
	__be64 dma_addr;
	__be32 map_count;
	__be16 flags;
	__u8 dir_size_log2;
	__u8 page_size_log2;
	__u8 rsvd[8];
};

/* v1 bind mw wqe body */
struct ionic_v1_bind_mw_bdy {
	__be64 va;
	__be64 length;
	__be32 lkey;
	__be16 flags;
	__u8 rsvd[26];
};

/* v1 send/recv wqe */
struct ionic_v1_wqe {
	struct ionic_v1_base_hdr base;
	union {
		struct ionic_v1_recv_bdy recv;
		struct ionic_v1_common_bdy common;
		struct ionic_v1_atomic_bdy atomic;
		struct ionic_v1_reg_mr_bdy reg_mr;
		struct ionic_v1_bind_mw_bdy bind_mw;
	};
};

/* queue pair v1 send opcodes */
enum ionic_v1_op {
	IONIC_V1_OP_SEND,
	IONIC_V1_OP_SEND_INV,
	IONIC_V1_OP_SEND_IMM,
	IONIC_V1_OP_RDMA_READ,
	IONIC_V1_OP_RDMA_WRITE,
	IONIC_V1_OP_RDMA_WRITE_IMM,
	IONIC_V1_OP_ATOMIC_CS,
	IONIC_V1_OP_ATOMIC_FA,
	IONIC_V1_OP_REG_MR,
	IONIC_V1_OP_LOCAL_INV,
	IONIC_V1_OP_BIND_MW,

	/* flags */
	IONIC_V1_FLAG_FENCE = BIT(0),
	IONIC_V1_FLAG_SOL = BIT(1),
	IONIC_V1_FLAG_INL = BIT(2),
	IONIC_V1_FLAG_SIG = BIT(3),

	/* flags last four bits for sgl spec format */
	IONIC_V1_FLAG_SPEC32 = (1u << 12),
	IONIC_V1_FLAG_SPEC16 = (2u << 12),
	IONIC_V1_SPEC_FIRST_SGE = 2,
};

/* queue pair v2 send opcodes */
enum ionic_v2_op {
	IONIC_V2_OPSL_OUT = 0x20,
	IONIC_V2_OPSL_IMM = 0x40,
	IONIC_V2_OPSL_INV = 0x80,

	IONIC_V2_OP_SEND = 0x0 | IONIC_V2_OPSL_OUT,
	IONIC_V2_OP_SEND_IMM = IONIC_V2_OP_SEND | IONIC_V2_OPSL_IMM,
	IONIC_V2_OP_SEND_INV = IONIC_V2_OP_SEND | IONIC_V2_OPSL_INV,

	IONIC_V2_OP_RDMA_WRITE = 0x1 | IONIC_V2_OPSL_OUT,
	IONIC_V2_OP_RDMA_WRITE_IMM = IONIC_V2_OP_RDMA_WRITE | IONIC_V2_OPSL_IMM,

	IONIC_V2_OP_RDMA_READ = 0x2,

	IONIC_V2_OP_ATOMIC_CS = 0x4,
	IONIC_V2_OP_ATOMIC_FA = 0x5,
	IONIC_V2_OP_REG_MR = 0x6,
	IONIC_V2_OP_LOCAL_INV = 0x7,
	IONIC_V2_OP_BIND_MW = 0x8,
};

static inline size_t ionic_v1_send_wqe_min_size(int min_sge, int min_data,
						int spec, bool expdb)
{
	size_t sz_wqe, sz_sgl, sz_data;

	if (spec > IONIC_V1_SPEC_FIRST_SGE)
		min_sge += IONIC_V1_SPEC_FIRST_SGE;

	if (expdb) {
		min_sge += 1;
		min_data += IONIC_EXP_DBELL_SZ;
	}

	sz_wqe = sizeof(struct ionic_v1_wqe);
	sz_sgl = offsetof(struct ionic_v1_wqe, common.pld.sgl[min_sge]);
	sz_data = offsetof(struct ionic_v1_wqe, common.pld.data[min_data]);

	if (sz_sgl > sz_wqe)
		sz_wqe = sz_sgl;

	if (sz_data > sz_wqe)
		sz_wqe = sz_data;

	return sz_wqe;
}
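
/*
 * Worked example, assuming the natural packing of the structs above
 * (64-byte base wqe, 16-byte ionic_sge):
 * ionic_v1_send_wqe_min_size(4, 0, 0, false) returns
 * offsetof(struct ionic_v1_wqe, common.pld.sgl[4]) == 96, since four
 * gathers no longer fit in the base wqe.  Callers presumably round this
 * up to the power-of-two stride implied by the stride_log2 parameters
 * used below.
 */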

static inline int ionic_v1_send_wqe_max_sge(u8 stride_log2, int spec,
					    bool expdb)
{
	struct ionic_sge *sge = (void *)(1ull << stride_log2);
	struct ionic_v1_wqe *wqe = (void *)0;
	int num_sge = 0;

	if (expdb)
		sge -= 1;

	if (spec > IONIC_V1_SPEC_FIRST_SGE)
		num_sge = IONIC_V1_SPEC_FIRST_SGE;

	num_sge = sge - &wqe->common.pld.sgl[num_sge];

	if (spec && num_sge > spec)
		num_sge = spec;

	return num_sge;
}

static inline int ionic_v1_send_wqe_max_data(u8 stride_log2, bool expdb)
{
	struct ionic_v1_wqe *wqe = (void *)0;
	__u8 *data = (void *)(1ull << stride_log2);

	if (expdb)
		data -= IONIC_EXP_DBELL_SZ;

	return data - wqe->common.pld.data;
}
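
/*
 * The two helpers above (and the recv variants below) compute per-wqe
 * capacity with pointer arithmetic on a NULL wqe: &wqe->common.pld.sgl[n]
 * with wqe == NULL is just the byte offset of sgl[n], so subtracting it
 * from the end of a (1 << stride_log2)-byte wqe gives how many sges, or
 * how many inline data bytes, fit within one wqe stride.
 */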

static inline size_t ionic_v1_recv_wqe_min_size(int min_sge, int spec,
						bool expdb)
{
	size_t sz_wqe, sz_sgl;

	if (spec > IONIC_V1_SPEC_FIRST_SGE)
		min_sge += IONIC_V1_SPEC_FIRST_SGE;

	if (expdb)
		min_sge += 1;

	sz_wqe = sizeof(struct ionic_v1_wqe);
	sz_sgl = offsetof(struct ionic_v1_wqe, recv.pld.sgl[min_sge]);

	if (sz_sgl > sz_wqe)
		sz_wqe = sz_sgl;

	return sz_wqe;
}

static inline int ionic_v1_recv_wqe_max_sge(u8 stride_log2, int spec,
					    bool expdb)
{
	struct ionic_sge *sge = (void *)(1ull << stride_log2);
	struct ionic_v1_wqe *wqe = (void *)0;
	int num_sge = 0;

	if (expdb)
		sge -= 1;

	if (spec > IONIC_V1_SPEC_FIRST_SGE)
		num_sge = IONIC_V1_SPEC_FIRST_SGE;

	num_sge = sge - &wqe->recv.pld.sgl[num_sge];

	if (spec && num_sge > spec)
		num_sge = spec;

	return num_sge;
}

static inline int ionic_v1_use_spec_sge(int min_sge, int spec)
{
	if (!spec || min_sge > spec)
		return 0;

	if (min_sge <= IONIC_V1_SPEC_FIRST_SGE)
		return IONIC_V1_SPEC_FIRST_SGE;

	return spec;
}
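
/*
 * ionic_v1_use_spec_sge() selects how many sges go in the spec format:
 * zero when no spec format is available or min_sge exceeds it,
 * IONIC_V1_SPEC_FIRST_SGE when min_sge fits within the first spec sges,
 * otherwise the full spec count.
 */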

struct ionic_admin_stats_hdr {
	__le64 dma_addr;
	__le32 length;
	__le32 id_ver;
	__u8 type_state;
} __packed;

#define IONIC_ADMIN_STATS_HDRS_IN_V1_LEN 17
static_assert(sizeof(struct ionic_admin_stats_hdr) ==
	      IONIC_ADMIN_STATS_HDRS_IN_V1_LEN);

struct ionic_admin_create_ah {
	__le64 dma_addr;
	__le32 length;
	__le32 pd_id;
	__le32 id_ver;
	__le16 dbid_flags;
	__u8 csum_profile;
	__u8 crypto;
} __packed;

#define IONIC_ADMIN_CREATE_AH_IN_V1_LEN 24
static_assert(sizeof(struct ionic_admin_create_ah) ==
	      IONIC_ADMIN_CREATE_AH_IN_V1_LEN);

struct ionic_admin_destroy_ah {
	__le32 ah_id;
} __packed;

#define IONIC_ADMIN_DESTROY_AH_IN_V1_LEN 4
static_assert(sizeof(struct ionic_admin_destroy_ah) ==
	      IONIC_ADMIN_DESTROY_AH_IN_V1_LEN);

struct ionic_admin_query_ah {
	__le64 dma_addr;
} __packed;

#define IONIC_ADMIN_QUERY_AH_IN_V1_LEN 8
static_assert(sizeof(struct ionic_admin_query_ah) ==
	      IONIC_ADMIN_QUERY_AH_IN_V1_LEN);

struct ionic_admin_create_mr {
	__le64 va;
	__le64 length;
	__le32 pd_id;
	__le32 id_ver;
	__le32 tbl_index;
	__le32 map_count;
	__le64 dma_addr;
	__le16 dbid_flags;
	__u8 pt_type;
	__u8 dir_size_log2;
	__u8 page_size_log2;
} __packed;

#define IONIC_ADMIN_CREATE_MR_IN_V1_LEN 45
static_assert(sizeof(struct ionic_admin_create_mr) ==
	      IONIC_ADMIN_CREATE_MR_IN_V1_LEN);

struct ionic_admin_destroy_mr {
	__le32 mr_id;
} __packed;

#define IONIC_ADMIN_DESTROY_MR_IN_V1_LEN 4
static_assert(sizeof(struct ionic_admin_destroy_mr) ==
	      IONIC_ADMIN_DESTROY_MR_IN_V1_LEN);

struct ionic_admin_create_cq {
	__le32 eq_id;
	__u8 depth_log2;
	__u8 stride_log2;
	__u8 dir_size_log2_rsvd;
	__u8 page_size_log2;
	__le32 cq_flags;
	__le32 id_ver;
	__le32 tbl_index;
	__le32 map_count;
	__le64 dma_addr;
	__le16 dbid_flags;
} __packed;

#define IONIC_ADMIN_CREATE_CQ_IN_V1_LEN 34
static_assert(sizeof(struct ionic_admin_create_cq) ==
	      IONIC_ADMIN_CREATE_CQ_IN_V1_LEN);

struct ionic_admin_destroy_cq {
	__le32 cq_id;
} __packed;

#define IONIC_ADMIN_DESTROY_CQ_IN_V1_LEN 4
static_assert(sizeof(struct ionic_admin_destroy_cq) ==
	      IONIC_ADMIN_DESTROY_CQ_IN_V1_LEN);

struct ionic_admin_create_qp {
	__le32 pd_id;
	__be32 priv_flags;
	__le32 sq_cq_id;
	__u8 sq_depth_log2;
	__u8 sq_stride_log2;
	__u8 sq_dir_size_log2_rsvd;
	__u8 sq_page_size_log2;
	__le32 sq_tbl_index_xrcd_id;
	__le32 sq_map_count;
	__le64 sq_dma_addr;
	__le32 rq_cq_id;
	__u8 rq_depth_log2;
	__u8 rq_stride_log2;
	__u8 rq_dir_size_log2_rsvd;
	__u8 rq_page_size_log2;
	__le32 rq_tbl_index_srq_id;
	__le32 rq_map_count;
	__le64 rq_dma_addr;
	__le32 id_ver;
	__le16 dbid_flags;
	__u8 type_state;
	__u8 rsvd;
} __packed;

#define IONIC_ADMIN_CREATE_QP_IN_V1_LEN 64
static_assert(sizeof(struct ionic_admin_create_qp) ==
	      IONIC_ADMIN_CREATE_QP_IN_V1_LEN);

struct ionic_admin_destroy_qp {
	__le32 qp_id;
} __packed;

#define IONIC_ADMIN_DESTROY_QP_IN_V1_LEN 4
static_assert(sizeof(struct ionic_admin_destroy_qp) ==
	      IONIC_ADMIN_DESTROY_QP_IN_V1_LEN);

struct ionic_admin_mod_qp {
	__be32 attr_mask;
	__u8 dcqcn_profile;
	__u8 tfp_csum_profile;
	__be16 access_flags;
	__le32 rq_psn;
	__le32 sq_psn;
	__le32 qkey_dest_qpn;
	__le32 rate_limit_kbps;
	__u8 pmtu;
	__u8 retry;
	__u8 rnr_timer;
	__u8 retry_timeout;
	__u8 rsq_depth;
	__u8 rrq_depth;
	__le16 pkey_id;
	__le32 ah_id_len;
	__u8 en_pcp;
	__u8 ip_dscp;
	__u8 rsvd2;
	__u8 type_state;
	union {
		struct {
			__le16 rsvd1;
		};
		__le32 rrq_index;
	};
	__le32 rsq_index;
	__le64 dma_addr;
	__le32 id_ver;
} __packed;

#define IONIC_ADMIN_MODIFY_QP_IN_V1_LEN 60
static_assert(sizeof(struct ionic_admin_mod_qp) ==
	      IONIC_ADMIN_MODIFY_QP_IN_V1_LEN);

struct ionic_admin_query_qp {
	__le64 hdr_dma_addr;
	__le64 sq_dma_addr;
	__le64 rq_dma_addr;
	__le32 ah_id;
	__le32 id_ver;
	__le16 dbid_flags;
} __packed;

#define IONIC_ADMIN_QUERY_QP_IN_V1_LEN 34
static_assert(sizeof(struct ionic_admin_query_qp) ==
	      IONIC_ADMIN_QUERY_QP_IN_V1_LEN);

#define ADMIN_WQE_STRIDE 64
#define ADMIN_WQE_HDR_LEN 4

/* admin queue v1 wqe */
struct ionic_v1_admin_wqe {
	__u8 op;
	__u8 rsvd;
	__le16 len;

	union {
		struct ionic_admin_stats_hdr stats;
		struct ionic_admin_create_ah create_ah;
		struct ionic_admin_destroy_ah destroy_ah;
		struct ionic_admin_query_ah query_ah;
		struct ionic_admin_create_mr create_mr;
		struct ionic_admin_destroy_mr destroy_mr;
		struct ionic_admin_create_cq create_cq;
		struct ionic_admin_destroy_cq destroy_cq;
		struct ionic_admin_create_qp create_qp;
		struct ionic_admin_destroy_qp destroy_qp;
		struct ionic_admin_mod_qp mod_qp;
		struct ionic_admin_query_qp query_qp;
	} cmd;
};
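
/*
 * The op/rsvd/len fields above form the 4-byte header counted by
 * ADMIN_WQE_HDR_LEN; the cmd union carries the per-opcode payload,
 * whose v1 wire sizes are pinned by the *_IN_V1_LEN static_asserts
 * above.
 */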

/* side data for query qp */
struct ionic_v1_admin_query_qp_sq {
	__u8 rnr_timer;
	__u8 retry_timeout;
	__be16 access_perms_flags;
	__be16 rsvd;
	__be16 pkey_id;
	__be32 qkey_dest_qpn;
	__be32 rate_limit_kbps;
	__be32 rq_psn;
};

struct ionic_v1_admin_query_qp_rq {
	__u8 state_pmtu;
	__u8 retry_rnrtry;
	__u8 rrq_depth;
	__u8 rsq_depth;
	__be32 sq_psn;
	__be16 access_perms_flags;
	__be16 rsvd;
};

/* admin queue v1 opcodes */
enum ionic_v1_admin_op {
	IONIC_V1_ADMIN_NOOP,
	IONIC_V1_ADMIN_CREATE_CQ,
	IONIC_V1_ADMIN_CREATE_QP,
	IONIC_V1_ADMIN_CREATE_MR,
	IONIC_V1_ADMIN_STATS_HDRS,
	IONIC_V1_ADMIN_STATS_VALS,
	IONIC_V1_ADMIN_DESTROY_MR,
	IONIC_V1_ADMIN_RSVD_7, /* RESIZE_CQ */
	IONIC_V1_ADMIN_DESTROY_CQ,
	IONIC_V1_ADMIN_MODIFY_QP,
	IONIC_V1_ADMIN_QUERY_QP,
	IONIC_V1_ADMIN_DESTROY_QP,
	IONIC_V1_ADMIN_DEBUG,
	IONIC_V1_ADMIN_CREATE_AH,
	IONIC_V1_ADMIN_QUERY_AH,
	IONIC_V1_ADMIN_MODIFY_DCQCN,
	IONIC_V1_ADMIN_DESTROY_AH,
	IONIC_V1_ADMIN_QP_STATS_HDRS,
	IONIC_V1_ADMIN_QP_STATS_VALS,
	IONIC_V1_ADMIN_OPCODES_MAX,
};

/* admin queue v1 cqe status */
enum ionic_v1_admin_status {
	IONIC_V1_ASTS_OK,
	IONIC_V1_ASTS_BAD_CMD,
	IONIC_V1_ASTS_BAD_INDEX,
	IONIC_V1_ASTS_BAD_STATE,
	IONIC_V1_ASTS_BAD_TYPE,
	IONIC_V1_ASTS_BAD_ATTR,
	IONIC_V1_ASTS_MSG_TOO_BIG,
};

/* event queue v1 eqe */
struct ionic_v1_eqe {
	__be32 evt;
};

/* bits for eqe evt */
enum ionic_v1_eqe_evt_bits {
	IONIC_V1_EQE_COLOR = BIT(0),
	IONIC_V1_EQE_TYPE_SHIFT = 1,
	IONIC_V1_EQE_TYPE_MASK = 0x7,
	IONIC_V1_EQE_CODE_SHIFT = 4,
	IONIC_V1_EQE_CODE_MASK = 0xf,
	IONIC_V1_EQE_QID_SHIFT = 8,

	/* cq events */
	IONIC_V1_EQE_TYPE_CQ = 0,
	/* cq normal events */
	IONIC_V1_EQE_CQ_NOTIFY = 0,
	/* cq error events */
	IONIC_V1_EQE_CQ_ERR = 8,

	/* qp and srq events */
	IONIC_V1_EQE_TYPE_QP = 1,
	/* qp normal events */
	IONIC_V1_EQE_SRQ_LEVEL = 0,
	IONIC_V1_EQE_SQ_DRAIN = 1,
	IONIC_V1_EQE_QP_COMM_EST = 2,
	IONIC_V1_EQE_QP_LAST_WQE = 3,
	/* qp error events */
	IONIC_V1_EQE_QP_ERR = 8,
	IONIC_V1_EQE_QP_ERR_REQUEST = 9,
	IONIC_V1_EQE_QP_ERR_ACCESS = 10,
};

enum ionic_tfp_csum_profiles {
	IONIC_TFP_CSUM_PROF_ETH_IPV4_UDP = 0,
	IONIC_TFP_CSUM_PROF_ETH_QTAG_IPV4_UDP = 1,
	IONIC_TFP_CSUM_PROF_ETH_IPV6_UDP = 2,
	IONIC_TFP_CSUM_PROF_ETH_QTAG_IPV6_UDP = 3,
	IONIC_TFP_CSUM_PROF_IPV4_UDP_VXLAN_ETH_QTAG_IPV4_UDP = 4,
	IONIC_TFP_CSUM_PROF_IPV4_UDP_VXLAN_ETH_QTAG_IPV6_UDP = 5,
	IONIC_TFP_CSUM_PROF_QTAG_IPV4_UDP_VXLAN_ETH_QTAG_IPV4_UDP = 6,
	IONIC_TFP_CSUM_PROF_QTAG_IPV4_UDP_VXLAN_ETH_QTAG_IPV6_UDP = 7,
	IONIC_TFP_CSUM_PROF_ETH_QTAG_IPV4_UDP_ESP_IPV4_UDP = 8,
	IONIC_TFP_CSUM_PROF_ETH_QTAG_IPV4_ESP_UDP = 9,
	IONIC_TFP_CSUM_PROF_ETH_QTAG_IPV4_UDP_ESP_UDP = 10,
	IONIC_TFP_CSUM_PROF_ETH_QTAG_IPV6_ESP_UDP = 11,
	IONIC_TFP_CSUM_PROF_ETH_QTAG_IPV4_UDP_CSUM = 12,
};

static inline bool ionic_v1_eqe_color(struct ionic_v1_eqe *eqe)
{
	return eqe->evt & cpu_to_be32(IONIC_V1_EQE_COLOR);
}

static inline u32 ionic_v1_eqe_evt(struct ionic_v1_eqe *eqe)
{
	return be32_to_cpu(eqe->evt);
}

static inline u8 ionic_v1_eqe_evt_type(u32 evt)
{
	return (evt >> IONIC_V1_EQE_TYPE_SHIFT) & IONIC_V1_EQE_TYPE_MASK;
}

static inline u8 ionic_v1_eqe_evt_code(u32 evt)
{
	return (evt >> IONIC_V1_EQE_CODE_SHIFT) & IONIC_V1_EQE_CODE_MASK;
}

static inline u32 ionic_v1_eqe_evt_qid(u32 evt)
{
	return evt >> IONIC_V1_EQE_QID_SHIFT;
}
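
/*
 * Per the bits above, the eqe evt word (host order after be32_to_cpu)
 * carries the color in bit 0, the event type in bits 1..3, the event
 * code in bits 4..7, and the queue id in bits 8 and up.
 */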

enum ionic_v1_stat_bits {
	IONIC_V1_STAT_TYPE_SHIFT = 28,
	IONIC_V1_STAT_TYPE_NONE = 0,
	IONIC_V1_STAT_TYPE_8 = 1,
	IONIC_V1_STAT_TYPE_LE16 = 2,
	IONIC_V1_STAT_TYPE_LE32 = 3,
	IONIC_V1_STAT_TYPE_LE64 = 4,
	IONIC_V1_STAT_TYPE_BE16 = 5,
	IONIC_V1_STAT_TYPE_BE32 = 6,
	IONIC_V1_STAT_TYPE_BE64 = 7,
	IONIC_V1_STAT_OFF_MASK = BIT(IONIC_V1_STAT_TYPE_SHIFT) - 1,
};

struct ionic_v1_stat {
	union {
		__be32 be_type_off;
		u32 type_off;
	};
	char name[28];
};

static inline int ionic_v1_stat_type(struct ionic_v1_stat *hdr)
{
	return hdr->type_off >> IONIC_V1_STAT_TYPE_SHIFT;
}

static inline unsigned int ionic_v1_stat_off(struct ionic_v1_stat *hdr)
{
	return hdr->type_off & IONIC_V1_STAT_OFF_MASK;
}
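
/*
 * Each stat header packs an IONIC_V1_STAT_TYPE_* value into the top
 * four bits of type_off and the counter's offset into the low 28 bits.
 * For example, (IONIC_V1_STAT_TYPE_LE64 << IONIC_V1_STAT_TYPE_SHIFT) | 0x40
 * describes a little-endian 64-bit counter at offset 0x40.
 */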

#endif /* _IONIC_FW_H_ */