/* QLogic qedr NIC Driver
 * Copyright (c) 2015-2016 QLogic Corporation
 *
 * This software is available to you under a choice of one of two
 * licenses. You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 * Redistribution and use in source and binary forms, with or
 * without modification, are permitted provided that the following
 * conditions are met:
 *
 * - Redistributions of source code must retain the above
 *   copyright notice, this list of conditions and the following
 *   disclaimer.
 *
 * - Redistributions in binary form must reproduce the above
 *   copyright notice, this list of conditions and the following
 *   disclaimer in the documentation and/or other materials
 *   provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef __QED_HSI_RDMA__
#define __QED_HSI_RDMA__

#include <linux/qed/rdma_common.h>

/* rdma completion notification queue element */
struct rdma_cnqe {
	struct regpair cq_handle;
};

struct rdma_cqe_responder {
	struct regpair srq_wr_id;
	struct regpair qp_handle;
	__le32 imm_data_or_inv_r_Key;
	__le32 length;
	__le32 imm_data_hi;
	__le16 rq_cons;
	u8 flags;
#define RDMA_CQE_RESPONDER_TOGGLE_BIT_MASK 0x1
#define RDMA_CQE_RESPONDER_TOGGLE_BIT_SHIFT 0
#define RDMA_CQE_RESPONDER_TYPE_MASK 0x3
#define RDMA_CQE_RESPONDER_TYPE_SHIFT 1
#define RDMA_CQE_RESPONDER_INV_FLG_MASK 0x1
#define RDMA_CQE_RESPONDER_INV_FLG_SHIFT 3
#define RDMA_CQE_RESPONDER_IMM_FLG_MASK 0x1
#define RDMA_CQE_RESPONDER_IMM_FLG_SHIFT 4
#define RDMA_CQE_RESPONDER_RDMA_FLG_MASK 0x1
#define RDMA_CQE_RESPONDER_RDMA_FLG_SHIFT 5
#define RDMA_CQE_RESPONDER_RESERVED2_MASK 0x3
#define RDMA_CQE_RESPONDER_RESERVED2_SHIFT 6
	u8 status;
};

struct rdma_cqe_requester {
	__le16 sq_cons;
	__le16 reserved0;
	__le32 reserved1;
	struct regpair qp_handle;
	struct regpair reserved2;
	__le32 reserved3;
	__le16 reserved4;
	u8 flags;
#define RDMA_CQE_REQUESTER_TOGGLE_BIT_MASK 0x1
#define RDMA_CQE_REQUESTER_TOGGLE_BIT_SHIFT 0
#define RDMA_CQE_REQUESTER_TYPE_MASK 0x3
#define RDMA_CQE_REQUESTER_TYPE_SHIFT 1
#define RDMA_CQE_REQUESTER_RESERVED5_MASK 0x1F
#define RDMA_CQE_REQUESTER_RESERVED5_SHIFT 3
	u8 status;
};

struct rdma_cqe_common {
	struct regpair reserved0;
	struct regpair qp_handle;
	__le16 reserved1[7];
	u8 flags;
#define RDMA_CQE_COMMON_TOGGLE_BIT_MASK 0x1
#define RDMA_CQE_COMMON_TOGGLE_BIT_SHIFT 0
#define RDMA_CQE_COMMON_TYPE_MASK 0x3
#define RDMA_CQE_COMMON_TYPE_SHIFT 1
#define RDMA_CQE_COMMON_RESERVED2_MASK 0x1F
#define RDMA_CQE_COMMON_RESERVED2_SHIFT 3
	u8 status;
};

/* rdma completion queue element */
union rdma_cqe {
	struct rdma_cqe_responder resp;
	struct rdma_cqe_requester req;
	struct rdma_cqe_common cmn;
};
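
/* Usage sketch (driver-side convention, not part of the firmware layout):
 * the MASK/SHIFT pairs that follow each "flags" byte are meant to be used
 * with the qed GET_FIELD()/SET_FIELD() helpers, e.g. a CQ polling loop
 * would typically read
 *
 *	u8 type = GET_FIELD(cqe->cmn.flags, RDMA_CQE_COMMON_TYPE);
 *	u8 toggle = GET_FIELD(cqe->cmn.flags, RDMA_CQE_COMMON_TOGGLE_BIT);
 *
 * compare the toggle bit against its software-tracked phase to decide
 * whether the CQE is valid, and then dispatch on the type to the
 * requester or responder view of the union.
 */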

/* CQE requester status enumeration */
enum rdma_cqe_requester_status_enum {
	RDMA_CQE_REQ_STS_OK,
	RDMA_CQE_REQ_STS_BAD_RESPONSE_ERR,
	RDMA_CQE_REQ_STS_LOCAL_LENGTH_ERR,
	RDMA_CQE_REQ_STS_LOCAL_QP_OPERATION_ERR,
	RDMA_CQE_REQ_STS_LOCAL_PROTECTION_ERR,
	RDMA_CQE_REQ_STS_MEMORY_MGT_OPERATION_ERR,
	RDMA_CQE_REQ_STS_REMOTE_INVALID_REQUEST_ERR,
	RDMA_CQE_REQ_STS_REMOTE_ACCESS_ERR,
	RDMA_CQE_REQ_STS_REMOTE_OPERATION_ERR,
	RDMA_CQE_REQ_STS_RNR_NAK_RETRY_CNT_ERR,
	RDMA_CQE_REQ_STS_TRANSPORT_RETRY_CNT_ERR,
	RDMA_CQE_REQ_STS_WORK_REQUEST_FLUSHED_ERR,
	MAX_RDMA_CQE_REQUESTER_STATUS_ENUM
};

/* CQE responder status enumeration */
enum rdma_cqe_responder_status_enum {
	RDMA_CQE_RESP_STS_OK,
	RDMA_CQE_RESP_STS_LOCAL_ACCESS_ERR,
	RDMA_CQE_RESP_STS_LOCAL_LENGTH_ERR,
	RDMA_CQE_RESP_STS_LOCAL_QP_OPERATION_ERR,
	RDMA_CQE_RESP_STS_LOCAL_PROTECTION_ERR,
	RDMA_CQE_RESP_STS_MEMORY_MGT_OPERATION_ERR,
	RDMA_CQE_RESP_STS_REMOTE_INVALID_REQUEST_ERR,
	RDMA_CQE_RESP_STS_WORK_REQUEST_FLUSHED_ERR,
	MAX_RDMA_CQE_RESPONDER_STATUS_ENUM
};

/* CQE type enumeration */
enum rdma_cqe_type {
	RDMA_CQE_TYPE_REQUESTER,
	RDMA_CQE_TYPE_RESPONDER_RQ,
	RDMA_CQE_TYPE_RESPONDER_SRQ,
	RDMA_CQE_TYPE_INVALID,
	MAX_RDMA_CQE_TYPE
};

struct rdma_sq_sge {
	__le32 length;
	struct regpair addr;
	__le32 l_key;
};

struct rdma_rq_sge {
	struct regpair addr;
	__le32 length;
	__le32 flags;
#define RDMA_RQ_SGE_L_KEY_MASK 0x3FFFFFF
#define RDMA_RQ_SGE_L_KEY_SHIFT 0
#define RDMA_RQ_SGE_NUM_SGES_MASK 0x7
#define RDMA_RQ_SGE_NUM_SGES_SHIFT 26
#define RDMA_RQ_SGE_RESERVED0_MASK 0x7
#define RDMA_RQ_SGE_RESERVED0_SHIFT 29
};
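
/* Sketch of how an RQ SGE would typically be filled when posting a
 * receive (illustrative, not mandated by this header): the local key and
 * the number of SGEs in the WQE are folded into the packed "flags" dword,
 * e.g. starting from a zeroed u32 flags:
 *
 *	sge->addr.hi = cpu_to_le32(upper_32_bits(dma_addr));
 *	sge->addr.lo = cpu_to_le32(lower_32_bits(dma_addr));
 *	sge->length = cpu_to_le32(len);
 *	SET_FIELD(flags, RDMA_RQ_SGE_L_KEY, lkey);
 *	SET_FIELD(flags, RDMA_RQ_SGE_NUM_SGES, num_sges);
 *	sge->flags = cpu_to_le32(flags);
 */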

struct rdma_srq_sge {
	struct regpair addr;
	__le32 length;
	__le32 l_key;
};

/* Rdma doorbell data for SQ and RQ */
struct rdma_pwm_val16_data {
	__le16 icid;
	__le16 value;
};

union rdma_pwm_val16_data_union {
	struct rdma_pwm_val16_data as_struct;
	__le32 as_dword;
};

/* Rdma doorbell data for CQ */
struct rdma_pwm_val32_data {
	__le16 icid;
	u8 agg_flags;
	u8 params;
#define RDMA_PWM_VAL32_DATA_AGG_CMD_MASK 0x3
#define RDMA_PWM_VAL32_DATA_AGG_CMD_SHIFT 0
#define RDMA_PWM_VAL32_DATA_BYPASS_EN_MASK 0x1
#define RDMA_PWM_VAL32_DATA_BYPASS_EN_SHIFT 2
#define RDMA_PWM_VAL32_DATA_RESERVED_MASK 0x1F
#define RDMA_PWM_VAL32_DATA_RESERVED_SHIFT 3
	__le32 value;
};
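
/* Illustrative note (a sketch of common driver usage, not a requirement
 * of this header): the SQ/RQ producer doorbell record carries the
 * connection's icid, the new producer index goes in "value", and the
 * 32-bit union view is what gets written to the doorbell address, e.g.
 *
 *	union rdma_pwm_val16_data_union db;
 *
 *	db.as_struct.icid = cpu_to_le16(icid);
 *	db.as_struct.value = cpu_to_le16(sq_prod);
 *	writel(db.as_dword, db_addr);
 *
 * The CQ consumer doorbell uses the wider rdma_pwm_val32_data layout in a
 * similar way.
 */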

/* DIF Block size options */
enum rdma_dif_block_size {
	RDMA_DIF_BLOCK_512 = 0,
	RDMA_DIF_BLOCK_4096 = 1,
	MAX_RDMA_DIF_BLOCK_SIZE
};

/* DIF CRC initial value */
enum rdma_dif_crc_seed {
	RDMA_DIF_CRC_SEED_0000 = 0,
	RDMA_DIF_CRC_SEED_FFFF = 1,
	MAX_RDMA_DIF_CRC_SEED
};

/* RDMA DIF Error Result Structure */
struct rdma_dif_error_result {
	__le32 error_intervals;
	__le32 dif_error_1st_interval;
	u8 flags;
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_CRC_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_CRC_SHIFT 0
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_APP_TAG_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_APP_TAG_SHIFT 1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_REF_TAG_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_REF_TAG_SHIFT 2
#define RDMA_DIF_ERROR_RESULT_RESERVED0_MASK 0xF
#define RDMA_DIF_ERROR_RESULT_RESERVED0_SHIFT 3
#define RDMA_DIF_ERROR_RESULT_TOGGLE_BIT_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_TOGGLE_BIT_SHIFT 7
	u8 reserved1[55];
};

/* DIF IO direction */
enum rdma_dif_io_direction_flg {
	RDMA_DIF_DIR_RX = 0,
	RDMA_DIF_DIR_TX = 1,
	MAX_RDMA_DIF_IO_DIRECTION_FLG
};

/* RDMA DIF Runt Result Structure */
struct rdma_dif_runt_result {
	__le16 guard_tag;
	__le16 reserved[3];
};

/* Memory window type enumeration */
enum rdma_mw_type {
	RDMA_MW_TYPE_1,
	RDMA_MW_TYPE_2A,
	MAX_RDMA_MW_TYPE
};

struct rdma_sq_atomic_wqe {
	__le32 reserved1;
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_ATOMIC_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_ATOMIC_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_ATOMIC_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_ATOMIC_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_ATOMIC_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_ATOMIC_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_ATOMIC_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_ATOMIC_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;
	u8 prev_wqe_size;
	struct regpair remote_va;
	__le32 r_key;
	__le32 reserved2;
	struct regpair cmp_data;
	struct regpair swap_data;
};

/* First element (16 bytes) of atomic wqe */
struct rdma_sq_atomic_wqe_1st {
	__le32 reserved1;
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_ATOMIC_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_ATOMIC_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_ATOMIC_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_ATOMIC_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_ATOMIC_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_ATOMIC_WQE_1ST_RESERVED0_MASK 0x7
#define RDMA_SQ_ATOMIC_WQE_1ST_RESERVED0_SHIFT 5
	u8 wqe_size;
	u8 prev_wqe_size;
};

/* Second element (16 bytes) of atomic wqe */
struct rdma_sq_atomic_wqe_2nd {
	struct regpair remote_va;
	__le32 r_key;
	__le32 reserved2;
};

/* Third element (16 bytes) of atomic wqe */
struct rdma_sq_atomic_wqe_3rd {
	struct regpair cmp_data;
	struct regpair swap_data;
};
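
/* Illustrative note on the atomic WQE (conventions of the posting driver
 * rather than of this header): for a compare-and-swap the third element
 * carries the compare value in cmp_data and the swap value in swap_data,
 * while for a fetch-and-add the addend goes in swap_data. The request is
 * laid out as an rdma_sq_atomic_wqe_1st followed by the _2nd and _3rd
 * elements plus any local SGE, with wqe_size counting all 16-byte
 * elements.
 */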

struct rdma_sq_bind_wqe {
	struct regpair addr;
	__le32 l_key;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_BIND_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_BIND_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_BIND_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_BIND_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_BIND_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_BIND_WQE_RESERVED0_MASK 0x7
#define RDMA_SQ_BIND_WQE_RESERVED0_SHIFT 5
	u8 wqe_size;
	u8 prev_wqe_size;
	u8 bind_ctrl;
#define RDMA_SQ_BIND_WQE_ZERO_BASED_MASK 0x1
#define RDMA_SQ_BIND_WQE_ZERO_BASED_SHIFT 0
#define RDMA_SQ_BIND_WQE_MW_TYPE_MASK 0x1
#define RDMA_SQ_BIND_WQE_MW_TYPE_SHIFT 1
#define RDMA_SQ_BIND_WQE_RESERVED1_MASK 0x3F
#define RDMA_SQ_BIND_WQE_RESERVED1_SHIFT 2
	u8 access_ctrl;
#define RDMA_SQ_BIND_WQE_REMOTE_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_REMOTE_READ_SHIFT 0
#define RDMA_SQ_BIND_WQE_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_BIND_WQE_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_BIND_WQE_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_BIND_WQE_LOCAL_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_LOCAL_READ_SHIFT 3
#define RDMA_SQ_BIND_WQE_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_BIND_WQE_RESERVED2_MASK 0x7
#define RDMA_SQ_BIND_WQE_RESERVED2_SHIFT 5
	u8 reserved3;
	u8 length_hi;
	__le32 length_lo;
	__le32 parent_l_key;
	__le32 reserved4;
};

/* First element (16 bytes) of bind wqe */
struct rdma_sq_bind_wqe_1st {
	struct regpair addr;
	__le32 l_key;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_BIND_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_BIND_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_BIND_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_BIND_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_BIND_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_BIND_WQE_1ST_RESERVED0_MASK 0x7
#define RDMA_SQ_BIND_WQE_1ST_RESERVED0_SHIFT 5
	u8 wqe_size;
	u8 prev_wqe_size;
};

/* Second element (16 bytes) of bind wqe */
struct rdma_sq_bind_wqe_2nd {
	u8 bind_ctrl;
#define RDMA_SQ_BIND_WQE_2ND_ZERO_BASED_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_ZERO_BASED_SHIFT 0
#define RDMA_SQ_BIND_WQE_2ND_MW_TYPE_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_MW_TYPE_SHIFT 1
#define RDMA_SQ_BIND_WQE_2ND_RESERVED1_MASK 0x3F
#define RDMA_SQ_BIND_WQE_2ND_RESERVED1_SHIFT 2
	u8 access_ctrl;
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_READ_SHIFT 0
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_BIND_WQE_2ND_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_READ_SHIFT 3
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_BIND_WQE_2ND_RESERVED2_MASK 0x7
#define RDMA_SQ_BIND_WQE_2ND_RESERVED2_SHIFT 5
	u8 reserved3;
	u8 length_hi;
	__le32 length_lo;
	__le32 parent_l_key;
	__le32 reserved4;
};

/* Structure with only the SQ WQE common fields.
 * The size is that of one SQ element (16B).
 */
struct rdma_sq_common_wqe {
	__le32 reserved1[3];
	u8 req_type;
	u8 flags;
#define RDMA_SQ_COMMON_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_COMMON_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_COMMON_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_COMMON_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_COMMON_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_COMMON_WQE_RESERVED0_MASK 0x7
#define RDMA_SQ_COMMON_WQE_RESERVED0_SHIFT 5
	u8 wqe_size;
	u8 prev_wqe_size;
};
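
/* Sketch of SQ WQE composition (a hedged description of how a posting
 * driver typically uses these layouts, not a normative spec): every
 * request begins with a 16-byte element whose last four bytes follow the
 * rdma_sq_common_wqe layout: req_type selects a value from
 * enum rdma_sq_req_type, wqe_size counts the 16-byte elements that make
 * up the whole request (header elements plus SGEs or inline data), and
 * prev_wqe_size carries the size of the previously posted WQE. A
 * non-inline send with two scatter/gather entries, for example, would
 * typically span four elements (rdma_sq_send_wqe_1st,
 * rdma_sq_send_wqe_2st and two rdma_sq_sge entries), giving wqe_size = 4.
 */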

struct rdma_sq_fmr_wqe {
	struct regpair addr;
	__le32 l_key;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_FMR_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_FMR_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_FMR_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_FMR_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_FMR_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_FMR_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;
	u8 prev_wqe_size;
	u8 fmr_ctrl;
#define RDMA_SQ_FMR_WQE_PAGE_SIZE_LOG_MASK 0x1F
#define RDMA_SQ_FMR_WQE_PAGE_SIZE_LOG_SHIFT 0
#define RDMA_SQ_FMR_WQE_ZERO_BASED_MASK 0x1
#define RDMA_SQ_FMR_WQE_ZERO_BASED_SHIFT 5
#define RDMA_SQ_FMR_WQE_BIND_EN_MASK 0x1
#define RDMA_SQ_FMR_WQE_BIND_EN_SHIFT 6
#define RDMA_SQ_FMR_WQE_RESERVED1_MASK 0x1
#define RDMA_SQ_FMR_WQE_RESERVED1_SHIFT 7
	u8 access_ctrl;
#define RDMA_SQ_FMR_WQE_REMOTE_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_REMOTE_READ_SHIFT 0
#define RDMA_SQ_FMR_WQE_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_FMR_WQE_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_FMR_WQE_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_FMR_WQE_LOCAL_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_LOCAL_READ_SHIFT 3
#define RDMA_SQ_FMR_WQE_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_FMR_WQE_RESERVED2_MASK 0x7
#define RDMA_SQ_FMR_WQE_RESERVED2_SHIFT 5
	u8 reserved3;
	u8 length_hi;
	__le32 length_lo;
	struct regpair pbl_addr;
	__le32 dif_base_ref_tag;
	__le16 dif_app_tag;
	__le16 dif_app_tag_mask;
	__le16 dif_runt_crc_value;
	__le16 dif_flags;
#define RDMA_SQ_FMR_WQE_DIF_IO_DIRECTION_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_IO_DIRECTION_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_DIF_BLOCK_SIZE_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_BLOCK_SIZE_SHIFT 1
#define RDMA_SQ_FMR_WQE_DIF_RUNT_VALID_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_RUNT_VALID_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_CRC_GUARD_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_CRC_GUARD_SHIFT 3
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_REF_TAG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_REF_TAG_SHIFT 4
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_APP_TAG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_APP_TAG_SHIFT 5
#define RDMA_SQ_FMR_WQE_DIF_CRC_SEED_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_CRC_SEED_SHIFT 6
#define RDMA_SQ_FMR_WQE_RESERVED4_MASK 0x1FF
#define RDMA_SQ_FMR_WQE_RESERVED4_SHIFT 7
	__le32 Reserved5;
};

/* First element (16 bytes) of fmr wqe */
struct rdma_sq_fmr_wqe_1st {
	struct regpair addr;
	__le32 l_key;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_FMR_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_FMR_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_FMR_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_FMR_WQE_1ST_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_FMR_WQE_1ST_RESERVED0_MASK 0x3
#define RDMA_SQ_FMR_WQE_1ST_RESERVED0_SHIFT 6
	u8 wqe_size;
	u8 prev_wqe_size;
};

/* Second element (16 bytes) of fmr wqe */
struct rdma_sq_fmr_wqe_2nd {
	u8 fmr_ctrl;
#define RDMA_SQ_FMR_WQE_2ND_PAGE_SIZE_LOG_MASK 0x1F
#define RDMA_SQ_FMR_WQE_2ND_PAGE_SIZE_LOG_SHIFT 0
#define RDMA_SQ_FMR_WQE_2ND_ZERO_BASED_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_ZERO_BASED_SHIFT 5
#define RDMA_SQ_FMR_WQE_2ND_BIND_EN_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_BIND_EN_SHIFT 6
#define RDMA_SQ_FMR_WQE_2ND_RESERVED1_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_RESERVED1_SHIFT 7
	u8 access_ctrl;
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_READ_SHIFT 0
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_FMR_WQE_2ND_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_READ_SHIFT 3
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_FMR_WQE_2ND_RESERVED2_MASK 0x7
#define RDMA_SQ_FMR_WQE_2ND_RESERVED2_SHIFT 5
	u8 reserved3;
	u8 length_hi;
	__le32 length_lo;
	struct regpair pbl_addr;
};

/* Third element (16 bytes) of fmr wqe */
struct rdma_sq_fmr_wqe_3rd {
	__le32 dif_base_ref_tag;
	__le16 dif_app_tag;
	__le16 dif_app_tag_mask;
	__le16 dif_runt_crc_value;
	__le16 dif_flags;
#define RDMA_SQ_FMR_WQE_3RD_DIF_IO_DIRECTION_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_IO_DIRECTION_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_3RD_DIF_BLOCK_SIZE_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_BLOCK_SIZE_SHIFT 1
#define RDMA_SQ_FMR_WQE_3RD_DIF_RUNT_VALID_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_RUNT_VALID_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_CRC_GUARD_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_CRC_GUARD_SHIFT 3
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_REF_TAG_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_REF_TAG_SHIFT 4
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_APP_TAG_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_APP_TAG_SHIFT 5
#define RDMA_SQ_FMR_WQE_3RD_DIF_CRC_SEED_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_CRC_SEED_SHIFT 6
#define RDMA_SQ_FMR_WQE_3RD_RESERVED4_MASK 0x1FF
#define RDMA_SQ_FMR_WQE_3RD_RESERVED4_SHIFT 7
	__le32 Reserved5;
};

struct rdma_sq_local_inv_wqe {
	struct regpair reserved;
	__le32 inv_l_key;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_LOCAL_INV_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_LOCAL_INV_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_LOCAL_INV_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_LOCAL_INV_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_LOCAL_INV_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_LOCAL_INV_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_LOCAL_INV_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_LOCAL_INV_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;
	u8 prev_wqe_size;
};

struct rdma_sq_rdma_wqe {
	__le32 imm_data;
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_RDMA_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_RDMA_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_RDMA_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_RDMA_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_RDMA_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_RDMA_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;
	u8 prev_wqe_size;
	struct regpair remote_va;
	__le32 r_key;
	u8 dif_flags;
#define RDMA_SQ_RDMA_WQE_DIF_BLOCK_SIZE_MASK 0x1
#define RDMA_SQ_RDMA_WQE_DIF_BLOCK_SIZE_SHIFT 0
#define RDMA_SQ_RDMA_WQE_DIF_FIRST_RDMA_IN_IO_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_DIF_FIRST_RDMA_IN_IO_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_DIF_LAST_RDMA_IN_IO_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_DIF_LAST_RDMA_IN_IO_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_RESERVED1_MASK 0x1F
#define RDMA_SQ_RDMA_WQE_RESERVED1_SHIFT 3
	u8 reserved2[3];
};

/* First element (16 bytes) of rdma wqe */
struct rdma_sq_rdma_wqe_1st {
	__le32 imm_data;
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_RDMA_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_RDMA_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_RDMA_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_RDMA_WQE_1ST_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_RDMA_WQE_1ST_RESERVED0_MASK 0x3
#define RDMA_SQ_RDMA_WQE_1ST_RESERVED0_SHIFT 6
	u8 wqe_size;
	u8 prev_wqe_size;
};

/* Second element (16 bytes) of rdma wqe */
struct rdma_sq_rdma_wqe_2nd {
	struct regpair remote_va;
	__le32 r_key;
	u8 dif_flags;
#define RDMA_SQ_RDMA_WQE_2ND_DIF_BLOCK_SIZE_MASK 0x1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_BLOCK_SIZE_SHIFT 0
#define RDMA_SQ_RDMA_WQE_2ND_DIF_FIRST_SEGMENT_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_FIRST_SEGMENT_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_LAST_SEGMENT_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_LAST_SEGMENT_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_2ND_RESERVED1_MASK 0x1F
#define RDMA_SQ_RDMA_WQE_2ND_RESERVED1_SHIFT 3
	u8 reserved2[3];
};

/* SQ WQE req type enumeration */
enum rdma_sq_req_type {
	RDMA_SQ_REQ_TYPE_SEND,
	RDMA_SQ_REQ_TYPE_SEND_WITH_IMM,
	RDMA_SQ_REQ_TYPE_SEND_WITH_INVALIDATE,
	RDMA_SQ_REQ_TYPE_RDMA_WR,
	RDMA_SQ_REQ_TYPE_RDMA_WR_WITH_IMM,
	RDMA_SQ_REQ_TYPE_RDMA_RD,
	RDMA_SQ_REQ_TYPE_ATOMIC_CMP_AND_SWAP,
	RDMA_SQ_REQ_TYPE_ATOMIC_ADD,
	RDMA_SQ_REQ_TYPE_LOCAL_INVALIDATE,
	RDMA_SQ_REQ_TYPE_FAST_MR,
	RDMA_SQ_REQ_TYPE_BIND,
	RDMA_SQ_REQ_TYPE_INVALID,
	MAX_RDMA_SQ_REQ_TYPE
};

struct rdma_sq_send_wqe {
	__le32 inv_key_or_imm_data;
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_SEND_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_SEND_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_SEND_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_SEND_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_SEND_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_SEND_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_SEND_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_SEND_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;
	u8 prev_wqe_size;
	__le32 reserved1[4];
};

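/* First element (16 bytes) of send wqe */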
struct rdma_sq_send_wqe_1st {
	__le32 inv_key_or_imm_data;
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;
	u8 flags;
#define RDMA_SQ_SEND_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_SEND_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_SEND_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_SEND_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_SEND_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_SEND_WQE_1ST_RESERVED0_MASK 0x7
#define RDMA_SQ_SEND_WQE_1ST_RESERVED0_SHIFT 5
	u8 wqe_size;
	u8 prev_wqe_size;
};

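/* Second element (16 bytes) of send wqe */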
struct rdma_sq_send_wqe_2st {
	__le32 reserved1[4];
};

#endif /* __QED_HSI_RDMA__ */