// SPDX-License-Identifier: GPL-2.0 OR Linux-OpenIB
/*
 * Copyright (c) 2018, Mellanox Technologies inc. All rights reserved.
 */

#include <rdma/ib_user_verbs.h>
#include <rdma/ib_verbs.h>
#include <rdma/uverbs_types.h>
#include <rdma/uverbs_ioctl.h>
#include <rdma/mlx5_user_ioctl_cmds.h>
#include <rdma/ib_umem.h>
#include <linux/mlx5/driver.h>
#include <linux/mlx5/fs.h>
#include "mlx5_ib.h"

#define UVERBS_MODULE_NAME mlx5_ib
#include <rdma/uverbs_named_ioctl.h>

#define MLX5_MAX_DESTROY_INBOX_SIZE_DW MLX5_ST_SZ_DW(delete_fte_in)
struct devx_obj {
	struct mlx5_core_dev *mdev;
	u32 obj_id;
	u32 dinlen; /* destroy inbox length */
	u32 dinbox[MLX5_MAX_DESTROY_INBOX_SIZE_DW];
};

struct devx_umem {
	struct mlx5_core_dev *mdev;
	struct ib_umem *umem;
	u32 page_offset;
	int page_shift;
	int ncont;
	u32 dinlen;
	u32 dinbox[MLX5_ST_SZ_DW(general_obj_in_cmd_hdr)];
};

struct devx_umem_reg_cmd {
	void *in;
	u32 inlen;
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)];
};

static struct mlx5_ib_ucontext *devx_ufile2uctx(struct ib_uverbs_file *file)
{
	return to_mucontext(ib_uverbs_get_ucontext(file));
}

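/*
 * Allocate a device UCTX object for this user context and record the
 * returned object id as its DEVX uid; the uid is later stamped into the
 * header of every DEVX command issued on behalf of this context.
 */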
int mlx5_ib_devx_create(struct mlx5_ib_dev *dev, struct mlx5_ib_ucontext *context)
{
	u32 in[MLX5_ST_SZ_DW(create_uctx_in)] = {0};
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)] = {0};
	u64 general_obj_types;
	void *uctx;
	void *hdr;
	int err;

	uctx = MLX5_ADDR_OF(create_uctx_in, in, uctx);
	hdr = MLX5_ADDR_OF(create_uctx_in, in, hdr);

	general_obj_types = MLX5_CAP_GEN_64(dev->mdev, general_obj_types);
	if (!(general_obj_types & MLX5_GENERAL_OBJ_TYPES_CAP_UCTX) ||
	    !(general_obj_types & MLX5_GENERAL_OBJ_TYPES_CAP_UMEM))
		return -EINVAL;

	if (!capable(CAP_NET_RAW))
		return -EPERM;

	MLX5_SET(general_obj_in_cmd_hdr, hdr, opcode, MLX5_CMD_OP_CREATE_GENERAL_OBJECT);
	MLX5_SET(general_obj_in_cmd_hdr, hdr, obj_type, MLX5_OBJ_TYPE_UCTX);

	err = mlx5_cmd_exec(dev->mdev, in, sizeof(in), out, sizeof(out));
	if (err)
		return err;

	context->devx_uid = MLX5_GET(general_obj_out_cmd_hdr, out, obj_id);
	return 0;
}

void mlx5_ib_devx_destroy(struct mlx5_ib_dev *dev,
			  struct mlx5_ib_ucontext *context)
{
	u32 in[MLX5_ST_SZ_DW(general_obj_in_cmd_hdr)] = {0};
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)] = {0};

	MLX5_SET(general_obj_in_cmd_hdr, in, opcode, MLX5_CMD_OP_DESTROY_GENERAL_OBJECT);
	MLX5_SET(general_obj_in_cmd_hdr, in, obj_type, MLX5_OBJ_TYPE_UCTX);
	MLX5_SET(general_obj_in_cmd_hdr, in, obj_id, context->devx_uid);

	mlx5_cmd_exec(dev->mdev, in, sizeof(in), out, sizeof(out));
}

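/*
 * Check that the object id carried inside a modify/query command mailbox
 * matches the id of the DEVX object the uobject handle refers to, so the
 * handle cannot be used to address a different device object.
 */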
static int devx_is_valid_obj_id(struct devx_obj *obj, const void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);
	u32 obj_id;

	switch (opcode) {
	case MLX5_CMD_OP_MODIFY_GENERAL_OBJECT:
	case MLX5_CMD_OP_QUERY_GENERAL_OBJECT:
		obj_id = MLX5_GET(general_obj_in_cmd_hdr, in, obj_id);
		break;
	case MLX5_CMD_OP_QUERY_MKEY:
		obj_id = MLX5_GET(query_mkey_in, in, mkey_index);
		break;
	case MLX5_CMD_OP_QUERY_CQ:
		obj_id = MLX5_GET(query_cq_in, in, cqn);
		break;
	case MLX5_CMD_OP_MODIFY_CQ:
		obj_id = MLX5_GET(modify_cq_in, in, cqn);
		break;
	case MLX5_CMD_OP_QUERY_SQ:
		obj_id = MLX5_GET(query_sq_in, in, sqn);
		break;
	case MLX5_CMD_OP_MODIFY_SQ:
		obj_id = MLX5_GET(modify_sq_in, in, sqn);
		break;
	case MLX5_CMD_OP_QUERY_RQ:
		obj_id = MLX5_GET(query_rq_in, in, rqn);
		break;
	case MLX5_CMD_OP_MODIFY_RQ:
		obj_id = MLX5_GET(modify_rq_in, in, rqn);
		break;
	case MLX5_CMD_OP_QUERY_RMP:
		obj_id = MLX5_GET(query_rmp_in, in, rmpn);
		break;
	case MLX5_CMD_OP_MODIFY_RMP:
		obj_id = MLX5_GET(modify_rmp_in, in, rmpn);
		break;
	case MLX5_CMD_OP_QUERY_RQT:
		obj_id = MLX5_GET(query_rqt_in, in, rqtn);
		break;
	case MLX5_CMD_OP_MODIFY_RQT:
		obj_id = MLX5_GET(modify_rqt_in, in, rqtn);
		break;
	case MLX5_CMD_OP_QUERY_TIR:
		obj_id = MLX5_GET(query_tir_in, in, tirn);
		break;
	case MLX5_CMD_OP_MODIFY_TIR:
		obj_id = MLX5_GET(modify_tir_in, in, tirn);
		break;
	case MLX5_CMD_OP_QUERY_TIS:
		obj_id = MLX5_GET(query_tis_in, in, tisn);
		break;
	case MLX5_CMD_OP_MODIFY_TIS:
		obj_id = MLX5_GET(modify_tis_in, in, tisn);
		break;
	case MLX5_CMD_OP_QUERY_FLOW_TABLE:
		obj_id = MLX5_GET(query_flow_table_in, in, table_id);
		break;
	case MLX5_CMD_OP_MODIFY_FLOW_TABLE:
		obj_id = MLX5_GET(modify_flow_table_in, in, table_id);
		break;
	case MLX5_CMD_OP_QUERY_FLOW_GROUP:
		obj_id = MLX5_GET(query_flow_group_in, in, group_id);
		break;
	case MLX5_CMD_OP_QUERY_FLOW_TABLE_ENTRY:
		obj_id = MLX5_GET(query_fte_in, in, flow_index);
		break;
	case MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY:
		obj_id = MLX5_GET(set_fte_in, in, flow_index);
		break;
	case MLX5_CMD_OP_QUERY_Q_COUNTER:
		obj_id = MLX5_GET(query_q_counter_in, in, counter_set_id);
		break;
	case MLX5_CMD_OP_QUERY_FLOW_COUNTER:
		obj_id = MLX5_GET(query_flow_counter_in, in, flow_counter_id);
		break;
	case MLX5_CMD_OP_QUERY_MODIFY_HEADER_CONTEXT:
		obj_id = MLX5_GET(general_obj_in_cmd_hdr, in, obj_id);
		break;
	case MLX5_CMD_OP_QUERY_SCHEDULING_ELEMENT:
		obj_id = MLX5_GET(query_scheduling_element_in, in,
				  scheduling_element_id);
		break;
	case MLX5_CMD_OP_MODIFY_SCHEDULING_ELEMENT:
		obj_id = MLX5_GET(modify_scheduling_element_in, in,
				  scheduling_element_id);
		break;
	case MLX5_CMD_OP_ADD_VXLAN_UDP_DPORT:
		obj_id = MLX5_GET(add_vxlan_udp_dport_in, in, vxlan_udp_port);
		break;
	case MLX5_CMD_OP_QUERY_L2_TABLE_ENTRY:
		obj_id = MLX5_GET(query_l2_table_entry_in, in, table_index);
		break;
	case MLX5_CMD_OP_SET_L2_TABLE_ENTRY:
		obj_id = MLX5_GET(set_l2_table_entry_in, in, table_index);
		break;
	case MLX5_CMD_OP_QUERY_QP:
		obj_id = MLX5_GET(query_qp_in, in, qpn);
		break;
	case MLX5_CMD_OP_RST2INIT_QP:
		obj_id = MLX5_GET(rst2init_qp_in, in, qpn);
		break;
	case MLX5_CMD_OP_INIT2RTR_QP:
		obj_id = MLX5_GET(init2rtr_qp_in, in, qpn);
		break;
	case MLX5_CMD_OP_RTR2RTS_QP:
		obj_id = MLX5_GET(rtr2rts_qp_in, in, qpn);
		break;
	case MLX5_CMD_OP_RTS2RTS_QP:
		obj_id = MLX5_GET(rts2rts_qp_in, in, qpn);
		break;
	case MLX5_CMD_OP_SQERR2RTS_QP:
		obj_id = MLX5_GET(sqerr2rts_qp_in, in, qpn);
		break;
	case MLX5_CMD_OP_2ERR_QP:
		obj_id = MLX5_GET(qp_2err_in, in, qpn);
		break;
	case MLX5_CMD_OP_2RST_QP:
		obj_id = MLX5_GET(qp_2rst_in, in, qpn);
		break;
	case MLX5_CMD_OP_QUERY_DCT:
		obj_id = MLX5_GET(query_dct_in, in, dctn);
		break;
	case MLX5_CMD_OP_QUERY_XRQ:
		obj_id = MLX5_GET(query_xrq_in, in, xrqn);
		break;
	case MLX5_CMD_OP_QUERY_XRC_SRQ:
		obj_id = MLX5_GET(query_xrc_srq_in, in, xrc_srqn);
		break;
	case MLX5_CMD_OP_ARM_XRC_SRQ:
		obj_id = MLX5_GET(arm_xrc_srq_in, in, xrc_srqn);
		break;
	case MLX5_CMD_OP_QUERY_SRQ:
		obj_id = MLX5_GET(query_srq_in, in, srqn);
		break;
	case MLX5_CMD_OP_ARM_RQ:
		obj_id = MLX5_GET(arm_rq_in, in, srq_number);
		break;
	case MLX5_CMD_OP_DRAIN_DCT:
	case MLX5_CMD_OP_ARM_DCT_FOR_KEY_VIOLATION:
		obj_id = MLX5_GET(drain_dct_in, in, dctn);
		break;
	case MLX5_CMD_OP_ARM_XRQ:
		obj_id = MLX5_GET(arm_xrq_in, in, xrqn);
		break;
	default:
		return false;
	}

	if (obj_id == obj->obj_id)
		return true;

	return false;
}

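/*
 * The helpers below whitelist the device command opcodes that each DEVX
 * method may carry: object creation, object modification, object query
 * and general (non object-bound) commands.
 */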
static bool devx_is_obj_create_cmd(const void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	switch (opcode) {
	case MLX5_CMD_OP_CREATE_GENERAL_OBJECT:
	case MLX5_CMD_OP_CREATE_MKEY:
	case MLX5_CMD_OP_CREATE_CQ:
	case MLX5_CMD_OP_ALLOC_PD:
	case MLX5_CMD_OP_ALLOC_TRANSPORT_DOMAIN:
	case MLX5_CMD_OP_CREATE_RMP:
	case MLX5_CMD_OP_CREATE_SQ:
	case MLX5_CMD_OP_CREATE_RQ:
	case MLX5_CMD_OP_CREATE_RQT:
	case MLX5_CMD_OP_CREATE_TIR:
	case MLX5_CMD_OP_CREATE_TIS:
	case MLX5_CMD_OP_ALLOC_Q_COUNTER:
	case MLX5_CMD_OP_CREATE_FLOW_TABLE:
	case MLX5_CMD_OP_CREATE_FLOW_GROUP:
	case MLX5_CMD_OP_ALLOC_FLOW_COUNTER:
	case MLX5_CMD_OP_ALLOC_ENCAP_HEADER:
	case MLX5_CMD_OP_ALLOC_MODIFY_HEADER_CONTEXT:
	case MLX5_CMD_OP_CREATE_SCHEDULING_ELEMENT:
	case MLX5_CMD_OP_ADD_VXLAN_UDP_DPORT:
	case MLX5_CMD_OP_SET_L2_TABLE_ENTRY:
	case MLX5_CMD_OP_CREATE_QP:
	case MLX5_CMD_OP_CREATE_SRQ:
	case MLX5_CMD_OP_CREATE_XRC_SRQ:
	case MLX5_CMD_OP_CREATE_DCT:
	case MLX5_CMD_OP_CREATE_XRQ:
	case MLX5_CMD_OP_ATTACH_TO_MCG:
	case MLX5_CMD_OP_ALLOC_XRCD:
		return true;
	case MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY:
	{
		u16 op_mod = MLX5_GET(set_fte_in, in, op_mod);

		if (op_mod == 0)
			return true;
		return false;
	}
	default:
		return false;
	}
}

static bool devx_is_obj_modify_cmd(const void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	switch (opcode) {
	case MLX5_CMD_OP_MODIFY_GENERAL_OBJECT:
	case MLX5_CMD_OP_MODIFY_CQ:
	case MLX5_CMD_OP_MODIFY_RMP:
	case MLX5_CMD_OP_MODIFY_SQ:
	case MLX5_CMD_OP_MODIFY_RQ:
	case MLX5_CMD_OP_MODIFY_RQT:
	case MLX5_CMD_OP_MODIFY_TIR:
	case MLX5_CMD_OP_MODIFY_TIS:
	case MLX5_CMD_OP_MODIFY_FLOW_TABLE:
	case MLX5_CMD_OP_MODIFY_SCHEDULING_ELEMENT:
	case MLX5_CMD_OP_ADD_VXLAN_UDP_DPORT:
	case MLX5_CMD_OP_SET_L2_TABLE_ENTRY:
	case MLX5_CMD_OP_RST2INIT_QP:
	case MLX5_CMD_OP_INIT2RTR_QP:
	case MLX5_CMD_OP_RTR2RTS_QP:
	case MLX5_CMD_OP_RTS2RTS_QP:
	case MLX5_CMD_OP_SQERR2RTS_QP:
	case MLX5_CMD_OP_2ERR_QP:
	case MLX5_CMD_OP_2RST_QP:
	case MLX5_CMD_OP_ARM_XRC_SRQ:
	case MLX5_CMD_OP_ARM_RQ:
	case MLX5_CMD_OP_DRAIN_DCT:
	case MLX5_CMD_OP_ARM_DCT_FOR_KEY_VIOLATION:
	case MLX5_CMD_OP_ARM_XRQ:
		return true;
	case MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY:
	{
		u16 op_mod = MLX5_GET(set_fte_in, in, op_mod);

		if (op_mod == 1)
			return true;
		return false;
	}
	default:
		return false;
	}
}

static bool devx_is_obj_query_cmd(const void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	switch (opcode) {
	case MLX5_CMD_OP_QUERY_GENERAL_OBJECT:
	case MLX5_CMD_OP_QUERY_MKEY:
	case MLX5_CMD_OP_QUERY_CQ:
	case MLX5_CMD_OP_QUERY_RMP:
	case MLX5_CMD_OP_QUERY_SQ:
	case MLX5_CMD_OP_QUERY_RQ:
	case MLX5_CMD_OP_QUERY_RQT:
	case MLX5_CMD_OP_QUERY_TIR:
	case MLX5_CMD_OP_QUERY_TIS:
	case MLX5_CMD_OP_QUERY_Q_COUNTER:
	case MLX5_CMD_OP_QUERY_FLOW_TABLE:
	case MLX5_CMD_OP_QUERY_FLOW_GROUP:
	case MLX5_CMD_OP_QUERY_FLOW_TABLE_ENTRY:
	case MLX5_CMD_OP_QUERY_FLOW_COUNTER:
	case MLX5_CMD_OP_QUERY_MODIFY_HEADER_CONTEXT:
	case MLX5_CMD_OP_QUERY_SCHEDULING_ELEMENT:
	case MLX5_CMD_OP_QUERY_L2_TABLE_ENTRY:
	case MLX5_CMD_OP_QUERY_QP:
	case MLX5_CMD_OP_QUERY_SRQ:
	case MLX5_CMD_OP_QUERY_XRC_SRQ:
	case MLX5_CMD_OP_QUERY_DCT:
	case MLX5_CMD_OP_QUERY_XRQ:
		return true;
	default:
		return false;
	}
}

static bool devx_is_general_cmd(void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	switch (opcode) {
	case MLX5_CMD_OP_QUERY_HCA_CAP:
	case MLX5_CMD_OP_QUERY_VPORT_STATE:
	case MLX5_CMD_OP_QUERY_ADAPTER:
	case MLX5_CMD_OP_QUERY_ISSI:
	case MLX5_CMD_OP_QUERY_NIC_VPORT_CONTEXT:
	case MLX5_CMD_OP_QUERY_ROCE_ADDRESS:
	case MLX5_CMD_OP_QUERY_VNIC_ENV:
	case MLX5_CMD_OP_QUERY_VPORT_COUNTER:
	case MLX5_CMD_OP_GET_DROPPED_PACKET_LOG:
	case MLX5_CMD_OP_NOP:
	case MLX5_CMD_OP_QUERY_CONG_STATUS:
	case MLX5_CMD_OP_QUERY_CONG_PARAMS:
	case MLX5_CMD_OP_QUERY_CONG_STATISTICS:
		return true;
	default:
		return false;
	}
}

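/*
 * Translate a user supplied completion vector number into the device EQ
 * number, so userspace can create its own CQs against the proper event
 * queue.
 */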
static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_QUERY_EQN)(struct ib_device *ib_dev,
							 struct ib_uverbs_file *file,
							 struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_dev *dev = to_mdev(ib_dev);
	int user_vector;
	int dev_eqn;
	unsigned int irqn;
	int err;

	if (uverbs_copy_from(&user_vector, attrs,
			     MLX5_IB_ATTR_DEVX_QUERY_EQN_USER_VEC))
		return -EFAULT;

	err = mlx5_vector2eqn(dev->mdev, user_vector, &dev_eqn, &irqn);
	if (err < 0)
		return err;

	if (uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_QUERY_EQN_DEV_EQN,
			   &dev_eqn, sizeof(dev_eqn)))
		return -EFAULT;

	return 0;
}

/*
 * Security note:
 * The hardware protection mechanism works like this: Each device object that
 * is subject to UAR doorbells (QP/SQ/CQ) gets a UAR ID (called uar_page in
 * the device specification manual) upon its creation. Then upon doorbell,
 * hardware fetches the object context for which the doorbell was rung, and
 * validates that the UAR through which the DB was rung matches the UAR ID
 * of the object.
 * If there is no match, the doorbell is silently ignored by the hardware. Of
 * course, the user cannot ring a doorbell on a UAR that was not mapped to it.
 * Now in devx, as the devx kernel does not manipulate the QP/SQ/CQ command
 * mailboxes (except tagging them with UID), we expose to the user its UAR
 * ID, so it can embed it in these objects in the expected specification
 * format. So the only thing the user can do is hurt itself by creating a
 * QP/SQ/CQ with a UAR ID other than its own, and then in this case other
 * users may ring a doorbell on its objects.
 * The consequence of that will be that another user can schedule a QP/SQ
 * of the buggy user for execution (just insert it to the hardware schedule
 * queue or arm its CQ for event generation), no further harm is expected.
 */
static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_QUERY_UAR)(struct ib_device *ib_dev,
							 struct ib_uverbs_file *file,
							 struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_ucontext *c = devx_ufile2uctx(file);
	u32 user_idx;
	s32 dev_idx;

	if (uverbs_copy_from(&user_idx, attrs,
			     MLX5_IB_ATTR_DEVX_QUERY_UAR_USER_IDX))
		return -EFAULT;

	dev_idx = bfregn_to_uar_index(to_mdev(ib_dev),
				      &c->bfregi, user_idx, true);
	if (dev_idx < 0)
		return dev_idx;

	if (uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_QUERY_UAR_DEV_IDX,
			   &dev_idx, sizeof(dev_idx)))
		return -EFAULT;

	return 0;
}

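/*
 * Execute a whitelisted general HCA command on behalf of the user; the
 * command mailbox is stamped with the context's DEVX uid before it is
 * passed to the device.
 */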
static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_OTHER)(struct ib_device *ib_dev,
						     struct ib_uverbs_file *file,
						     struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_ucontext *c = devx_ufile2uctx(file);
	struct mlx5_ib_dev *dev = to_mdev(ib_dev);
	void *cmd_in = uverbs_attr_get_alloced_ptr(
		attrs, MLX5_IB_ATTR_DEVX_OTHER_CMD_IN);
	int cmd_out_len = uverbs_attr_get_len(attrs,
					      MLX5_IB_ATTR_DEVX_OTHER_CMD_OUT);
	void *cmd_out;
	int err;

	if (!c->devx_uid)
		return -EPERM;

	/* Only a whitelist of general HCA commands is allowed for this method. */
	if (!devx_is_general_cmd(cmd_in))
		return -EINVAL;

	cmd_out = kvzalloc(cmd_out_len, GFP_KERNEL);
	if (!cmd_out)
		return -ENOMEM;

	MLX5_SET(general_obj_in_cmd_hdr, cmd_in, uid, c->devx_uid);
	err = mlx5_cmd_exec(dev->mdev, cmd_in,
			    uverbs_attr_get_len(attrs, MLX5_IB_ATTR_DEVX_OTHER_CMD_IN),
			    cmd_out, cmd_out_len);
	if (err)
		goto other_cmd_free;

	err = uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_OTHER_CMD_OUT, cmd_out, cmd_out_len);

other_cmd_free:
	kvfree(cmd_out);
	return err;
}

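/*
 * From a successful create command (its input and output mailboxes),
 * pre-build the matching destroy command into 'din', returning its length
 * and the created object id. The destroy box is kept in the devx_obj so
 * the object can always be torn down on cleanup.
 */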
static void devx_obj_build_destroy_cmd(void *in, void *out, void *din,
				       u32 *dinlen,
				       u32 *obj_id)
{
	u16 obj_type = MLX5_GET(general_obj_in_cmd_hdr, in, obj_type);
	u16 uid = MLX5_GET(general_obj_in_cmd_hdr, in, uid);

	*obj_id = MLX5_GET(general_obj_out_cmd_hdr, out, obj_id);
	*dinlen = MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr);

	MLX5_SET(general_obj_in_cmd_hdr, din, obj_id, *obj_id);
	MLX5_SET(general_obj_in_cmd_hdr, din, uid, uid);

	switch (MLX5_GET(general_obj_in_cmd_hdr, in, opcode)) {
	case MLX5_CMD_OP_CREATE_GENERAL_OBJECT:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_GENERAL_OBJECT);
		MLX5_SET(general_obj_in_cmd_hdr, din, obj_type, obj_type);
		break;

	case MLX5_CMD_OP_CREATE_MKEY:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_MKEY);
		break;
	case MLX5_CMD_OP_CREATE_CQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_CQ);
		break;
	case MLX5_CMD_OP_ALLOC_PD:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DEALLOC_PD);
		break;
	case MLX5_CMD_OP_ALLOC_TRANSPORT_DOMAIN:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_TRANSPORT_DOMAIN);
		break;
	case MLX5_CMD_OP_CREATE_RMP:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_RMP);
		break;
	case MLX5_CMD_OP_CREATE_SQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_SQ);
		break;
	case MLX5_CMD_OP_CREATE_RQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_RQ);
		break;
	case MLX5_CMD_OP_CREATE_RQT:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_RQT);
		break;
	case MLX5_CMD_OP_CREATE_TIR:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_TIR);
		break;
	case MLX5_CMD_OP_CREATE_TIS:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_TIS);
		break;
	case MLX5_CMD_OP_ALLOC_Q_COUNTER:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_Q_COUNTER);
		break;
	case MLX5_CMD_OP_CREATE_FLOW_TABLE:
		*dinlen = MLX5_ST_SZ_BYTES(destroy_flow_table_in);
		*obj_id = MLX5_GET(create_flow_table_out, out, table_id);
		MLX5_SET(destroy_flow_table_in, din, other_vport,
			 MLX5_GET(create_flow_table_in, in, other_vport));
		MLX5_SET(destroy_flow_table_in, din, vport_number,
			 MLX5_GET(create_flow_table_in, in, vport_number));
		MLX5_SET(destroy_flow_table_in, din, table_type,
			 MLX5_GET(create_flow_table_in, in, table_type));
		MLX5_SET(destroy_flow_table_in, din, table_id, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DESTROY_FLOW_TABLE);
		break;
	case MLX5_CMD_OP_CREATE_FLOW_GROUP:
		*dinlen = MLX5_ST_SZ_BYTES(destroy_flow_group_in);
		*obj_id = MLX5_GET(create_flow_group_out, out, group_id);
		MLX5_SET(destroy_flow_group_in, din, other_vport,
			 MLX5_GET(create_flow_group_in, in, other_vport));
		MLX5_SET(destroy_flow_group_in, din, vport_number,
			 MLX5_GET(create_flow_group_in, in, vport_number));
		MLX5_SET(destroy_flow_group_in, din, table_type,
			 MLX5_GET(create_flow_group_in, in, table_type));
		MLX5_SET(destroy_flow_group_in, din, table_id,
			 MLX5_GET(create_flow_group_in, in, table_id));
		MLX5_SET(destroy_flow_group_in, din, group_id, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DESTROY_FLOW_GROUP);
		break;
	case MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY:
		*dinlen = MLX5_ST_SZ_BYTES(delete_fte_in);
		*obj_id = MLX5_GET(set_fte_in, in, flow_index);
		MLX5_SET(delete_fte_in, din, other_vport,
			 MLX5_GET(set_fte_in, in, other_vport));
		MLX5_SET(delete_fte_in, din, vport_number,
			 MLX5_GET(set_fte_in, in, vport_number));
		MLX5_SET(delete_fte_in, din, table_type,
			 MLX5_GET(set_fte_in, in, table_type));
		MLX5_SET(delete_fte_in, din, table_id,
			 MLX5_GET(set_fte_in, in, table_id));
		MLX5_SET(delete_fte_in, din, flow_index, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DELETE_FLOW_TABLE_ENTRY);
		break;
	case MLX5_CMD_OP_ALLOC_FLOW_COUNTER:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_FLOW_COUNTER);
		break;
	case MLX5_CMD_OP_ALLOC_ENCAP_HEADER:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_ENCAP_HEADER);
		break;
	case MLX5_CMD_OP_ALLOC_MODIFY_HEADER_CONTEXT:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_MODIFY_HEADER_CONTEXT);
		break;
	case MLX5_CMD_OP_CREATE_SCHEDULING_ELEMENT:
		*dinlen = MLX5_ST_SZ_BYTES(destroy_scheduling_element_in);
		*obj_id = MLX5_GET(create_scheduling_element_out, out,
				   scheduling_element_id);
		MLX5_SET(destroy_scheduling_element_in, din,
			 scheduling_hierarchy,
			 MLX5_GET(create_scheduling_element_in, in,
				  scheduling_hierarchy));
		MLX5_SET(destroy_scheduling_element_in, din,
			 scheduling_element_id, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DESTROY_SCHEDULING_ELEMENT);
		break;
	case MLX5_CMD_OP_ADD_VXLAN_UDP_DPORT:
		*dinlen = MLX5_ST_SZ_BYTES(delete_vxlan_udp_dport_in);
		*obj_id = MLX5_GET(add_vxlan_udp_dport_in, in, vxlan_udp_port);
		MLX5_SET(delete_vxlan_udp_dport_in, din, vxlan_udp_port, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DELETE_VXLAN_UDP_DPORT);
		break;
	case MLX5_CMD_OP_SET_L2_TABLE_ENTRY:
		*dinlen = MLX5_ST_SZ_BYTES(delete_l2_table_entry_in);
		*obj_id = MLX5_GET(set_l2_table_entry_in, in, table_index);
		MLX5_SET(delete_l2_table_entry_in, din, table_index, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DELETE_L2_TABLE_ENTRY);
		break;
	case MLX5_CMD_OP_CREATE_QP:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_QP);
		break;
	case MLX5_CMD_OP_CREATE_SRQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_SRQ);
		break;
	case MLX5_CMD_OP_CREATE_XRC_SRQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DESTROY_XRC_SRQ);
		break;
	case MLX5_CMD_OP_CREATE_DCT:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_DCT);
		break;
	case MLX5_CMD_OP_CREATE_XRQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_XRQ);
		break;
	case MLX5_CMD_OP_ATTACH_TO_MCG:
		*dinlen = MLX5_ST_SZ_BYTES(detach_from_mcg_in);
		MLX5_SET(detach_from_mcg_in, din, qpn,
			 MLX5_GET(attach_to_mcg_in, in, qpn));
		memcpy(MLX5_ADDR_OF(detach_from_mcg_in, din, multicast_gid),
		       MLX5_ADDR_OF(attach_to_mcg_in, in, multicast_gid),
		       MLX5_FLD_SZ_BYTES(attach_to_mcg_in, multicast_gid));
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DETACH_FROM_MCG);
		break;
	case MLX5_CMD_OP_ALLOC_XRCD:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DEALLOC_XRCD);
		break;
	default:
		/* The opcode must match one accepted by devx_is_obj_create_cmd() */
		WARN_ON(true);
		break;
	}
}

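/* Destroy the underlying device object using the pre-built destroy box. */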
static int devx_obj_cleanup(struct ib_uobject *uobject,
			    enum rdma_remove_reason why)
{
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)];
	struct devx_obj *obj = uobject->object;
	int ret;

	ret = mlx5_cmd_exec(obj->mdev, obj->dinbox, obj->dinlen, out, sizeof(out));
	if (ib_is_destroy_retryable(ret, why, uobject))
		return ret;

	kfree(obj);
	return ret;
}

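/*
 * Create a DEVX object: run the user supplied create mailbox (stamped with
 * the DEVX uid) and, on success, record the matching destroy command on
 * the new object for later cleanup.
 */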
static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_OBJ_CREATE)(struct ib_device *ib_dev,
							   struct ib_uverbs_file *file,
							   struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_ucontext *c = devx_ufile2uctx(file);
	struct mlx5_ib_dev *dev = to_mdev(ib_dev);
	void *cmd_in = uverbs_attr_get_alloced_ptr(attrs, MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_IN);
	int cmd_out_len = uverbs_attr_get_len(attrs,
					      MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_OUT);
	void *cmd_out;
	struct ib_uobject *uobj;
	struct devx_obj *obj;
	int err;

	if (!c->devx_uid)
		return -EPERM;

	if (!devx_is_obj_create_cmd(cmd_in))
		return -EINVAL;

	obj = kzalloc(sizeof(struct devx_obj), GFP_KERNEL);
	if (!obj)
		return -ENOMEM;

	cmd_out = kvzalloc(cmd_out_len, GFP_KERNEL);
	if (!cmd_out) {
		err = -ENOMEM;
		goto obj_free;
	}

	MLX5_SET(general_obj_in_cmd_hdr, cmd_in, uid, c->devx_uid);
	err = mlx5_cmd_exec(dev->mdev, cmd_in,
			    uverbs_attr_get_len(attrs, MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_IN),
			    cmd_out, cmd_out_len);
	if (err)
		goto cmd_free;

	uobj = uverbs_attr_get_uobject(attrs, MLX5_IB_ATTR_DEVX_OBJ_CREATE_HANDLE);
	uobj->object = obj;
	obj->mdev = dev->mdev;
	devx_obj_build_destroy_cmd(cmd_in, cmd_out, obj->dinbox, &obj->dinlen, &obj->obj_id);
	WARN_ON(obj->dinlen > MLX5_MAX_DESTROY_INBOX_SIZE_DW * sizeof(u32));

	err = uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_OUT, cmd_out, cmd_out_len);
	if (err)
		goto cmd_free;

	kvfree(cmd_out);
	return 0;

cmd_free:
	kvfree(cmd_out);
obj_free:
	kfree(obj);
	return err;
}

static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_OBJ_MODIFY)(struct ib_device *ib_dev,
							   struct ib_uverbs_file *file,
							   struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_ucontext *c = devx_ufile2uctx(file);
	struct mlx5_ib_dev *dev = to_mdev(ib_dev);
	void *cmd_in = uverbs_attr_get_alloced_ptr(attrs, MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_IN);
	int cmd_out_len = uverbs_attr_get_len(attrs,
					      MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_OUT);
	struct ib_uobject *uobj = uverbs_attr_get_uobject(attrs,
							  MLX5_IB_ATTR_DEVX_OBJ_MODIFY_HANDLE);
	void *cmd_out;
	int err;

	if (!c->devx_uid)
		return -EPERM;

	if (!devx_is_obj_modify_cmd(cmd_in))
		return -EINVAL;

	if (!devx_is_valid_obj_id(uobj->object, cmd_in))
		return -EINVAL;

	cmd_out = kvzalloc(cmd_out_len, GFP_KERNEL);
	if (!cmd_out)
		return -ENOMEM;

	MLX5_SET(general_obj_in_cmd_hdr, cmd_in, uid, c->devx_uid);
	err = mlx5_cmd_exec(dev->mdev, cmd_in,
			    uverbs_attr_get_len(attrs, MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_IN),
			    cmd_out, cmd_out_len);
	if (err)
		goto other_cmd_free;

	err = uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_OUT,
			     cmd_out, cmd_out_len);

other_cmd_free:
	kvfree(cmd_out);
	return err;
}

static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_OBJ_QUERY)(struct ib_device *ib_dev,
							  struct ib_uverbs_file *file,
							  struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_ucontext *c = devx_ufile2uctx(file);
	struct mlx5_ib_dev *dev = to_mdev(ib_dev);
	void *cmd_in = uverbs_attr_get_alloced_ptr(attrs, MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_IN);
	int cmd_out_len = uverbs_attr_get_len(attrs,
					      MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_OUT);
	struct ib_uobject *uobj = uverbs_attr_get_uobject(attrs,
							  MLX5_IB_ATTR_DEVX_OBJ_QUERY_HANDLE);
	void *cmd_out;
	int err;

	if (!c->devx_uid)
		return -EPERM;

	if (!devx_is_obj_query_cmd(cmd_in))
		return -EINVAL;

	if (!devx_is_valid_obj_id(uobj->object, cmd_in))
		return -EINVAL;

	cmd_out = kvzalloc(cmd_out_len, GFP_KERNEL);
	if (!cmd_out)
		return -ENOMEM;

	MLX5_SET(general_obj_in_cmd_hdr, cmd_in, uid, c->devx_uid);
	err = mlx5_cmd_exec(dev->mdev, cmd_in,
			    uverbs_attr_get_len(attrs, MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_IN),
			    cmd_out, cmd_out_len);
	if (err)
		goto other_cmd_free;

	err = uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_OUT, cmd_out, cmd_out_len);

other_cmd_free:
	kvfree(cmd_out);
	return err;
}

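/*
 * Pin the user memory described by the UMEM_REG attributes and compute the
 * page shift, the number of compound pages (ncont) and the offset of the
 * start address within the first page.
 */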
static int devx_umem_get(struct mlx5_ib_dev *dev, struct ib_ucontext *ucontext,
			 struct uverbs_attr_bundle *attrs,
			 struct devx_umem *obj)
{
	u64 addr;
	size_t size;
	int access;
	int npages;
	int err;
	u32 page_mask;

	if (uverbs_copy_from(&addr, attrs, MLX5_IB_ATTR_DEVX_UMEM_REG_ADDR) ||
	    uverbs_copy_from(&size, attrs, MLX5_IB_ATTR_DEVX_UMEM_REG_LEN) ||
	    uverbs_copy_from(&access, attrs, MLX5_IB_ATTR_DEVX_UMEM_REG_ACCESS))
		return -EFAULT;

	err = ib_check_mr_access(access);
	if (err)
		return err;

	obj->umem = ib_umem_get(ucontext, addr, size, access, 0);
	if (IS_ERR(obj->umem))
		return PTR_ERR(obj->umem);

	mlx5_ib_cont_pages(obj->umem, obj->umem->address,
			   MLX5_MKEY_PAGE_SHIFT_MASK, &npages,
			   &obj->page_shift, &obj->ncont, NULL);

	if (!npages) {
		ib_umem_release(obj->umem);
		return -EINVAL;
	}

	page_mask = (1 << obj->page_shift) - 1;
	obj->page_offset = obj->umem->address & page_mask;

	return 0;
}

static int devx_umem_reg_cmd_alloc(struct devx_umem *obj,
				   struct devx_umem_reg_cmd *cmd)
{
	cmd->inlen = MLX5_ST_SZ_BYTES(create_umem_in) +
		(MLX5_ST_SZ_BYTES(mtt) * obj->ncont);
	cmd->in = kvzalloc(cmd->inlen, GFP_KERNEL);
	return cmd->in ? 0 : -ENOMEM;
}

static void devx_umem_reg_cmd_free(struct devx_umem_reg_cmd *cmd)
{
	kvfree(cmd->in);
}

static void devx_umem_reg_cmd_build(struct mlx5_ib_dev *dev,
				    struct devx_umem *obj,
				    struct devx_umem_reg_cmd *cmd)
{
	void *umem;
	__be64 *mtt;

	umem = MLX5_ADDR_OF(create_umem_in, cmd->in, umem);
	mtt = (__be64 *)MLX5_ADDR_OF(umem, umem, mtt);

	MLX5_SET(general_obj_in_cmd_hdr, cmd->in, opcode, MLX5_CMD_OP_CREATE_GENERAL_OBJECT);
	MLX5_SET(general_obj_in_cmd_hdr, cmd->in, obj_type, MLX5_OBJ_TYPE_UMEM);
	MLX5_SET64(umem, umem, num_of_mtt, obj->ncont);
	MLX5_SET(umem, umem, log_page_size, obj->page_shift -
					    MLX5_ADAPTER_PAGE_SHIFT);
	MLX5_SET(umem, umem, page_offset, obj->page_offset);
	mlx5_ib_populate_pas(dev, obj->umem, obj->page_shift, mtt,
			     (obj->umem->writable ? MLX5_IB_MTT_WRITE : 0) |
			     MLX5_IB_MTT_READ);
}

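/*
 * Register user memory as a device UMEM object: pin the pages, build and
 * execute the create command, and return the resulting UMEM id to
 * userspace.
 */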
static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_UMEM_REG)(struct ib_device *ib_dev,
							 struct ib_uverbs_file *file,
							 struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_ucontext *c = devx_ufile2uctx(file);
	struct mlx5_ib_dev *dev = to_mdev(ib_dev);
	struct devx_umem_reg_cmd cmd;
	struct devx_umem *obj;
	struct ib_uobject *uobj;
	u32 obj_id;
	int err;

	if (!c->devx_uid)
		return -EPERM;

	uobj = uverbs_attr_get_uobject(attrs, MLX5_IB_ATTR_DEVX_UMEM_REG_HANDLE);
	obj = kzalloc(sizeof(struct devx_umem), GFP_KERNEL);
	if (!obj)
		return -ENOMEM;

	err = devx_umem_get(dev, &c->ibucontext, attrs, obj);
	if (err)
		goto err_obj_free;

	err = devx_umem_reg_cmd_alloc(obj, &cmd);
	if (err)
		goto err_umem_release;

	devx_umem_reg_cmd_build(dev, obj, &cmd);

	MLX5_SET(general_obj_in_cmd_hdr, cmd.in, uid, c->devx_uid);
	err = mlx5_cmd_exec(dev->mdev, cmd.in, cmd.inlen, cmd.out,
			    sizeof(cmd.out));
	if (err)
		goto err_umem_reg_cmd_free;

	obj->mdev = dev->mdev;
	uobj->object = obj;
	devx_obj_build_destroy_cmd(cmd.in, cmd.out, obj->dinbox, &obj->dinlen, &obj_id);
	err = uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_UMEM_REG_OUT_ID, &obj_id, sizeof(obj_id));
	if (err)
		goto err_umem_destroy;

	devx_umem_reg_cmd_free(&cmd);

	return 0;

err_umem_destroy:
	mlx5_cmd_exec(obj->mdev, obj->dinbox, obj->dinlen, cmd.out, sizeof(cmd.out));
err_umem_reg_cmd_free:
	devx_umem_reg_cmd_free(&cmd);
err_umem_release:
	ib_umem_release(obj->umem);
err_obj_free:
	kfree(obj);
	return err;
}

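/* Destroy the UMEM object on the device and release the pinned pages. */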
static int devx_umem_cleanup(struct ib_uobject *uobject,
			     enum rdma_remove_reason why)
{
	struct devx_umem *obj = uobject->object;
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)];
	int err;

	err = mlx5_cmd_exec(obj->mdev, obj->dinbox, obj->dinlen, out, sizeof(out));
	if (ib_is_destroy_retryable(err, why, uobject))
		return err;

	ib_umem_release(obj->umem);
	kfree(obj);
	return 0;
}

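/*
 * uverbs ioctl declarations: the DEVX methods, their attributes and the
 * object types they belong to, gathered into the tree returned by
 * mlx5_ib_get_devx_tree().
 */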
DECLARE_UVERBS_NAMED_METHOD(
	MLX5_IB_METHOD_DEVX_UMEM_REG,
	UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_UMEM_REG_HANDLE,
			MLX5_IB_OBJECT_DEVX_UMEM,
			UVERBS_ACCESS_NEW,
			UA_MANDATORY),
	UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_UMEM_REG_ADDR,
			   UVERBS_ATTR_TYPE(u64),
			   UA_MANDATORY),
	UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_UMEM_REG_LEN,
			   UVERBS_ATTR_TYPE(u64),
			   UA_MANDATORY),
	UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_UMEM_REG_ACCESS,
			   UVERBS_ATTR_TYPE(u32),
			   UA_MANDATORY),
	UVERBS_ATTR_PTR_OUT(MLX5_IB_ATTR_DEVX_UMEM_REG_OUT_ID,
			    UVERBS_ATTR_TYPE(u32),
			    UA_MANDATORY));

DECLARE_UVERBS_NAMED_METHOD_DESTROY(
	MLX5_IB_METHOD_DEVX_UMEM_DEREG,
	UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_UMEM_DEREG_HANDLE,
			MLX5_IB_OBJECT_DEVX_UMEM,
			UVERBS_ACCESS_DESTROY,
			UA_MANDATORY));

DECLARE_UVERBS_NAMED_METHOD(
	MLX5_IB_METHOD_DEVX_QUERY_EQN,
	UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_QUERY_EQN_USER_VEC,
			   UVERBS_ATTR_TYPE(u32),
			   UA_MANDATORY),
	UVERBS_ATTR_PTR_OUT(MLX5_IB_ATTR_DEVX_QUERY_EQN_DEV_EQN,
			    UVERBS_ATTR_TYPE(u32),
			    UA_MANDATORY));

DECLARE_UVERBS_NAMED_METHOD(
	MLX5_IB_METHOD_DEVX_QUERY_UAR,
	UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_QUERY_UAR_USER_IDX,
			   UVERBS_ATTR_TYPE(u32),
			   UA_MANDATORY),
	UVERBS_ATTR_PTR_OUT(MLX5_IB_ATTR_DEVX_QUERY_UAR_DEV_IDX,
			    UVERBS_ATTR_TYPE(u32),
			    UA_MANDATORY));

DECLARE_UVERBS_NAMED_METHOD(
	MLX5_IB_METHOD_DEVX_OTHER,
	UVERBS_ATTR_PTR_IN(
		MLX5_IB_ATTR_DEVX_OTHER_CMD_IN,
		UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr)),
		UA_MANDATORY,
		UA_ALLOC_AND_COPY),
	UVERBS_ATTR_PTR_OUT(
		MLX5_IB_ATTR_DEVX_OTHER_CMD_OUT,
		UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_out_cmd_hdr)),
		UA_MANDATORY));

DECLARE_UVERBS_NAMED_METHOD(
	MLX5_IB_METHOD_DEVX_OBJ_CREATE,
	UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_OBJ_CREATE_HANDLE,
			MLX5_IB_OBJECT_DEVX_OBJ,
			UVERBS_ACCESS_NEW,
			UA_MANDATORY),
	UVERBS_ATTR_PTR_IN(
		MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_IN,
		UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr)),
		UA_MANDATORY,
		UA_ALLOC_AND_COPY),
	UVERBS_ATTR_PTR_OUT(
		MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_OUT,
		UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_out_cmd_hdr)),
		UA_MANDATORY));

DECLARE_UVERBS_NAMED_METHOD_DESTROY(
	MLX5_IB_METHOD_DEVX_OBJ_DESTROY,
	UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_OBJ_DESTROY_HANDLE,
			MLX5_IB_OBJECT_DEVX_OBJ,
			UVERBS_ACCESS_DESTROY,
			UA_MANDATORY));

DECLARE_UVERBS_NAMED_METHOD(
	MLX5_IB_METHOD_DEVX_OBJ_MODIFY,
	UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_OBJ_MODIFY_HANDLE,
			MLX5_IB_OBJECT_DEVX_OBJ,
			UVERBS_ACCESS_WRITE,
			UA_MANDATORY),
	UVERBS_ATTR_PTR_IN(
		MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_IN,
		UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr)),
		UA_MANDATORY,
		UA_ALLOC_AND_COPY),
	UVERBS_ATTR_PTR_OUT(
		MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_OUT,
		UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_out_cmd_hdr)),
		UA_MANDATORY));

DECLARE_UVERBS_NAMED_METHOD(
	MLX5_IB_METHOD_DEVX_OBJ_QUERY,
	UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_OBJ_QUERY_HANDLE,
			MLX5_IB_OBJECT_DEVX_OBJ,
			UVERBS_ACCESS_READ,
			UA_MANDATORY),
	UVERBS_ATTR_PTR_IN(
		MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_IN,
		UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr)),
		UA_MANDATORY,
		UA_ALLOC_AND_COPY),
	UVERBS_ATTR_PTR_OUT(
		MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_OUT,
		UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_out_cmd_hdr)),
		UA_MANDATORY));

DECLARE_UVERBS_GLOBAL_METHODS(MLX5_IB_OBJECT_DEVX,
			      &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OTHER),
			      &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_QUERY_UAR),
			      &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_QUERY_EQN));

DECLARE_UVERBS_NAMED_OBJECT(MLX5_IB_OBJECT_DEVX_OBJ,
			    UVERBS_TYPE_ALLOC_IDR(devx_obj_cleanup),
			    &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OBJ_CREATE),
			    &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OBJ_DESTROY),
			    &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OBJ_MODIFY),
			    &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OBJ_QUERY));

DECLARE_UVERBS_NAMED_OBJECT(MLX5_IB_OBJECT_DEVX_UMEM,
			    UVERBS_TYPE_ALLOC_IDR(devx_umem_cleanup),
			    &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_UMEM_REG),
			    &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_UMEM_DEREG));

DECLARE_UVERBS_OBJECT_TREE(devx_objects,
			   &UVERBS_OBJECT(MLX5_IB_OBJECT_DEVX),
			   &UVERBS_OBJECT(MLX5_IB_OBJECT_DEVX_OBJ),
			   &UVERBS_OBJECT(MLX5_IB_OBJECT_DEVX_UMEM));

const struct uverbs_object_tree_def *mlx5_ib_get_devx_tree(void)
{
	return &devx_objects;
}