/*
 *  linux/drivers/net/ehea/ehea_phyp.c
 *
 *  eHEA ethernet device driver for IBM eServer System p
 *
 *  (C) Copyright IBM Corp. 2006
 *
 *  Authors:
 *       Christoph Raisch <raisch@de.ibm.com>
 *       Jan-Bernd Themann <themann@de.ibm.com>
 *       Thomas Klein <tklein@de.ibm.com>
 *
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include "ehea_phyp.h"

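/*
 * Encode a queue length for the hypervisor interface as a power-of-two
 * order: returns the largest n for which (2^n - 1) is still smaller than
 * queue_entries (0 for queue_entries <= 1).
 */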
static inline u16 get_order_of_qentries(u16 queue_entries)
{
	u8 ld = 1;	/* binary logarithm (logarithmus dualis) */
	while (((1U << ld) - 1) < queue_entries)
		ld++;
	return ld - 1;
}

/* Defines for H_CALL H_ALLOC_RESOURCE */
#define H_ALL_RES_TYPE_QP	1
#define H_ALL_RES_TYPE_CQ	2
#define H_ALL_RES_TYPE_EQ	3
#define H_ALL_RES_TYPE_MR	5
#define H_ALL_RES_TYPE_MW	6

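/*
 * Wrapper around plpar_hcall_9arg_9ret(): retries the hcall up to five
 * times while the hypervisor reports a long-busy return code, sleeping
 * for the hinted interval in between, and logs any call that fails.
 */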
static long ehea_hcall_9arg_9ret(unsigned long opcode,
				 unsigned long arg1, unsigned long arg2,
				 unsigned long arg3, unsigned long arg4,
				 unsigned long arg5, unsigned long arg6,
				 unsigned long arg7, unsigned long arg8,
				 unsigned long arg9, unsigned long *out1,
				 unsigned long *out2, unsigned long *out3,
				 unsigned long *out4, unsigned long *out5,
				 unsigned long *out6, unsigned long *out7,
				 unsigned long *out8, unsigned long *out9)
{
	long hret;
	int i, sleep_msecs;

	for (i = 0; i < 5; i++) {
		hret = plpar_hcall_9arg_9ret(opcode, arg1, arg2, arg3, arg4,
					     arg5, arg6, arg7, arg8, arg9,
					     out1, out2, out3, out4, out5,
					     out6, out7, out8, out9);
		if (H_IS_LONG_BUSY(hret)) {
			sleep_msecs = get_longbusy_msecs(hret);
			msleep_interruptible(sleep_msecs);
			continue;
		}

		if (hret < H_SUCCESS)
			ehea_error("op=%lx hret=%lx "
				   "i1=%lx i2=%lx i3=%lx i4=%lx i5=%lx i6=%lx "
				   "i7=%lx i8=%lx i9=%lx "
				   "o1=%lx o2=%lx o3=%lx o4=%lx o5=%lx o6=%lx "
				   "o7=%lx o8=%lx o9=%lx",
				   opcode, hret, arg1, arg2, arg3, arg4, arg5,
				   arg6, arg7, arg8, arg9, *out1, *out2, *out3,
				   *out4, *out5, *out6, *out7, *out8, *out9);
		return hret;
	}
	return H_BUSY;
}

u64 ehea_h_query_ehea_qp(const u64 adapter_handle, const u8 qp_category,
			 const u64 qp_handle, const u64 sel_mask, void *cb_addr)
{
	u64 dummy;

	if ((((u64)cb_addr) & (PAGE_SIZE - 1)) != 0) {
		ehea_error("not on page boundary");
		return H_PARAMETER;
	}

	return ehea_hcall_9arg_9ret(H_QUERY_HEA_QP,
				    adapter_handle,		/* R4 */
				    qp_category,		/* R5 */
				    qp_handle,			/* R6 */
				    sel_mask,			/* R7 */
				    virt_to_abs(cb_addr),	/* R8 */
				    0, 0, 0, 0,			/* R9-R12 */
				    &dummy,			/* R4 */
				    &dummy,			/* R5 */
				    &dummy,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
}

/* input param R5 */
#define H_ALL_RES_QP_EQPO		EHEA_BMASK_IBM(9, 11)
#define H_ALL_RES_QP_QPP		EHEA_BMASK_IBM(12, 12)
#define H_ALL_RES_QP_RQR		EHEA_BMASK_IBM(13, 15)
#define H_ALL_RES_QP_EQEG		EHEA_BMASK_IBM(16, 16)
#define H_ALL_RES_QP_LL_QP		EHEA_BMASK_IBM(17, 17)
#define H_ALL_RES_QP_DMA128		EHEA_BMASK_IBM(19, 19)
#define H_ALL_RES_QP_HSM		EHEA_BMASK_IBM(20, 21)
#define H_ALL_RES_QP_SIGT		EHEA_BMASK_IBM(22, 23)
#define H_ALL_RES_QP_TENURE		EHEA_BMASK_IBM(48, 55)
#define H_ALL_RES_QP_RES_TYP		EHEA_BMASK_IBM(56, 63)

/* input param R9 */
#define H_ALL_RES_QP_TOKEN		EHEA_BMASK_IBM(0, 31)
#define H_ALL_RES_QP_PD			EHEA_BMASK_IBM(32, 63)

/* input param R10 */
#define H_ALL_RES_QP_MAX_SWQE		EHEA_BMASK_IBM(4, 7)
#define H_ALL_RES_QP_MAX_R1WQE		EHEA_BMASK_IBM(12, 15)
#define H_ALL_RES_QP_MAX_R2WQE		EHEA_BMASK_IBM(20, 23)
#define H_ALL_RES_QP_MAX_R3WQE		EHEA_BMASK_IBM(28, 31)
/* Max Send Scatter Gather Elements */
#define H_ALL_RES_QP_MAX_SSGE		EHEA_BMASK_IBM(37, 39)
/* Max Receive SG Elements RQ1 */
#define H_ALL_RES_QP_MAX_R1SGE		EHEA_BMASK_IBM(45, 47)
#define H_ALL_RES_QP_MAX_R2SGE		EHEA_BMASK_IBM(53, 55)
#define H_ALL_RES_QP_MAX_R3SGE		EHEA_BMASK_IBM(61, 63)

/* input param R11 */
/* max swqe immediate data length */
#define H_ALL_RES_QP_SWQE_IDL		EHEA_BMASK_IBM(0, 7)
#define H_ALL_RES_QP_PORT_NUM		EHEA_BMASK_IBM(48, 63)

/* input param R12 */
#define H_ALL_RES_QP_TH_RQ2		EHEA_BMASK_IBM(0, 15)	/* Threshold RQ2 */
#define H_ALL_RES_QP_TH_RQ3		EHEA_BMASK_IBM(16, 31)	/* Threshold RQ3 */

/* output param R6 */
#define H_ALL_RES_QP_ACT_SWQE		EHEA_BMASK_IBM(0, 15)
#define H_ALL_RES_QP_ACT_R1WQE		EHEA_BMASK_IBM(16, 31)
#define H_ALL_RES_QP_ACT_R2WQE		EHEA_BMASK_IBM(32, 47)
#define H_ALL_RES_QP_ACT_R3WQE		EHEA_BMASK_IBM(48, 63)

/* output param R7 */
#define H_ALL_RES_QP_ACT_SSGE		EHEA_BMASK_IBM(0, 7)
#define H_ALL_RES_QP_ACT_R1SGE		EHEA_BMASK_IBM(8, 15)
#define H_ALL_RES_QP_ACT_R2SGE		EHEA_BMASK_IBM(16, 23)
#define H_ALL_RES_QP_ACT_R3SGE		EHEA_BMASK_IBM(24, 31)
#define H_ALL_RES_QP_ACT_SWQE_IDL	EHEA_BMASK_IBM(32, 39)

/* output param R8, R9 */
#define H_ALL_RES_QP_SIZE_SQ		EHEA_BMASK_IBM(0, 31)
#define H_ALL_RES_QP_SIZE_RQ1		EHEA_BMASK_IBM(32, 63)
#define H_ALL_RES_QP_SIZE_RQ2		EHEA_BMASK_IBM(0, 31)
#define H_ALL_RES_QP_SIZE_RQ3		EHEA_BMASK_IBM(32, 63)

/* output param R11, R12 */
#define H_ALL_RES_QP_LIOBN_SQ		EHEA_BMASK_IBM(0, 31)
#define H_ALL_RES_QP_LIOBN_RQ1		EHEA_BMASK_IBM(32, 63)
#define H_ALL_RES_QP_LIOBN_RQ2		EHEA_BMASK_IBM(0, 31)
#define H_ALL_RES_QP_LIOBN_RQ3		EHEA_BMASK_IBM(32, 63)

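/*
 * Allocate a queue pair: the requested queue geometry is packed into the
 * R5 and R9-R12 input registers, and the actual values granted by the
 * hypervisor are unpacked from the output registers back into init_attr.
 */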
u64 ehea_h_alloc_resource_qp(const u64 adapter_handle,
			     struct ehea_qp_init_attr *init_attr, const u32 pd,
			     u64 *qp_handle, struct h_epas *h_epas)
{
	u64 hret;

	u64 allocate_controls =
	    EHEA_BMASK_SET(H_ALL_RES_QP_EQPO, init_attr->low_lat_rq1 ? 1 : 0)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_QPP, 0)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_RQR, 6)	/* rq1 & rq2 & rq3 */
	    | EHEA_BMASK_SET(H_ALL_RES_QP_EQEG, 0)	/* EQE gen. disabled */
	    | EHEA_BMASK_SET(H_ALL_RES_QP_LL_QP, init_attr->low_lat_rq1)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_DMA128, 0)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_HSM, 0)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_SIGT, init_attr->signalingtype)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_RES_TYP, H_ALL_RES_TYPE_QP);

	u64 r9_reg = EHEA_BMASK_SET(H_ALL_RES_QP_PD, pd)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_TOKEN, init_attr->qp_token);

	u64 max_r10_reg =
	    EHEA_BMASK_SET(H_ALL_RES_QP_MAX_SWQE,
			   get_order_of_qentries(init_attr->max_nr_send_wqes))
	    | EHEA_BMASK_SET(H_ALL_RES_QP_MAX_R1WQE,
			     get_order_of_qentries(init_attr->max_nr_rwqes_rq1))
	    | EHEA_BMASK_SET(H_ALL_RES_QP_MAX_R2WQE,
			     get_order_of_qentries(init_attr->max_nr_rwqes_rq2))
	    | EHEA_BMASK_SET(H_ALL_RES_QP_MAX_R3WQE,
			     get_order_of_qentries(init_attr->max_nr_rwqes_rq3))
	    | EHEA_BMASK_SET(H_ALL_RES_QP_MAX_SSGE, init_attr->wqe_size_enc_sq)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_MAX_R1SGE,
			     init_attr->wqe_size_enc_rq1)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_MAX_R2SGE,
			     init_attr->wqe_size_enc_rq2)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_MAX_R3SGE,
			     init_attr->wqe_size_enc_rq3);

	u64 r11_in =
	    EHEA_BMASK_SET(H_ALL_RES_QP_SWQE_IDL, init_attr->swqe_imm_data_len)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_PORT_NUM, init_attr->port_nr);
	u64 threshold =
	    EHEA_BMASK_SET(H_ALL_RES_QP_TH_RQ2, init_attr->rq2_threshold)
	    | EHEA_BMASK_SET(H_ALL_RES_QP_TH_RQ3, init_attr->rq3_threshold);

	u64 r5_out = 0;
	u64 r6_out = 0;
	u64 r7_out = 0;
	u64 r8_out = 0;
	u64 r9_out = 0;
	u64 g_la_user_out = 0;
	u64 r11_out = 0;
	u64 r12_out = 0;

	hret = ehea_hcall_9arg_9ret(H_ALLOC_HEA_RESOURCE,
				    adapter_handle,		/* R4 */
				    allocate_controls,		/* R5 */
				    init_attr->send_cq_handle,	/* R6 */
				    init_attr->recv_cq_handle,	/* R7 */
				    init_attr->aff_eq_handle,	/* R8 */
				    r9_reg,			/* R9 */
				    max_r10_reg,		/* R10 */
				    r11_in,			/* R11 */
				    threshold,			/* R12 */
				    qp_handle,			/* R4 */
				    &r5_out,			/* R5 */
				    &r6_out,			/* R6 */
				    &r7_out,			/* R7 */
				    &r8_out,			/* R8 */
				    &r9_out,			/* R9 */
				    &g_la_user_out,		/* R10 */
				    &r11_out,			/* R11 */
				    &r12_out);			/* R12 */

	init_attr->qp_nr = (u32)r5_out;

	init_attr->act_nr_send_wqes =
	    (u16)EHEA_BMASK_GET(H_ALL_RES_QP_ACT_SWQE, r6_out);
	init_attr->act_nr_rwqes_rq1 =
	    (u16)EHEA_BMASK_GET(H_ALL_RES_QP_ACT_R1WQE, r6_out);
	init_attr->act_nr_rwqes_rq2 =
	    (u16)EHEA_BMASK_GET(H_ALL_RES_QP_ACT_R2WQE, r6_out);
	init_attr->act_nr_rwqes_rq3 =
	    (u16)EHEA_BMASK_GET(H_ALL_RES_QP_ACT_R3WQE, r6_out);

	init_attr->act_wqe_size_enc_sq = init_attr->wqe_size_enc_sq;
	init_attr->act_wqe_size_enc_rq1 = init_attr->wqe_size_enc_rq1;
	init_attr->act_wqe_size_enc_rq2 = init_attr->wqe_size_enc_rq2;
	init_attr->act_wqe_size_enc_rq3 = init_attr->wqe_size_enc_rq3;

	init_attr->nr_sq_pages =
	    (u32)EHEA_BMASK_GET(H_ALL_RES_QP_SIZE_SQ, r8_out);
	init_attr->nr_rq1_pages =
	    (u32)EHEA_BMASK_GET(H_ALL_RES_QP_SIZE_RQ1, r8_out);
	init_attr->nr_rq2_pages =
	    (u32)EHEA_BMASK_GET(H_ALL_RES_QP_SIZE_RQ2, r9_out);
	init_attr->nr_rq3_pages =
	    (u32)EHEA_BMASK_GET(H_ALL_RES_QP_SIZE_RQ3, r9_out);

	init_attr->liobn_sq =
	    (u32)EHEA_BMASK_GET(H_ALL_RES_QP_LIOBN_SQ, r11_out);
	init_attr->liobn_rq1 =
	    (u32)EHEA_BMASK_GET(H_ALL_RES_QP_LIOBN_RQ1, r11_out);
	init_attr->liobn_rq2 =
	    (u32)EHEA_BMASK_GET(H_ALL_RES_QP_LIOBN_RQ2, r12_out);
	init_attr->liobn_rq3 =
	    (u32)EHEA_BMASK_GET(H_ALL_RES_QP_LIOBN_RQ3, r12_out);

	if (!hret)
		hcp_epas_ctor(h_epas, g_la_user_out, g_la_user_out);

	return hret;
}

u64 ehea_h_alloc_resource_cq(const u64 adapter_handle,
			     struct ehea_cq_attr *cq_attr,
			     u64 *cq_handle, struct h_epas *epas)
{
	u64 hret, dummy, act_nr_of_cqes_out, act_pages_out;
	u64 g_la_privileged_out, g_la_user_out;

	hret = ehea_hcall_9arg_9ret(H_ALLOC_HEA_RESOURCE,
				    adapter_handle,		/* R4 */
				    H_ALL_RES_TYPE_CQ,		/* R5 */
				    cq_attr->eq_handle,		/* R6 */
				    cq_attr->cq_token,		/* R7 */
				    cq_attr->max_nr_of_cqes,	/* R8 */
				    0, 0, 0, 0,			/* R9-R12 */
				    cq_handle,			/* R4 */
				    &dummy,			/* R5 */
				    &dummy,			/* R6 */
				    &act_nr_of_cqes_out,	/* R7 */
				    &act_pages_out,		/* R8 */
				    &g_la_privileged_out,	/* R9 */
				    &g_la_user_out,		/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */

	cq_attr->act_nr_of_cqes = act_nr_of_cqes_out;
	cq_attr->nr_pages = act_pages_out;

	if (!hret)
		hcp_epas_ctor(epas, g_la_privileged_out, g_la_user_out);

	return hret;
}

/* input param R5 */
#define H_ALL_RES_EQ_NEQ		EHEA_BMASK_IBM(0, 0)
#define H_ALL_RES_EQ_NON_NEQ_ISN	EHEA_BMASK_IBM(6, 7)
#define H_ALL_RES_EQ_INH_EQE_GEN	EHEA_BMASK_IBM(16, 16)
#define H_ALL_RES_EQ_RES_TYPE		EHEA_BMASK_IBM(56, 63)
/* input param R6 */
#define H_ALL_RES_EQ_MAX_EQE		EHEA_BMASK_IBM(32, 63)

/* output param R6 */
#define H_ALL_RES_EQ_LIOBN		EHEA_BMASK_IBM(32, 63)

/* output param R7 */
#define H_ALL_RES_EQ_ACT_EQE		EHEA_BMASK_IBM(32, 63)

/* output param R8 */
#define H_ALL_RES_EQ_ACT_PS		EHEA_BMASK_IBM(32, 63)

/* output param R9 */
#define H_ALL_RES_EQ_ACT_EQ_IST_C	EHEA_BMASK_IBM(30, 31)
#define H_ALL_RES_EQ_ACT_EQ_IST_1	EHEA_BMASK_IBM(40, 63)

/* output param R10 */
#define H_ALL_RES_EQ_ACT_EQ_IST_2	EHEA_BMASK_IBM(40, 63)

/* output param R11 */
#define H_ALL_RES_EQ_ACT_EQ_IST_3	EHEA_BMASK_IBM(40, 63)

/* output param R12 */
#define H_ALL_RES_EQ_ACT_EQ_IST_4	EHEA_BMASK_IBM(40, 63)

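/*
 * Allocate an event queue and report the actual number of EQEs, the
 * page count and the four IST values (ist1-ist4) back in eq_attr.
 */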
u64 ehea_h_alloc_resource_eq(const u64 adapter_handle,
			     struct ehea_eq_attr *eq_attr, u64 *eq_handle)
{
	u64 hret, dummy, eq_liobn, allocate_controls;
	u64 ist1_out, ist2_out, ist3_out, ist4_out;
	u64 act_nr_of_eqes_out, act_pages_out;

	/* resource type */
	allocate_controls =
	    EHEA_BMASK_SET(H_ALL_RES_EQ_RES_TYPE, H_ALL_RES_TYPE_EQ)
	    | EHEA_BMASK_SET(H_ALL_RES_EQ_NEQ, eq_attr->type ? 1 : 0)
	    | EHEA_BMASK_SET(H_ALL_RES_EQ_INH_EQE_GEN, !eq_attr->eqe_gen)
	    | EHEA_BMASK_SET(H_ALL_RES_EQ_NON_NEQ_ISN, 1);

	hret = ehea_hcall_9arg_9ret(H_ALLOC_HEA_RESOURCE,
				    adapter_handle,		/* R4 */
				    allocate_controls,		/* R5 */
				    eq_attr->max_nr_of_eqes,	/* R6 */
				    0, 0, 0, 0, 0, 0,		/* R7-R12 */
				    eq_handle,			/* R4 */
				    &dummy,			/* R5 */
				    &eq_liobn,			/* R6 */
				    &act_nr_of_eqes_out,	/* R7 */
				    &act_pages_out,		/* R8 */
				    &ist1_out,			/* R9 */
				    &ist2_out,			/* R10 */
				    &ist3_out,			/* R11 */
				    &ist4_out);			/* R12 */

	eq_attr->act_nr_of_eqes = act_nr_of_eqes_out;
	eq_attr->nr_pages = act_pages_out;
	eq_attr->ist1 = ist1_out;
	eq_attr->ist2 = ist2_out;
	eq_attr->ist3 = ist3_out;
	eq_attr->ist4 = ist4_out;

	return hret;
}

u64 ehea_h_modify_ehea_qp(const u64 adapter_handle, const u8 cat,
			  const u64 qp_handle, const u64 sel_mask,
			  void *cb_addr, u64 *inv_attr_id, u64 *proc_mask,
			  u16 *out_swr, u16 *out_rwr)
{
	u64 hret, dummy, act_out_swr, act_out_rwr;

	if ((((u64)cb_addr) & (PAGE_SIZE - 1)) != 0) {
		ehea_error("not on page boundary");
		return H_PARAMETER;
	}

	hret = ehea_hcall_9arg_9ret(H_MODIFY_HEA_QP,
				    adapter_handle,		/* R4 */
				    (u64) cat,			/* R5 */
				    qp_handle,			/* R6 */
				    sel_mask,			/* R7 */
				    virt_to_abs(cb_addr),	/* R8 */
				    0, 0, 0, 0,			/* R9-R12 */
				    inv_attr_id,		/* R4 */
				    &dummy,			/* R5 */
				    &dummy,			/* R6 */
				    &act_out_swr,		/* R7 */
				    &act_out_rwr,		/* R8 */
				    proc_mask,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
	*out_swr = act_out_swr;
	*out_rwr = act_out_rwr;

	return hret;
}

u64 ehea_h_register_rpage(const u64 adapter_handle, const u8 pagesize,
			  const u8 queue_type, const u64 resource_handle,
			  const u64 log_pageaddr, u64 count)
{
	u64 dummy, reg_control;

	reg_control = EHEA_BMASK_SET(H_REG_RPAGE_PAGE_SIZE, pagesize)
	    | EHEA_BMASK_SET(H_REG_RPAGE_QT, queue_type);

	return ehea_hcall_9arg_9ret(H_REGISTER_HEA_RPAGES,
				    adapter_handle,		/* R4 */
				    reg_control,		/* R5 */
				    resource_handle,		/* R6 */
				    log_pageaddr,		/* R7 */
				    count,			/* R8 */
				    0, 0, 0, 0,			/* R9-R12 */
				    &dummy,			/* R4 */
				    &dummy,			/* R5 */
				    &dummy,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
}

u64 ehea_h_register_smr(const u64 adapter_handle, const u64 orig_mr_handle,
			const u64 vaddr_in, const u32 access_ctrl, const u32 pd,
			struct ehea_mr *mr)
{
	u64 hret, dummy, lkey_out;

	hret = ehea_hcall_9arg_9ret(H_REGISTER_SMR,
				    adapter_handle,		/* R4 */
				    orig_mr_handle,		/* R5 */
				    vaddr_in,			/* R6 */
				    (((u64)access_ctrl) << 32ULL), /* R7 */
				    pd,				/* R8 */
				    0, 0, 0, 0,			/* R9-R12 */
				    &mr->handle,		/* R4 */
				    &dummy,			/* R5 */
				    &lkey_out,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
	mr->lkey = (u32)lkey_out;

	return hret;
}

u64 ehea_h_disable_and_get_hea(const u64 adapter_handle, const u64 qp_handle)
{
	u64 hret, dummy, ladr_next_sq_wqe_out;
	u64 ladr_next_rq1_wqe_out, ladr_next_rq2_wqe_out, ladr_next_rq3_wqe_out;

	hret = ehea_hcall_9arg_9ret(H_DISABLE_AND_GET_HEA,
				    adapter_handle,		/* R4 */
				    H_DISABLE_GET_EHEA_WQE_P,	/* R5 */
				    qp_handle,			/* R6 */
				    0, 0, 0, 0, 0, 0,		/* R7-R12 */
				    &ladr_next_sq_wqe_out,	/* R4 */
				    &ladr_next_rq1_wqe_out,	/* R5 */
				    &ladr_next_rq2_wqe_out,	/* R6 */
				    &ladr_next_rq3_wqe_out,	/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
	return hret;
}

u64 ehea_h_free_resource(const u64 adapter_handle, const u64 res_handle)
{
	u64 dummy;

	return ehea_hcall_9arg_9ret(H_FREE_RESOURCE,
				    adapter_handle,		/* R4 */
				    res_handle,			/* R5 */
				    0, 0, 0, 0, 0, 0, 0,	/* R6-R12 */
				    &dummy,			/* R4 */
				    &dummy,			/* R5 */
				    &dummy,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
}

u64 ehea_h_alloc_resource_mr(const u64 adapter_handle, const u64 vaddr,
			     const u64 length, const u32 access_ctrl,
			     const u32 pd, u64 *mr_handle, u32 *lkey)
{
	u64 hret, dummy, lkey_out;

	hret = ehea_hcall_9arg_9ret(H_ALLOC_HEA_RESOURCE,
				    adapter_handle,		/* R4 */
				    H_ALL_RES_TYPE_MR,		/* R5 */
				    vaddr,			/* R6 */
				    length,			/* R7 */
				    (((u64) access_ctrl) << 32ULL), /* R8 */
				    pd,				/* R9 */
				    0, 0, 0,			/* R10-R12 */
				    mr_handle,			/* R4 */
				    &dummy,			/* R5 */
				    &lkey_out,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
	*lkey = (u32) lkey_out;

	return hret;
}

u64 ehea_h_register_rpage_mr(const u64 adapter_handle, const u64 mr_handle,
			     const u8 pagesize, const u8 queue_type,
			     const u64 log_pageaddr, const u64 count)
{
	if ((count > 1) && (log_pageaddr & 0xfff)) {
		ehea_error("not on page boundary");
		return H_PARAMETER;
	}

	return ehea_h_register_rpage(adapter_handle, pagesize,
				     queue_type, mr_handle,
				     log_pageaddr, count);
}

u64 ehea_h_query_ehea(const u64 adapter_handle, void *cb_addr)
{
	u64 hret, dummy, cb_logaddr;

	cb_logaddr = virt_to_abs(cb_addr);

	hret = ehea_hcall_9arg_9ret(H_QUERY_HEA,
				    adapter_handle,		/* R4 */
				    cb_logaddr,			/* R5 */
				    0, 0, 0, 0, 0, 0, 0,	/* R6-R12 */
				    &dummy,			/* R4 */
				    &dummy,			/* R5 */
				    &dummy,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
#ifdef DEBUG
	ehea_dmp(cb_addr, sizeof(struct hcp_query_ehea), "hcp_query_ehea");
#endif
	return hret;
}

u64 ehea_h_query_ehea_port(const u64 adapter_handle, const u16 port_num,
			   const u8 cb_cat, const u64 select_mask,
			   void *cb_addr)
{
	u64 port_info, dummy;
	u64 cb_logaddr = virt_to_abs(cb_addr);
	u64 arr_index = 0;

	port_info = EHEA_BMASK_SET(H_MEHEAPORT_CAT, cb_cat)
	    | EHEA_BMASK_SET(H_MEHEAPORT_PN, port_num);

	return ehea_hcall_9arg_9ret(H_QUERY_HEA_PORT,
				    adapter_handle,		/* R4 */
				    port_info,			/* R5 */
				    select_mask,		/* R6 */
				    arr_index,			/* R7 */
				    cb_logaddr,			/* R8 */
				    0, 0, 0, 0,			/* R9-R12 */
				    &dummy,			/* R4 */
				    &dummy,			/* R5 */
				    &dummy,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
}

u64 ehea_h_modify_ehea_port(const u64 adapter_handle, const u16 port_num,
			    const u8 cb_cat, const u64 select_mask,
			    void *cb_addr)
{
	u64 port_info, dummy, inv_attr_ident, proc_mask;
	u64 arr_index = 0;
	u64 cb_logaddr = virt_to_abs(cb_addr);

	port_info = EHEA_BMASK_SET(H_MEHEAPORT_CAT, cb_cat)
	    | EHEA_BMASK_SET(H_MEHEAPORT_PN, port_num);
#ifdef DEBUG
	ehea_dump(cb_addr, sizeof(struct hcp_ehea_port_cb0), "Before HCALL");
#endif
	return ehea_hcall_9arg_9ret(H_MODIFY_HEA_PORT,
				    adapter_handle,		/* R4 */
				    port_info,			/* R5 */
				    select_mask,		/* R6 */
				    arr_index,			/* R7 */
				    cb_logaddr,			/* R8 */
				    0, 0, 0, 0,			/* R9-R12 */
				    &inv_attr_ident,		/* R4 */
				    &proc_mask,			/* R5 */
				    &dummy,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
}

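/*
 * Register or deregister a broadcast/multicast MAC address for a port;
 * hcall_id selects which of the two operations is performed.
 */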
u64 ehea_h_reg_dereg_bcmc(const u64 adapter_handle, const u16 port_num,
			  const u8 reg_type, const u64 mc_mac_addr,
			  const u16 vlan_id, const u32 hcall_id)
{
	u64 r5_port_num, r6_reg_type, r7_mc_mac_addr, r8_vlan_id, dummy;
	u64 mac_addr = mc_mac_addr >> 16;

	r5_port_num = EHEA_BMASK_SET(H_REGBCMC_PN, port_num);
	r6_reg_type = EHEA_BMASK_SET(H_REGBCMC_REGTYPE, reg_type);
	r7_mc_mac_addr = EHEA_BMASK_SET(H_REGBCMC_MACADDR, mac_addr);
	r8_vlan_id = EHEA_BMASK_SET(H_REGBCMC_VLANID, vlan_id);

	return ehea_hcall_9arg_9ret(hcall_id,
				    adapter_handle,		/* R4 */
				    r5_port_num,		/* R5 */
				    r6_reg_type,		/* R6 */
				    r7_mc_mac_addr,		/* R7 */
				    r8_vlan_id,			/* R8 */
				    0, 0, 0, 0,			/* R9-R12 */
				    &dummy,			/* R4 */
				    &dummy,			/* R5 */
				    &dummy,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
}

u64 ehea_h_reset_events(const u64 adapter_handle, const u64 neq_handle,
			const u64 event_mask)
{
	u64 dummy;

	return ehea_hcall_9arg_9ret(H_RESET_EVENTS,
				    adapter_handle,		/* R4 */
				    neq_handle,			/* R5 */
				    event_mask,			/* R6 */
				    0, 0, 0, 0, 0, 0,		/* R7-R12 */
				    &dummy,			/* R4 */
				    &dummy,			/* R5 */
				    &dummy,			/* R6 */
				    &dummy,			/* R7 */
				    &dummy,			/* R8 */
				    &dummy,			/* R9 */
				    &dummy,			/* R10 */
				    &dummy,			/* R11 */
				    &dummy);			/* R12 */
}