/*
 * Copyright (c) 2016, Mellanox Technologies. All rights reserved.
 * Copyright (c) 2017-2018, Broadcom Limited. All rights reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses. You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 *     Redistribution and use in source and binary forms, with or
 *     without modification, are permitted provided that the following
 *     conditions are met:
 *
 *      - Redistributions of source code must retain the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer.
 *
 *      - Redistributions in binary form must reproduce the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer in the documentation and/or other materials
 *        provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#ifndef NET_DIM_H
#define NET_DIM_H

#include <linux/module.h>

struct net_dim_cq_moder {
	u16 usec;
	u16 pkts;
	u8 cq_period_mode;
};

struct net_dim_sample {
	ktime_t time;
	u32 pkt_ctr;
	u32 byte_ctr;
	u16 event_ctr;
};

struct net_dim_stats {
	int ppms; /* packets per msec */
	int bpms; /* bytes per msec */
	int epms; /* events per msec */
};

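/* struct net_dim - Dynamic Interrupt Moderation context, typically embedded
 * per completion queue/ring by the driver.
 *
 * @state:        measurement state machine (NET_DIM_START_MEASURE,
 *                NET_DIM_MEASURE_IN_PROGRESS, NET_DIM_APPLY_NEW_PROFILE)
 * @prev_stats:   rates from the previous window, the reference used when
 *                deciding whether the last step made things better or worse
 * @start_sample: counters captured at the start of the current window
 * @work:         scheduled by net_dim() when a new profile was chosen, so
 *                the driver can apply it outside the interrupt/NAPI path
 * @profile_ix:   current index into the rx_profile/tx_profile tables
 * @mode:         CQ period mode (EQE- or CQE-based)
 * @tune_state:   search direction or parking state of the tuner
 * @steps_right, @steps_left, @tired: bookkeeping for the step/park logic
 */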
struct net_dim { /* Adaptive Moderation */
	u8 state;
	struct net_dim_stats prev_stats;
	struct net_dim_sample start_sample;
	struct work_struct work;
	u8 profile_ix;
	u8 mode;
	u8 tune_state;
	u8 steps_right;
	u8 steps_left;
	u8 tired;
};

enum {
	NET_DIM_CQ_PERIOD_MODE_START_FROM_EQE = 0x0,
	NET_DIM_CQ_PERIOD_MODE_START_FROM_CQE = 0x1,
	NET_DIM_CQ_PERIOD_NUM_MODES
};

/* Adaptive moderation logic */
enum {
	NET_DIM_START_MEASURE,
	NET_DIM_MEASURE_IN_PROGRESS,
	NET_DIM_APPLY_NEW_PROFILE,
};

enum {
	NET_DIM_PARKING_ON_TOP,
	NET_DIM_PARKING_TIRED,
	NET_DIM_GOING_RIGHT,
	NET_DIM_GOING_LEFT,
};

enum {
	NET_DIM_STATS_WORSE,
	NET_DIM_STATS_SAME,
	NET_DIM_STATS_BETTER,
};

enum {
	NET_DIM_STEPPED,
	NET_DIM_TOO_TIRED,
	NET_DIM_ON_EDGE,
};

#define NET_DIM_PARAMS_NUM_PROFILES 5
/* Adaptive moderation profiles */
#define NET_DIM_DEFAULT_RX_CQ_MODERATION_PKTS_FROM_EQE 256
#define NET_DIM_DEFAULT_TX_CQ_MODERATION_PKTS_FROM_EQE 128
#define NET_DIM_DEF_PROFILE_CQE 1
#define NET_DIM_DEF_PROFILE_EQE 1

/* All profile sizes must be NET_DIM_PARAMS_NUM_PROFILES */
#define NET_DIM_RX_EQE_PROFILES { \
	{1,   NET_DIM_DEFAULT_RX_CQ_MODERATION_PKTS_FROM_EQE}, \
	{8,   NET_DIM_DEFAULT_RX_CQ_MODERATION_PKTS_FROM_EQE}, \
	{64,  NET_DIM_DEFAULT_RX_CQ_MODERATION_PKTS_FROM_EQE}, \
	{128, NET_DIM_DEFAULT_RX_CQ_MODERATION_PKTS_FROM_EQE}, \
	{256, NET_DIM_DEFAULT_RX_CQ_MODERATION_PKTS_FROM_EQE}, \
}

#define NET_DIM_RX_CQE_PROFILES { \
	{2,  256},             \
	{8,  128},             \
	{16, 64},              \
	{32, 64},              \
	{64, 64}               \
}

#define NET_DIM_TX_EQE_PROFILES { \
	{1,   NET_DIM_DEFAULT_TX_CQ_MODERATION_PKTS_FROM_EQE},  \
	{8,   NET_DIM_DEFAULT_TX_CQ_MODERATION_PKTS_FROM_EQE},  \
	{32,  NET_DIM_DEFAULT_TX_CQ_MODERATION_PKTS_FROM_EQE},  \
	{64,  NET_DIM_DEFAULT_TX_CQ_MODERATION_PKTS_FROM_EQE},  \
	{128, NET_DIM_DEFAULT_TX_CQ_MODERATION_PKTS_FROM_EQE}   \
}

#define NET_DIM_TX_CQE_PROFILES { \
	{5,  128},             \
	{8,  64},              \
	{16, 32},              \
	{32, 32},              \
	{64, 32}               \
}

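/* Each profile entry is a {usec, pkts} pair.  Within every table the five
 * entries are ordered from the least to the most aggressive moderation, so
 * "stepping right" in the logic below (profile_ix++) means more aggregation
 * and fewer interrupts, while "stepping left" means lower latency.
 */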
static const struct net_dim_cq_moder
rx_profile[NET_DIM_CQ_PERIOD_NUM_MODES][NET_DIM_PARAMS_NUM_PROFILES] = {
	NET_DIM_RX_EQE_PROFILES,
	NET_DIM_RX_CQE_PROFILES,
};

static const struct net_dim_cq_moder
tx_profile[NET_DIM_CQ_PERIOD_NUM_MODES][NET_DIM_PARAMS_NUM_PROFILES] = {
	NET_DIM_TX_EQE_PROFILES,
	NET_DIM_TX_CQE_PROFILES,
};

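/* Return the RX moderation entry for the given CQ period mode and profile
 * index, with cq_period_mode filled in so the result can be handed directly
 * to the driver's coalescing configuration code.
 */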
static inline struct net_dim_cq_moder
net_dim_get_rx_moderation(u8 cq_period_mode, int ix)
{
	struct net_dim_cq_moder cq_moder = rx_profile[cq_period_mode][ix];

	cq_moder.cq_period_mode = cq_period_mode;
	return cq_moder;
}

static inline struct net_dim_cq_moder
net_dim_get_def_rx_moderation(u8 cq_period_mode)
{
	u8 profile_ix = cq_period_mode == NET_DIM_CQ_PERIOD_MODE_START_FROM_CQE ?
			NET_DIM_DEF_PROFILE_CQE : NET_DIM_DEF_PROFILE_EQE;

	return net_dim_get_rx_moderation(cq_period_mode, profile_ix);
}

static inline struct net_dim_cq_moder
net_dim_get_tx_moderation(u8 cq_period_mode, int ix)
{
	struct net_dim_cq_moder cq_moder = tx_profile[cq_period_mode][ix];

	cq_moder.cq_period_mode = cq_period_mode;
	return cq_moder;
}

static inline struct net_dim_cq_moder
net_dim_get_def_tx_moderation(u8 cq_period_mode)
{
	u8 profile_ix = cq_period_mode == NET_DIM_CQ_PERIOD_MODE_START_FROM_CQE ?
			NET_DIM_DEF_PROFILE_CQE : NET_DIM_DEF_PROFILE_EQE;

	return net_dim_get_tx_moderation(cq_period_mode, profile_ix);
}

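/* "On top" means the tuner has found a local optimum: either it is already
 * parked, or it has taken exactly one step in the current direction after
 * more than one step in the opposite direction (i.e. it just stepped back
 * to the best profile seen so far).
 */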
static inline bool net_dim_on_top(struct net_dim *dim)
{
	switch (dim->tune_state) {
	case NET_DIM_PARKING_ON_TOP:
	case NET_DIM_PARKING_TIRED:
		return true;
	case NET_DIM_GOING_RIGHT:
		return (dim->steps_left > 1) && (dim->steps_right == 1);
	default: /* NET_DIM_GOING_LEFT */
		return (dim->steps_right > 1) && (dim->steps_left == 1);
	}
}

static inline void net_dim_turn(struct net_dim *dim)
{
	switch (dim->tune_state) {
	case NET_DIM_PARKING_ON_TOP:
	case NET_DIM_PARKING_TIRED:
		break;
	case NET_DIM_GOING_RIGHT:
		dim->tune_state = NET_DIM_GOING_LEFT;
		dim->steps_left = 0;
		break;
	case NET_DIM_GOING_LEFT:
		dim->tune_state = NET_DIM_GOING_RIGHT;
		dim->steps_right = 0;
		break;
	}
}

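/* Move profile_ix one step in the current direction.  Returns
 * NET_DIM_ON_EDGE when already at the first/last profile,
 * NET_DIM_TOO_TIRED after too many consecutive steps without settling,
 * and NET_DIM_STEPPED otherwise.
 */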
static inline int net_dim_step(struct net_dim *dim)
{
	if (dim->tired == (NET_DIM_PARAMS_NUM_PROFILES * 2))
		return NET_DIM_TOO_TIRED;

	switch (dim->tune_state) {
	case NET_DIM_PARKING_ON_TOP:
	case NET_DIM_PARKING_TIRED:
		break;
	case NET_DIM_GOING_RIGHT:
		if (dim->profile_ix == (NET_DIM_PARAMS_NUM_PROFILES - 1))
			return NET_DIM_ON_EDGE;
		dim->profile_ix++;
		dim->steps_right++;
		break;
	case NET_DIM_GOING_LEFT:
		if (dim->profile_ix == 0)
			return NET_DIM_ON_EDGE;
		dim->profile_ix--;
		dim->steps_left++;
		break;
	}

	dim->tired++;
	return NET_DIM_STEPPED;
}

static inline void net_dim_park_on_top(struct net_dim *dim)
{
	dim->steps_right = 0;
	dim->steps_left  = 0;
	dim->tired       = 0;
	dim->tune_state  = NET_DIM_PARKING_ON_TOP;
}

static inline void net_dim_park_tired(struct net_dim *dim)
{
	dim->steps_right = 0;
	dim->steps_left  = 0;
	dim->tune_state  = NET_DIM_PARKING_TIRED;
}

static inline void net_dim_exit_parking(struct net_dim *dim)
{
	dim->tune_state = dim->profile_ix ? NET_DIM_GOING_LEFT :
					    NET_DIM_GOING_RIGHT;
	net_dim_step(dim);
}

#define IS_SIGNIFICANT_DIFF(val, ref) \
	(((100UL * abs((val) - (ref))) / (ref)) > 10) /* more than 10% difference */
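/* Example: with ref = 1000, val = 1100 yields (100 * 100) / 1000 = 10,
 * which is not > 10, so a 10% change is still treated as "same"; val = 1101
 * also yields 10 because of integer division, while val = 1110 yields 11
 * and is considered a significant difference.
 */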

static inline int net_dim_stats_compare(struct net_dim_stats *curr,
					struct net_dim_stats *prev)
{
	if (!prev->bpms)
		return curr->bpms ? NET_DIM_STATS_BETTER :
				    NET_DIM_STATS_SAME;

	if (IS_SIGNIFICANT_DIFF(curr->bpms, prev->bpms))
		return (curr->bpms > prev->bpms) ? NET_DIM_STATS_BETTER :
						   NET_DIM_STATS_WORSE;

	if (!prev->ppms)
		return curr->ppms ? NET_DIM_STATS_BETTER :
				    NET_DIM_STATS_SAME;

	if (IS_SIGNIFICANT_DIFF(curr->ppms, prev->ppms))
		return (curr->ppms > prev->ppms) ? NET_DIM_STATS_BETTER :
						   NET_DIM_STATS_WORSE;

	if (!prev->epms)
		return NET_DIM_STATS_SAME;

	if (IS_SIGNIFICANT_DIFF(curr->epms, prev->epms))
		return (curr->epms < prev->epms) ? NET_DIM_STATS_BETTER :
						   NET_DIM_STATS_WORSE;

	return NET_DIM_STATS_SAME;
}

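/* Core decision step, run once per measurement window: compare the new
 * stats against the previous window, turn around if things got worse,
 * step otherwise, and park when a local optimum is reached or the tuner
 * is too tired.  Returns true when profile_ix changed, in which case
 * net_dim() below schedules dim->work so the new profile can be applied.
 */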
static inline bool net_dim_decision(struct net_dim_stats *curr_stats,
				    struct net_dim *dim)
{
	int prev_state = dim->tune_state;
	int prev_ix = dim->profile_ix;
	int stats_res;
	int step_res;

	switch (dim->tune_state) {
	case NET_DIM_PARKING_ON_TOP:
		stats_res = net_dim_stats_compare(curr_stats, &dim->prev_stats);
		if (stats_res != NET_DIM_STATS_SAME)
			net_dim_exit_parking(dim);
		break;

	case NET_DIM_PARKING_TIRED:
		dim->tired--;
		if (!dim->tired)
			net_dim_exit_parking(dim);
		break;

	case NET_DIM_GOING_RIGHT:
	case NET_DIM_GOING_LEFT:
		stats_res = net_dim_stats_compare(curr_stats, &dim->prev_stats);
		if (stats_res != NET_DIM_STATS_BETTER)
			net_dim_turn(dim);

		if (net_dim_on_top(dim)) {
			net_dim_park_on_top(dim);
			break;
		}

		step_res = net_dim_step(dim);
		switch (step_res) {
		case NET_DIM_ON_EDGE:
			net_dim_park_on_top(dim);
			break;
		case NET_DIM_TOO_TIRED:
			net_dim_park_tired(dim);
			break;
		}

		break;
	}

	if ((prev_state != NET_DIM_PARKING_ON_TOP) ||
	    (dim->tune_state != NET_DIM_PARKING_ON_TOP))
		dim->prev_stats = *curr_stats;

	return dim->profile_ix != prev_ix;
}

static inline void net_dim_sample(u16 event_ctr,
				  u64 packets,
				  u64 bytes,
				  struct net_dim_sample *s)
{
	s->time      = ktime_get();
	s->pkt_ctr   = packets;
	s->byte_ctr  = bytes;
	s->event_ctr = event_ctr;
}

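/* A measurement window spans at least NET_DIM_NEVENTS completion events.
 * BIT_GAP() computes (end - start) modulo 2^bits, so packet/byte/event
 * counters that wrapped around between two samples are still accounted
 * for correctly.
 */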
#define NET_DIM_NEVENTS 64
#define BITS_PER_TYPE(type) (sizeof(type) * BITS_PER_BYTE)
#define BIT_GAP(bits, end, start) ((((end) - (start)) + BIT_ULL(bits)) & (BIT_ULL(bits) - 1))

static inline void net_dim_calc_stats(struct net_dim_sample *start,
				      struct net_dim_sample *end,
				      struct net_dim_stats *curr_stats)
{
	/* u32 holds up to 71 minutes, should be enough */
	u32 delta_us = ktime_us_delta(end->time, start->time);
	u32 npkts = BIT_GAP(BITS_PER_TYPE(u32), end->pkt_ctr, start->pkt_ctr);
	u32 nbytes = BIT_GAP(BITS_PER_TYPE(u32), end->byte_ctr,
			     start->byte_ctr);

	if (!delta_us)
		return;

	curr_stats->ppms = DIV_ROUND_UP(npkts * USEC_PER_MSEC, delta_us);
	curr_stats->bpms = DIV_ROUND_UP(nbytes * USEC_PER_MSEC, delta_us);
	curr_stats->epms = DIV_ROUND_UP(NET_DIM_NEVENTS * USEC_PER_MSEC,
					delta_us);
}

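/* Main entry point, called by the driver from its interrupt/NAPI completion
 * path with a fresh net_dim_sample.  Once at least NET_DIM_NEVENTS events
 * have elapsed since start_sample, it computes the rates for the window,
 * lets net_dim_decision() pick a profile and, if the profile changed,
 * schedules dim->work so the driver can apply it.
 */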
static inline void net_dim(struct net_dim *dim,
			   struct net_dim_sample end_sample)
{
	struct net_dim_stats curr_stats;
	u16 nevents;

	switch (dim->state) {
	case NET_DIM_MEASURE_IN_PROGRESS:
		nevents = BIT_GAP(BITS_PER_TYPE(u16),
				  end_sample.event_ctr,
				  dim->start_sample.event_ctr);
		if (nevents < NET_DIM_NEVENTS)
			break;
		net_dim_calc_stats(&dim->start_sample, &end_sample,
				   &curr_stats);
		if (net_dim_decision(&curr_stats, dim)) {
			dim->state = NET_DIM_APPLY_NEW_PROFILE;
			schedule_work(&dim->work);
			break;
		}
		/* fall through */
	case NET_DIM_START_MEASURE:
		net_dim_sample(end_sample.event_ctr, end_sample.pkt_ctr,
			       end_sample.byte_ctr, &dim->start_sample);
		dim->state = NET_DIM_MEASURE_IN_PROGRESS;
		break;
	case NET_DIM_APPLY_NEW_PROFILE:
		break;
	}
}
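
/*
 * Example usage (illustrative sketch only -- the ring structure, counter
 * fields and my_hw_set_coal() helper below are hypothetical, not part of
 * this API).  A driver embeds struct net_dim in its ring, feeds it samples
 * from the NAPI poll path, and applies the selected profile from the work
 * handler:
 *
 *	static void my_dim_work(struct work_struct *work)
 *	{
 *		struct net_dim *dim = container_of(work, struct net_dim, work);
 *		struct my_ring *ring = container_of(dim, struct my_ring, dim);
 *		struct net_dim_cq_moder m =
 *			net_dim_get_rx_moderation(dim->mode, dim->profile_ix);
 *
 *		my_hw_set_coal(ring, m.usec, m.pkts);
 *		dim->state = NET_DIM_START_MEASURE;
 *	}
 *
 *	In the NAPI poll routine, after processing completions:
 *
 *		struct net_dim_sample s;
 *
 *		net_dim_sample(ring->event_ctr, ring->rx_packets,
 *			       ring->rx_bytes, &s);
 *		net_dim(&ring->dim, s);
 *
 * INIT_WORK(&ring->dim.work, my_dim_work) must be done at setup time, and
 * dim->mode is typically initialized to
 * NET_DIM_CQ_PERIOD_MODE_START_FROM_EQE.
 */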

#endif /* NET_DIM_H */