/*
 * Copyright (C) 2008 Felix Fietkau <nbd@openwrt.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Based on minstrel.c:
 *   Copyright (C) 2005-2007 Derek Smithies <derek@indranet.co.nz>
 *   Sponsored by Indranet Technologies Ltd
 *
 * Based on sample.c:
 *   Copyright (c) 2005 John Bicket
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *   1. Redistributions of source code must retain the above copyright
 *      notice, this list of conditions and the following disclaimer,
 *      without modification.
 *   2. Redistributions in binary form must reproduce at minimum a disclaimer
 *      similar to the "NO WARRANTY" disclaimer below ("Disclaimer") and any
 *      redistribution must be conditioned upon including a substantially
 *      similar Disclaimer requirement for further binary redistribution.
 *   3. Neither the names of the above-listed copyright holders nor the names
 *      of any contributors may be used to endorse or promote products derived
 *      from this software without specific prior written permission.
 *
 *   Alternatively, this software may be distributed under the terms of the
 *   GNU General Public License ("GPL") version 2 as published by the Free
 *   Software Foundation.
 *
 *   NO WARRANTY
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF NONINFRINGEMENT, MERCHANTIBILITY
 *   AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
 *   THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY,
 *   OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 *   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 *   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
 *   IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 *   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 *   THE POSSIBILITY OF SUCH DAMAGES.
 */
#include <linux/netdevice.h>
#include <linux/types.h>
#include <linux/skbuff.h>
#include <linux/debugfs.h>
#include <linux/random.h>
#include <linux/ieee80211.h>
#include <linux/slab.h>
#include <net/mac80211.h>
#include "rate.h"
#include "rc80211_minstrel.h"

#define SAMPLE_TBL(_mi, _idx, _col) \
        _mi->sample_table[(_idx * SAMPLE_COLUMNS) + _col]

/* convert mac80211 rate index to local array index */
static inline int
rix_to_ndx(struct minstrel_sta_info *mi, int rix)
{
        int i = rix;
        for (i = rix; i >= 0; i--)
                if (mi->r[i].rix == rix)
                        break;
        return i;
}

/* return current EWMA throughput */
int minstrel_get_tp_avg(struct minstrel_rate *mr, int prob_ewma)
{
        int usecs;

        usecs = mr->perfect_tx_time;
        if (!usecs)
                usecs = 1000000;

        /* reset thr. below 10% success */
        if (mr->stats.prob_ewma < MINSTREL_FRAC(10, 100))
                return 0;

        if (prob_ewma > MINSTREL_FRAC(90, 100))
                return MINSTREL_TRUNC(100000 * (MINSTREL_FRAC(90, 100) / usecs));
        else
                return MINSTREL_TRUNC(100000 * (prob_ewma / usecs));
}

/* find & sort topmost throughput rates */
static inline void
minstrel_sort_best_tp_rates(struct minstrel_sta_info *mi, int i, u8 *tp_list)
{
        int j;
        struct minstrel_rate_stats *tmp_mrs;
        struct minstrel_rate_stats *cur_mrs = &mi->r[i].stats;

        for (j = MAX_THR_RATES; j > 0; --j) {
                tmp_mrs = &mi->r[tp_list[j - 1]].stats;
                if (minstrel_get_tp_avg(&mi->r[i], cur_mrs->prob_ewma) <=
                    minstrel_get_tp_avg(&mi->r[tp_list[j - 1]], tmp_mrs->prob_ewma))
                        break;
        }

        if (j < MAX_THR_RATES - 1)
                memmove(&tp_list[j + 1], &tp_list[j], MAX_THR_RATES - (j + 1));
        if (j < MAX_THR_RATES)
                tp_list[j] = i;
}

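/* Copy the rate index and retry limits of rate @idx into slot @offset of the
 * rate table that is handed to mac80211 */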
static void
minstrel_set_rate(struct minstrel_sta_info *mi, struct ieee80211_sta_rates *ratetbl,
                  int offset, int idx)
{
        struct minstrel_rate *r = &mi->r[idx];

        ratetbl->rate[offset].idx = r->rix;
        ratetbl->rate[offset].count = r->adjusted_retry_count;
        ratetbl->rate[offset].count_cts = r->retry_count_cts;
        ratetbl->rate[offset].count_rts = r->stats.retry_count_rtscts;
}

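/* Build the retry chain for this station (best throughput rates first, then
 * the most robust rate, then the lowest rate as a last resort) and hand it
 * to mac80211 via rate_control_set_rates() */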
static void
minstrel_update_rates(struct minstrel_priv *mp, struct minstrel_sta_info *mi)
{
        struct ieee80211_sta_rates *ratetbl;
        int i = 0;

        ratetbl = kzalloc(sizeof(*ratetbl), GFP_ATOMIC);
        if (!ratetbl)
                return;

        /* Start with max_tp_rate */
        minstrel_set_rate(mi, ratetbl, i++, mi->max_tp_rate[0]);

        if (mp->hw->max_rates >= 3) {
                /* At least 3 tx rates supported, use max_tp_rate2 next */
                minstrel_set_rate(mi, ratetbl, i++, mi->max_tp_rate[1]);
        }

        if (mp->hw->max_rates >= 2) {
                /* At least 2 tx rates supported, use max_prob_rate next */
                minstrel_set_rate(mi, ratetbl, i++, mi->max_prob_rate);
        }

        /* Use lowest rate last */
        ratetbl->rate[i].idx = mi->lowest_rix;
        ratetbl->rate[i].count = mp->max_retry;
        ratetbl->rate[i].count_cts = mp->max_retry;
        ratetbl->rate[i].count_rts = mp->max_retry;

        rate_control_set_rates(mp->hw, mi->sta, ratetbl);
}

/*
 * Recalculate statistics and counters of a given rate
 */
void
minstrel_calc_rate_stats(struct minstrel_rate_stats *mrs)
{
        if (unlikely(mrs->attempts > 0)) {
                mrs->sample_skipped = 0;
                mrs->cur_prob = MINSTREL_FRAC(mrs->success, mrs->attempts);
                if (unlikely(!mrs->att_hist)) {
                        mrs->prob_ewma = mrs->cur_prob;
                } else {
                        /* update exponential weighted moving variance */
                        mrs->prob_ewmsd = minstrel_ewmsd(mrs->prob_ewmsd,
                                                         mrs->cur_prob,
                                                         mrs->prob_ewma,
                                                         EWMA_LEVEL);

                        /* update exponential weighted moving average */
                        mrs->prob_ewma = minstrel_ewma(mrs->prob_ewma,
                                                       mrs->cur_prob,
                                                       EWMA_LEVEL);
                }
                mrs->att_hist += mrs->attempts;
                mrs->succ_hist += mrs->success;
        } else {
                mrs->sample_skipped++;
        }

        mrs->last_success = mrs->success;
        mrs->last_attempts = mrs->attempts;
        mrs->success = 0;
        mrs->attempts = 0;
}

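/* Periodic statistics update: refresh each rate's EWMA success probability,
 * re-select the best throughput rates and the most robust rate, and push the
 * resulting retry chain to the driver */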
static void
minstrel_update_stats(struct minstrel_priv *mp, struct minstrel_sta_info *mi)
{
        u8 tmp_tp_rate[MAX_THR_RATES];
        u8 tmp_prob_rate = 0;
        int i, tmp_cur_tp, tmp_prob_tp;

        for (i = 0; i < MAX_THR_RATES; i++)
                tmp_tp_rate[i] = 0;

        for (i = 0; i < mi->n_rates; i++) {
                struct minstrel_rate *mr = &mi->r[i];
                struct minstrel_rate_stats *mrs = &mi->r[i].stats;
                struct minstrel_rate_stats *tmp_mrs = &mi->r[tmp_prob_rate].stats;

                /* Update statistics of success probability per rate */
                minstrel_calc_rate_stats(mrs);

                /* Sample less often below the 10% chance of success.
                 * Sample less often above the 95% chance of success. */
                if (mrs->prob_ewma > MINSTREL_FRAC(95, 100) ||
                    mrs->prob_ewma < MINSTREL_FRAC(10, 100)) {
                        mr->adjusted_retry_count = mrs->retry_count >> 1;
                        if (mr->adjusted_retry_count > 2)
                                mr->adjusted_retry_count = 2;
                        mr->sample_limit = 4;
                } else {
                        mr->sample_limit = -1;
                        mr->adjusted_retry_count = mrs->retry_count;
                }
                if (!mr->adjusted_retry_count)
                        mr->adjusted_retry_count = 2;

                minstrel_sort_best_tp_rates(mi, i, tmp_tp_rate);

                /* To determine the most robust rate (max_prob_rate) used at
                 * the 3rd mrr stage we distinguish between two cases:
                 * (1) if any success probability >= 95%, out of those rates
                 * choose the maximum throughput rate as max_prob_rate
                 * (2) if all success probabilities < 95%, the rate with
                 * highest success probability is chosen as max_prob_rate */
                if (mrs->prob_ewma >= MINSTREL_FRAC(95, 100)) {
                        tmp_cur_tp = minstrel_get_tp_avg(mr, mrs->prob_ewma);
                        tmp_prob_tp = minstrel_get_tp_avg(&mi->r[tmp_prob_rate],
                                                          tmp_mrs->prob_ewma);
                        if (tmp_cur_tp >= tmp_prob_tp)
                                tmp_prob_rate = i;
                } else {
                        if (mrs->prob_ewma >= tmp_mrs->prob_ewma)
                                tmp_prob_rate = i;
                }
        }

        /* Assign the new rate set */
        memcpy(mi->max_tp_rate, tmp_tp_rate, sizeof(mi->max_tp_rate));
        mi->max_prob_rate = tmp_prob_rate;

#ifdef CONFIG_MAC80211_DEBUGFS
        /* use fixed index if set */
        if (mp->fixed_rate_idx != -1) {
                mi->max_tp_rate[0] = mp->fixed_rate_idx;
                mi->max_tp_rate[1] = mp->fixed_rate_idx;
                mi->max_prob_rate = mp->fixed_rate_idx;
        }
#endif

        /* Reset update timer */
        mi->last_stats_update = jiffies;

        minstrel_update_rates(mp, mi);
}

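/* Account the attempts and the final success of a transmitted frame to the
 * rates used in its retry chain; kick off a statistics update once the
 * update interval has elapsed */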
static void
minstrel_tx_status(void *priv, struct ieee80211_supported_band *sband,
                   struct ieee80211_sta *sta, void *priv_sta,
                   struct ieee80211_tx_info *info)
{
        struct minstrel_priv *mp = priv;
        struct minstrel_sta_info *mi = priv_sta;
        struct ieee80211_tx_rate *ar = info->status.rates;
        int i, ndx;
        int success;

        success = !!(info->flags & IEEE80211_TX_STAT_ACK);

        for (i = 0; i < IEEE80211_TX_MAX_RATES; i++) {
                if (ar[i].idx < 0)
                        break;

                ndx = rix_to_ndx(mi, ar[i].idx);
                if (ndx < 0)
                        continue;

                mi->r[ndx].stats.attempts += ar[i].count;

                if ((i != IEEE80211_TX_MAX_RATES - 1) && (ar[i + 1].idx < 0))
                        mi->r[ndx].stats.success += success;
        }

        if ((info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE) && (i >= 0))
                mi->sample_packets++;

        if (mi->sample_deferred > 0)
                mi->sample_deferred--;

        if (time_after(jiffies, mi->last_stats_update +
                                (mp->update_interval * HZ) / 1000))
                minstrel_update_stats(mp, mi);
}

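/* Retry count for a rate, additionally capped by the RTS/CTS or
 * CTS-to-self limits when protection is in use */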
static inline unsigned int
minstrel_get_retry_count(struct minstrel_rate *mr,
                         struct ieee80211_tx_info *info)
{
        u8 retry = mr->adjusted_retry_count;

        if (info->control.use_rts)
                retry = max_t(u8, 2, min(mr->stats.retry_count_rtscts, retry));
        else if (info->control.use_cts_prot)
                retry = max_t(u8, 2, min(mr->retry_count_cts, retry));
        return retry;
}

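/* Pick the next rate to sample from the randomized sample table, advancing
 * row by row and wrapping over to the next column */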
static int
minstrel_get_next_sample(struct minstrel_sta_info *mi)
{
        unsigned int sample_ndx;
        sample_ndx = SAMPLE_TBL(mi, mi->sample_row, mi->sample_column);
        mi->sample_row++;
        if ((int) mi->sample_row >= mi->n_rates) {
                mi->sample_row = 0;
                mi->sample_column++;
                if (mi->sample_column >= SAMPLE_COLUMNS)
                        mi->sample_column = 0;
        }
        return sample_ndx;
}

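/* Per-packet rate selection: the regular retry chain has already been
 * programmed via minstrel_update_rates(), so this only decides whether to
 * override the first (or second) MRR slot with a sampling rate */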
static void
minstrel_get_rate(void *priv, struct ieee80211_sta *sta,
                  void *priv_sta, struct ieee80211_tx_rate_control *txrc)
{
        struct sk_buff *skb = txrc->skb;
        struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
        struct minstrel_sta_info *mi = priv_sta;
        struct minstrel_priv *mp = priv;
        struct ieee80211_tx_rate *rate = &info->control.rates[0];
        struct minstrel_rate *msr, *mr;
        unsigned int ndx;
        bool mrr_capable;
        bool prev_sample;
        int delta;
        int sampling_ratio;

        /* management/no-ack frames do not use rate control */
        if (rate_control_send_low(sta, priv_sta, txrc))
                return;

        /* check multi-rate-retry capabilities & adjust lookaround_rate */
        mrr_capable = mp->has_mrr &&
                      !txrc->rts &&
                      !txrc->bss_conf->use_cts_prot;
        if (mrr_capable)
                sampling_ratio = mp->lookaround_rate_mrr;
        else
                sampling_ratio = mp->lookaround_rate;

        /* increase sum packet counter */
        mi->total_packets++;

#ifdef CONFIG_MAC80211_DEBUGFS
        if (mp->fixed_rate_idx != -1)
                return;
#endif

        delta = (mi->total_packets * sampling_ratio / 100) -
                        (mi->sample_packets + mi->sample_deferred / 2);

        /* delta < 0: no sampling required */
        prev_sample = mi->prev_sample;
        mi->prev_sample = false;
        if (delta < 0 || (!mrr_capable && prev_sample))
                return;

        if (mi->total_packets >= 10000) {
                mi->sample_deferred = 0;
                mi->sample_packets = 0;
                mi->total_packets = 0;
        } else if (delta > mi->n_rates * 2) {
                /* With multi-rate retry, not every planned sample
                 * attempt actually gets used, due to the way the retry
                 * chain is set up - [max_tp,sample,prob,lowest] for
                 * sample_rate < max_tp.
                 *
                 * If there's too much sampling backlog and the link
                 * starts getting worse, minstrel would start bursting
                 * out lots of sampling frames, which would result
                 * in a large throughput loss. */
                mi->sample_packets += (delta - mi->n_rates * 2);
        }

        /* get next random rate sample */
        ndx = minstrel_get_next_sample(mi);
        msr = &mi->r[ndx];
        mr = &mi->r[mi->max_tp_rate[0]];

        /* Decide if direct (1st mrr stage) or indirect (2nd mrr stage)
         * rate sampling method should be used.
         * Respect such rates that are not sampled for 20 iterations.
         */
        if (mrr_capable &&
            msr->perfect_tx_time > mr->perfect_tx_time &&
            msr->stats.sample_skipped < 20) {
                /* Only use IEEE80211_TX_CTL_RATE_CTRL_PROBE to mark
                 * packets that have the sampling rate deferred to the
                 * second MRR stage. Increase the sample counter only
                 * if the deferred sample rate was actually used.
                 * Use the sample_deferred counter to make sure that
                 * the sampling is not done in large bursts */
                info->flags |= IEEE80211_TX_CTL_RATE_CTRL_PROBE;
                rate++;
                mi->sample_deferred++;
        } else {
                if (!msr->sample_limit)
                        return;

                mi->sample_packets++;
                if (msr->sample_limit > 0)
                        msr->sample_limit--;
        }

        /* If we're not using MRR and the sampling rate already
         * has a probability of >95%, we shouldn't be attempting
         * to use it, as this only wastes precious airtime */
        if (!mrr_capable &&
           (mi->r[ndx].stats.prob_ewma > MINSTREL_FRAC(95, 100)))
                return;

        mi->prev_sample = true;

        rate->idx = mi->r[ndx].rix;
        rate->count = minstrel_get_retry_count(&mi->r[ndx], info);
}

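/* Precompute the perfect (retry-free) tx time of an average-sized (1200 byte)
 * frame and the ACK duration for the given bitrate */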
static void
calc_rate_durations(enum ieee80211_band band,
                    struct minstrel_rate *d,
                    struct ieee80211_rate *rate,
                    struct cfg80211_chan_def *chandef)
{
        int erp = !!(rate->flags & IEEE80211_RATE_ERP_G);
        int shift = ieee80211_chandef_get_shift(chandef);

        d->perfect_tx_time = ieee80211_frame_duration(band, 1200,
                        DIV_ROUND_UP(rate->bitrate, 1 << shift), erp, 1,
                        shift);
        d->ack_time = ieee80211_frame_duration(band, 10,
                        DIV_ROUND_UP(rate->bitrate, 1 << shift), erp, 1,
                        shift);
}

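/* Fill each column of the sample table with a random permutation of the
 * supported rate indices */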
static void
init_sample_table(struct minstrel_sta_info *mi)
{
        unsigned int i, col, new_idx;
        u8 rnd[8];

        mi->sample_column = 0;
        mi->sample_row = 0;
        memset(mi->sample_table, 0xff, SAMPLE_COLUMNS * mi->n_rates);

        for (col = 0; col < SAMPLE_COLUMNS; col++) {
                prandom_bytes(rnd, sizeof(rnd));
                for (i = 0; i < mi->n_rates; i++) {
                        new_idx = (i + rnd[i & 7]) % mi->n_rates;
                        while (SAMPLE_TBL(mi, new_idx, col) != 0xff)
                                new_idx = (new_idx + 1) % mi->n_rates;

                        SAMPLE_TBL(mi, new_idx, col) = i;
                }
        }
}

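/* Initialize the per-station rate table: per-rate tx/ACK durations, retry
 * limits derived from the maximum MRR segment size, the sample table and an
 * initial retry chain */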
static void
minstrel_rate_init(void *priv, struct ieee80211_supported_band *sband,
                   struct cfg80211_chan_def *chandef,
                   struct ieee80211_sta *sta, void *priv_sta)
{
        struct minstrel_sta_info *mi = priv_sta;
        struct minstrel_priv *mp = priv;
        struct ieee80211_rate *ctl_rate;
        unsigned int i, n = 0;
        unsigned int t_slot = 9; /* FIXME: get real slot time */
        u32 rate_flags;

        mi->sta = sta;
        mi->lowest_rix = rate_lowest_index(sband, sta);
        ctl_rate = &sband->bitrates[mi->lowest_rix];
        mi->sp_ack_dur = ieee80211_frame_duration(sband->band, 10,
                                ctl_rate->bitrate,
                                !!(ctl_rate->flags & IEEE80211_RATE_ERP_G), 1,
                                ieee80211_chandef_get_shift(chandef));

        rate_flags = ieee80211_chandef_rate_flags(&mp->hw->conf.chandef);
        memset(mi->max_tp_rate, 0, sizeof(mi->max_tp_rate));
        mi->max_prob_rate = 0;

        for (i = 0; i < sband->n_bitrates; i++) {
                struct minstrel_rate *mr = &mi->r[n];
                struct minstrel_rate_stats *mrs = &mi->r[n].stats;
                unsigned int tx_time = 0, tx_time_cts = 0, tx_time_rtscts = 0;
                unsigned int tx_time_single;
                unsigned int cw = mp->cw_min;
                int shift;

                if (!rate_supported(sta, sband->band, i))
                        continue;
                if ((rate_flags & sband->bitrates[i].flags) != rate_flags)
                        continue;

                n++;
                memset(mr, 0, sizeof(*mr));
                memset(mrs, 0, sizeof(*mrs));

                mr->rix = i;
                shift = ieee80211_chandef_get_shift(chandef);
                mr->bitrate = DIV_ROUND_UP(sband->bitrates[i].bitrate,
                                           (1 << shift) * 5);
                calc_rate_durations(sband->band, mr, &sband->bitrates[i],
                                    chandef);

                /* calculate maximum number of retransmissions before
                 * fallback (based on maximum segment size) */
                mr->sample_limit = -1;
                mrs->retry_count = 1;
                mr->retry_count_cts = 1;
                mrs->retry_count_rtscts = 1;
                tx_time = mr->perfect_tx_time + mi->sp_ack_dur;
                do {
                        /* add one retransmission */
                        tx_time_single = mr->ack_time + mr->perfect_tx_time;

                        /* contention window */
                        tx_time_single += (t_slot * cw) >> 1;
                        cw = min((cw << 1) | 1, mp->cw_max);

                        tx_time += tx_time_single;
                        tx_time_cts += tx_time_single + mi->sp_ack_dur;
                        tx_time_rtscts += tx_time_single + 2 * mi->sp_ack_dur;
                        if ((tx_time_cts < mp->segment_size) &&
                            (mr->retry_count_cts < mp->max_retry))
                                mr->retry_count_cts++;
                        if ((tx_time_rtscts < mp->segment_size) &&
                            (mrs->retry_count_rtscts < mp->max_retry))
                                mrs->retry_count_rtscts++;
                } while ((tx_time < mp->segment_size) &&
                         (++mr->stats.retry_count < mp->max_retry));
                mr->adjusted_retry_count = mrs->retry_count;
                if (!(sband->bitrates[i].flags & IEEE80211_RATE_ERP_G))
                        mr->retry_count_cts = mrs->retry_count;
        }

        for (i = n; i < sband->n_bitrates; i++) {
                struct minstrel_rate *mr = &mi->r[i];
                mr->rix = -1;
        }

        mi->n_rates = n;
        mi->last_stats_update = jiffies;

        init_sample_table(mi);
        minstrel_update_rates(mp, mi);
}

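/* Allocate per-station state, sized for the band with the most bitrates */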
static void *
minstrel_alloc_sta(void *priv, struct ieee80211_sta *sta, gfp_t gfp)
{
        struct ieee80211_supported_band *sband;
        struct minstrel_sta_info *mi;
        struct minstrel_priv *mp = priv;
        struct ieee80211_hw *hw = mp->hw;
        int max_rates = 0;
        int i;

        mi = kzalloc(sizeof(struct minstrel_sta_info), gfp);
        if (!mi)
                return NULL;

        for (i = 0; i < IEEE80211_NUM_BANDS; i++) {
                sband = hw->wiphy->bands[i];
                if (sband && sband->n_bitrates > max_rates)
                        max_rates = sband->n_bitrates;
        }

        mi->r = kzalloc(sizeof(struct minstrel_rate) * max_rates, gfp);
        if (!mi->r)
                goto error;

        mi->sample_table = kmalloc(SAMPLE_COLUMNS * max_rates, gfp);
        if (!mi->sample_table)
                goto error1;

        mi->last_stats_update = jiffies;
        return mi;

error1:
        kfree(mi->r);
error:
        kfree(mi);
        return NULL;
}

static void
minstrel_free_sta(void *priv, struct ieee80211_sta *sta, void *priv_sta)
{
        struct minstrel_sta_info *mi = priv_sta;

        kfree(mi->sample_table);
        kfree(mi->r);
        kfree(mi);
}

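/* Remember the indices of the four CCK rates (1, 2, 5.5 and 11 Mbit/s;
 * bitrates are stored in units of 100 kbit/s) on the 2.4 GHz band */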
static void
minstrel_init_cck_rates(struct minstrel_priv *mp)
{
        static const int bitrates[4] = { 10, 20, 55, 110 };
        struct ieee80211_supported_band *sband;
        u32 rate_flags = ieee80211_chandef_rate_flags(&mp->hw->conf.chandef);
        int i, j;

        sband = mp->hw->wiphy->bands[IEEE80211_BAND_2GHZ];
        if (!sband)
                return;

        for (i = 0, j = 0; i < sband->n_bitrates; i++) {
                struct ieee80211_rate *rate = &sband->bitrates[i];

                if (rate->flags & IEEE80211_RATE_ERP_G)
                        continue;

                if ((rate_flags & sband->bitrates[i].flags) != rate_flags)
                        continue;

                for (j = 0; j < ARRAY_SIZE(bitrates); j++) {
                        if (rate->bitrate != bitrates[j])
                                continue;

                        mp->cck_rates[j] = i;
                        break;
                }
        }
}

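/* Allocate and initialize the global minstrel state: contention window
 * bounds, sampling ratios, retry limits and the debugfs fixed-rate override */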
static void *
minstrel_alloc(struct ieee80211_hw *hw, struct dentry *debugfsdir)
{
        struct minstrel_priv *mp;

        mp = kzalloc(sizeof(struct minstrel_priv), GFP_ATOMIC);
        if (!mp)
                return NULL;

        /* contention window settings
         * Just an approximation. Using the per-queue values would complicate
         * the calculations and is probably unnecessary */
        mp->cw_min = 15;
        mp->cw_max = 1023;

        /* number of packets (in %) to use for sampling other rates
         * sample less often for non-mrr packets, because the overhead
         * is much higher than with mrr */
        mp->lookaround_rate = 5;
        mp->lookaround_rate_mrr = 10;

        /* maximum time that the hw is allowed to stay in one MRR segment */
        mp->segment_size = 6000;

        if (hw->max_rate_tries > 0)
                mp->max_retry = hw->max_rate_tries;
        else
                /* safe default, does not necessarily have to match hw properties */
                mp->max_retry = 7;

        if (hw->max_rates >= 4)
                mp->has_mrr = true;

        mp->hw = hw;
        mp->update_interval = 100;

#ifdef CONFIG_MAC80211_DEBUGFS
        mp->fixed_rate_idx = (u32) -1;
        mp->dbg_fixed_rate = debugfs_create_u32("fixed_rate_idx",
                        S_IRUGO | S_IWUGO, debugfsdir, &mp->fixed_rate_idx);
#endif

        minstrel_init_cck_rates(mp);

        return mp;
}

static void
minstrel_free(void *priv)
{
#ifdef CONFIG_MAC80211_DEBUGFS
        debugfs_remove(((struct minstrel_priv *)priv)->dbg_fixed_rate);
#endif
        kfree(priv);
}

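/* Report the expected throughput of the current best rate in kbit/s */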
static u32 minstrel_get_expected_throughput(void *priv_sta)
{
        struct minstrel_sta_info *mi = priv_sta;
        struct minstrel_rate_stats *tmp_mrs;
        int idx = mi->max_tp_rate[0];
        int tmp_cur_tp;

        /* convert pkt per sec in kbps (1200 is the average pkt size used for
         * computing cur_tp)
         */
        tmp_mrs = &mi->r[idx].stats;
        tmp_cur_tp = minstrel_get_tp_avg(&mi->r[idx], tmp_mrs->prob_ewma) * 10;
        tmp_cur_tp = tmp_cur_tp * 1200 * 8 / 1024;

        return tmp_cur_tp;
}

const struct rate_control_ops mac80211_minstrel = {
        .name = "minstrel",
        .tx_status_noskb = minstrel_tx_status,
        .get_rate = minstrel_get_rate,
        .rate_init = minstrel_rate_init,
        .alloc = minstrel_alloc,
        .free = minstrel_free,
        .alloc_sta = minstrel_alloc_sta,
        .free_sta = minstrel_free_sta,
#ifdef CONFIG_MAC80211_DEBUGFS
        .add_sta_debugfs = minstrel_add_sta_debugfs,
        .remove_sta_debugfs = minstrel_remove_sta_debugfs,
#endif
        .get_expected_throughput = minstrel_get_expected_throughput,
};

int __init
rc80211_minstrel_init(void)
{
        return ieee80211_rate_control_register(&mac80211_minstrel);
}

void
rc80211_minstrel_exit(void)
{
        ieee80211_rate_control_unregister(&mac80211_minstrel);
}