/*
 * Copyright (c) 2012 Qualcomm Atheros, Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include "ath9k.h"

/*
 * TX polling - checks if the TX engine is stuck somewhere
 * and issues a chip reset if so.
 *
 * Hang detection works across two consecutive polls: the first poll
 * that finds a non-empty queue marks it in-progress; if the mark is
 * still set (and the queue still non-empty) on the next poll, the TX
 * engine is assumed to be hung and a reset is scheduled.
 */
void ath_tx_complete_poll_work(struct work_struct *work)
{
	struct ath_softc *sc = container_of(work, struct ath_softc,
					    tx_complete_work.work);
	struct ath_txq *txq;
	int i;
	bool needreset = false;

	for (i = 0; i < ATH9K_NUM_TX_QUEUES; i++)
		if (ATH_TXQ_SETUP(sc, i)) {
			txq = &sc->tx.txq[i];
			ath_txq_lock(sc, txq);
			if (txq->axq_depth) {
				if (txq->axq_tx_inprogress) {
					/* Still marked from the previous poll:
					 * treat the queue as hung. */
					needreset = true;
					/* NOTE(review): plain unlock here vs.
					 * ath_txq_unlock_complete() below —
					 * presumably completion processing is
					 * skipped on the reset path; confirm. */
					ath_txq_unlock(sc, txq);
					break;
				} else {
					/* First sighting: mark and re-check on
					 * the next poll interval. */
					txq->axq_tx_inprogress = true;
				}
			}
			ath_txq_unlock_complete(sc, txq);
		}

	if (needreset) {
		ath_dbg(ath9k_hw_common(sc->sc_ah), RESET,
			"tx hung, resetting the chip\n");
		ath9k_queue_reset(sc, RESET_TYPE_TX_HANG);
		/* Do not rearm the poll here; NOTE(review): presumably the
		 * reset path requeues this work — confirm against callers. */
		return;
	}

	ieee80211_queue_delayed_work(sc->hw, &sc->tx_complete_work,
				     msecs_to_jiffies(ATH_TX_COMPLETE_POLL_INT));
}
57
/*
 * Checks if the BB/MAC is hung.
 *
 * Runs from the hw_check work item (queued by ath_rx_poll). For AR9300
 * parts a dead MAC triggers an immediate reset; otherwise the busy-time
 * statistics are sampled and a persistently saturated baseband (busy >= 99%
 * on three consecutive checks) also schedules a reset. In the healthy case
 * the RX poll timer is rearmed.
 */
void ath_hw_check(struct work_struct *work)
{
	struct ath_softc *sc = container_of(work, struct ath_softc, hw_check_work);
	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
	unsigned long flags;
	int busy;
	u8 is_alive, nbeacon = 1;
	enum ath_reset_type type;

	ath9k_ps_wakeup(sc);
	is_alive = ath9k_hw_check_alive(sc->sc_ah);

	if (is_alive && !AR_SREV_9300(sc->sc_ah))
		goto out;	/* non-AR9300 and alive: nothing more to check */
	else if (!is_alive && AR_SREV_9300(sc->sc_ah)) {
		ath_dbg(common, RESET,
			"DCU stuck is detected. Schedule chip reset\n");
		type = RESET_TYPE_MAC_HANG;
		goto sched_reset;
	}

	/* cc_lock protects the shared cycle-counter state */
	spin_lock_irqsave(&common->cc_lock, flags);
	busy = ath_update_survey_stats(sc);
	spin_unlock_irqrestore(&common->cc_lock, flags);

	ath_dbg(common, RESET, "Possible baseband hang, busy=%d (try %d)\n",
		busy, sc->hw_busy_count + 1);
	if (busy >= 99) {
		/* Baseband looks saturated; reset after three strikes */
		if (++sc->hw_busy_count >= 3) {
			type = RESET_TYPE_BB_HANG;
			goto sched_reset;
		}
	} else if (busy >= 0) {
		/* Healthy sample (busy == -1 means "too short to judge"):
		 * clear the strike count and poll less aggressively. */
		sc->hw_busy_count = 0;
		nbeacon = 3;
	}

	ath_start_rx_poll(sc, nbeacon);
	goto out;

sched_reset:
	ath9k_queue_reset(sc, type);
out:
	ath9k_ps_restore(sc);
}
106
107/*
Sujith Manoharanaf68aba2012-06-04 20:23:43 +0530108 * PLL-WAR for AR9485/AR9340
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530109 */
Sujith Manoharanaf68aba2012-06-04 20:23:43 +0530110static bool ath_hw_pll_rx_hang_check(struct ath_softc *sc, u32 pll_sqsum)
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530111{
112 static int count;
113 struct ath_common *common = ath9k_hw_common(sc->sc_ah);
114
115 if (pll_sqsum >= 0x40000) {
116 count++;
117 if (count == 3) {
Sujith Manoharanaf68aba2012-06-04 20:23:43 +0530118 ath_dbg(common, RESET, "PLL WAR, resetting the chip\n");
Rajkumar Manoharan124b9792012-07-17 17:16:42 +0530119 ath9k_queue_reset(sc, RESET_TYPE_PLL_HANG);
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530120 count = 0;
Sujith Manoharanaf68aba2012-06-04 20:23:43 +0530121 return true;
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530122 }
Sujith Manoharanaf68aba2012-06-04 20:23:43 +0530123 } else {
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530124 count = 0;
Sujith Manoharanaf68aba2012-06-04 20:23:43 +0530125 }
126
127 return false;
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530128}
129
130void ath_hw_pll_work(struct work_struct *work)
131{
Sujith Manoharanaf68aba2012-06-04 20:23:43 +0530132 u32 pll_sqsum;
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530133 struct ath_softc *sc = container_of(work, struct ath_softc,
134 hw_pll_work.work);
Mohammed Shafi Shajakhan64bc1232012-06-12 20:13:43 +0530135 /*
136 * ensure that the PLL WAR is executed only
137 * after the STA is associated (or) if the
138 * beaconing had started in interfaces that
139 * uses beacons.
140 */
141 if (!test_bit(SC_OP_BEACONS, &sc->sc_flags))
142 return;
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530143
Sujith Manoharanaf68aba2012-06-04 20:23:43 +0530144 ath9k_ps_wakeup(sc);
145 pll_sqsum = ar9003_get_pll_sqsum_dvc(sc->sc_ah);
146 ath9k_ps_restore(sc);
147 if (ath_hw_pll_rx_hang_check(sc, pll_sqsum))
148 return;
149
150 ieee80211_queue_delayed_work(sc->hw, &sc->hw_pll_work,
151 msecs_to_jiffies(ATH_PLL_WORK_INTERVAL));
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530152}
153
154/*
155 * RX Polling - monitors baseband hangs.
156 */
157void ath_start_rx_poll(struct ath_softc *sc, u8 nbeacon)
158{
159 if (!AR_SREV_9300(sc->sc_ah))
160 return;
161
Sujith Manoharan781b14a2012-06-04 20:23:55 +0530162 if (!test_bit(SC_OP_PRIM_STA_VIF, &sc->sc_flags))
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530163 return;
164
165 mod_timer(&sc->rx_poll_timer, jiffies + msecs_to_jiffies
166 (nbeacon * sc->cur_beacon_conf.beacon_interval));
167}
168
169void ath_rx_poll(unsigned long data)
170{
171 struct ath_softc *sc = (struct ath_softc *)data;
172
173 ieee80211_queue_work(sc->hw, &sc->hw_check_work);
174}
175
176/*
177 * PA Pre-distortion.
178 */
179static void ath_paprd_activate(struct ath_softc *sc)
180{
181 struct ath_hw *ah = sc->sc_ah;
Sujith Manoharan914d0f42012-12-10 07:22:33 +0530182 struct ath_common *common = ath9k_hw_common(ah);
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530183 struct ath9k_hw_cal_data *caldata = ah->caldata;
184 int chain;
185
Sujith Manoharan914d0f42012-12-10 07:22:33 +0530186 if (!caldata || !caldata->paprd_done) {
187 ath_dbg(common, CALIBRATE, "Failed to activate PAPRD\n");
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530188 return;
Sujith Manoharan914d0f42012-12-10 07:22:33 +0530189 }
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530190
191 ath9k_ps_wakeup(sc);
192 ar9003_paprd_enable(ah, false);
193 for (chain = 0; chain < AR9300_MAX_CHAINS; chain++) {
194 if (!(ah->txchainmask & BIT(chain)))
195 continue;
196
197 ar9003_paprd_populate_single_table(ah, caldata, chain);
198 }
199
Sujith Manoharan914d0f42012-12-10 07:22:33 +0530200 ath_dbg(common, CALIBRATE, "Activating PAPRD\n");
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530201 ar9003_paprd_enable(ah, true);
202 ath9k_ps_restore(sc);
203}
204
/*
 * Transmit one PAPRD training frame on the given TX chain and wait for
 * its completion.
 *
 * Returns true when the frame completed within ATH_PAPRD_TIMEOUT ms.
 * On ath_tx_start() failure the skb is freed here and false is returned;
 * on success the skb is consumed by the TX path. NOTE(review): on a
 * timeout the skb appears to remain owned by the TX path — confirm with
 * the completion handler before reusing it.
 */
static bool ath_paprd_send_frame(struct ath_softc *sc, struct sk_buff *skb, int chain)
{
	struct ieee80211_hw *hw = sc->hw;
	struct ieee80211_tx_info *tx_info = IEEE80211_SKB_CB(skb);
	struct ath_hw *ah = sc->sc_ah;
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath_tx_control txctl;
	int time_left;

	memset(&txctl, 0, sizeof(txctl));
	txctl.txq = sc->tx.txq_map[IEEE80211_AC_BE];

	/* Single MCS0 attempt, no ACK expected: the frame only exists to
	 * drive the PA so the hardware can sample its response. */
	memset(tx_info, 0, sizeof(*tx_info));
	tx_info->band = hw->conf.channel->band;
	tx_info->flags |= IEEE80211_TX_CTL_NO_ACK;
	tx_info->control.rates[0].idx = 0;
	tx_info->control.rates[0].count = 1;
	tx_info->control.rates[0].flags = IEEE80211_TX_RC_MCS;
	tx_info->control.rates[1].idx = -1;

	/* Re-init before ath_tx_start() so the completion cannot be missed. */
	init_completion(&sc->paprd_complete);
	txctl.paprd = BIT(chain);

	if (ath_tx_start(hw, skb, &txctl) != 0) {
		ath_dbg(common, CALIBRATE, "PAPRD TX failed\n");
		dev_kfree_skb_any(skb);
		return false;
	}

	time_left = wait_for_completion_timeout(&sc->paprd_complete,
						msecs_to_jiffies(ATH_PAPRD_TIMEOUT));

	if (!time_left)
		ath_dbg(common, CALIBRATE,
			"Timeout waiting for paprd training on TX chain %d\n",
			chain);

	return !!time_left;
}
244
/*
 * PAPRD training work item: send a null-data training frame on every
 * active TX chain, build the pre-distortion curve from the hardware's
 * measurements, and activate PAPRD once every chain has trained.
 *
 * Skipped until a regular data frame has been sent (paprd_packet_sent)
 * and not repeated after training succeeded (paprd_done).
 */
void ath_paprd_calibrate(struct work_struct *work)
{
	struct ath_softc *sc = container_of(work, struct ath_softc, paprd_work);
	struct ieee80211_hw *hw = sc->hw;
	struct ath_hw *ah = sc->sc_ah;
	struct ieee80211_hdr *hdr;
	struct sk_buff *skb = NULL;
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	struct ath_common *common = ath9k_hw_common(ah);
	int ftype;
	int chain_ok = 0;
	int chain;
	int len = 1800;		/* training frame payload size */
	int ret;

	if (!caldata || !caldata->paprd_packet_sent || caldata->paprd_done) {
		ath_dbg(common, CALIBRATE, "Skipping PAPRD calibration\n");
		return;
	}

	ath9k_ps_wakeup(sc);

	if (ar9003_paprd_init_table(ah) < 0)
		goto fail_paprd;

	skb = alloc_skb(len, GFP_KERNEL);
	if (!skb)
		goto fail_paprd;

	/* Build a zero-filled NULLFUNC data frame addressed to ourselves. */
	skb_put(skb, len);
	memset(skb->data, 0, len);
	hdr = (struct ieee80211_hdr *)skb->data;
	ftype = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_NULLFUNC;
	hdr->frame_control = cpu_to_le16(ftype);
	hdr->duration_id = cpu_to_le16(10);
	memcpy(hdr->addr1, hw->wiphy->perm_addr, ETH_ALEN);
	memcpy(hdr->addr2, hw->wiphy->perm_addr, ETH_ALEN);
	memcpy(hdr->addr3, hw->wiphy->perm_addr, ETH_ALEN);

	for (chain = 0; chain < AR9300_MAX_CHAINS; chain++) {
		if (!(ah->txchainmask & BIT(chain)))
			continue;

		chain_ok = 0;
		ar9003_paprd_setup_gain_table(ah, chain);

		ath_dbg(common, CALIBRATE,
			"Sending PAPRD training frame on chain %d\n", chain);
		/* On failure the skb was freed inside the helper (TX-start
		 * error) or is owned by the TX path; do not free it here. */
		if (!ath_paprd_send_frame(sc, skb, chain))
			goto fail_paprd;

		if (!ar9003_paprd_is_done(ah)) {
			ath_dbg(common, CALIBRATE,
				"PAPRD not yet done on chain %d\n", chain);
			break;
		}

		ret = ar9003_paprd_create_curve(ah, caldata, chain);
		if (ret == -EINPROGRESS) {
			ath_dbg(common, CALIBRATE,
				"PAPRD curve on chain %d needs to be re-trained\n",
				chain);
			break;
		} else if (ret) {
			ath_dbg(common, CALIBRATE,
				"PAPRD create curve failed on chain %d\n",
				chain);
			break;
		}

		chain_ok = 1;
	}
	kfree_skb(skb);

	/* chain_ok is only still set if the LAST active chain trained and
	 * no break occurred, i.e. every active chain succeeded. */
	if (chain_ok) {
		caldata->paprd_done = true;
		ath_paprd_activate(sc);
	}

fail_paprd:
	ath9k_ps_restore(sc);
}
327
/*
 * ANI performs periodic noise floor calibration
 * that is used to adjust and optimize the chip performance. This
 * takes environmental changes (location, temperature) into account.
 * When the task is complete, it reschedules itself depending on the
 * appropriate interval that was calculated.
 */
void ath_ani_calibrate(unsigned long data)
{
	struct ath_softc *sc = (struct ath_softc *)data;
	struct ath_hw *ah = sc->sc_ah;
	struct ath_common *common = ath9k_hw_common(ah);
	bool longcal = false;
	bool shortcal = false;
	bool aniflag = false;
	unsigned int timestamp = jiffies_to_msecs(jiffies);
	u32 cal_interval, short_cal_interval, long_cal_interval;
	unsigned long flags;

	/* Shorten the long-cal interval when interference was seen during
	 * the last noise-floor calibration. */
	if (ah->caldata && ah->caldata->nfcal_interference)
		long_cal_interval = ATH_LONG_CALINTERVAL_INT;
	else
		long_cal_interval = ATH_LONG_CALINTERVAL;

	short_cal_interval = (ah->opmode == NL80211_IFTYPE_AP) ?
		ATH_AP_SHORT_CALINTERVAL : ATH_STA_SHORT_CALINTERVAL;

	/* Only calibrate if awake */
	if (sc->sc_ah->power_mode != ATH9K_PM_AWAKE) {
		/* After too many skipped rounds, ask the PS code to keep the
		 * chip awake so ANI can run on a later pass. */
		if (++ah->ani_skip_count >= ATH_ANI_MAX_SKIP_COUNT) {
			spin_lock_irqsave(&sc->sc_pm_lock, flags);
			sc->ps_flags |= PS_WAIT_FOR_ANI;
			spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
		}
		goto set_timer;
	}
	ah->ani_skip_count = 0;
	spin_lock_irqsave(&sc->sc_pm_lock, flags);
	sc->ps_flags &= ~PS_WAIT_FOR_ANI;
	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);

	ath9k_ps_wakeup(sc);

	/* Long calibration runs independently of short calibration. */
	if ((timestamp - common->ani.longcal_timer) >= long_cal_interval) {
		longcal = true;
		common->ani.longcal_timer = timestamp;
	}

	/* Short calibration applies only while caldone is false */
	if (!common->ani.caldone) {
		if ((timestamp - common->ani.shortcal_timer) >= short_cal_interval) {
			shortcal = true;
			common->ani.shortcal_timer = timestamp;
			common->ani.resetcal_timer = timestamp;
		}
	} else {
		/* Once calibrated, periodically re-validate the stored
		 * calibration and restart it if it went stale. */
		if ((timestamp - common->ani.resetcal_timer) >=
		    ATH_RESTART_CALINTERVAL) {
			common->ani.caldone = ath9k_hw_reset_calvalid(ah);
			if (common->ani.caldone)
				common->ani.resetcal_timer = timestamp;
		}
	}

	/* Verify whether we must check ANI */
	if (sc->sc_ah->config.enable_ani
	    && (timestamp - common->ani.checkani_timer) >=
	    ah->config.ani_poll_interval) {
		aniflag = true;
		common->ani.checkani_timer = timestamp;
	}

	/* Call ANI routine if necessary */
	if (aniflag) {
		spin_lock_irqsave(&common->cc_lock, flags);
		ath9k_hw_ani_monitor(ah, ah->curchan);
		ath_update_survey_stats(sc);
		spin_unlock_irqrestore(&common->cc_lock, flags);
	}

	/* Perform calibration if necessary */
	if (longcal || shortcal) {
		common->ani.caldone =
			ath9k_hw_calibrate(ah, ah->curchan,
					   ah->rxchainmask, longcal);
	}

	ath_dbg(common, ANI,
		"Calibration @%lu finished: %s %s %s, caldone: %s\n",
		jiffies,
		longcal ? "long" : "", shortcal ? "short" : "",
		aniflag ? "ani" : "", common->ani.caldone ? "true" : "false");

	ath9k_debug_samp_bb_mac(sc);
	ath9k_ps_restore(sc);

set_timer:
	/*
	 * Set timer interval based on previous results.
	 * The interval must be the shortest necessary to satisfy ANI,
	 * short calibration and long calibration.
	 */
	cal_interval = ATH_LONG_CALINTERVAL;
	if (sc->sc_ah->config.enable_ani)
		cal_interval = min(cal_interval,
				   (u32)ah->config.ani_poll_interval);
	if (!common->ani.caldone)
		cal_interval = min(cal_interval, (u32)short_cal_interval);

	mod_timer(&common->ani.timer, jiffies + msecs_to_jiffies(cal_interval));
	/* Kick off PAPRD training/activation when the EEPROM says the part
	 * supports it and calibration data is available. */
	if (ah->eep_ops->get_eeprom(ah, EEP_PAPRD) && ah->caldata) {
		if (!ah->caldata->paprd_done)
			ieee80211_queue_work(sc->hw, &sc->paprd_work);
		else if (!ah->paprd_table_write_done)
			ath_paprd_activate(sc);
	}
}
446
Sujith Manoharanda0d45f2012-07-17 17:16:29 +0530447void ath_start_ani(struct ath_softc *sc)
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530448{
Sujith Manoharanda0d45f2012-07-17 17:16:29 +0530449 struct ath_hw *ah = sc->sc_ah;
450 struct ath_common *common = ath9k_hw_common(ah);
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530451 unsigned long timestamp = jiffies_to_msecs(jiffies);
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530452
Sujith Manoharanda0d45f2012-07-17 17:16:29 +0530453 if (common->disable_ani ||
454 !test_bit(SC_OP_ANI_RUN, &sc->sc_flags) ||
455 (sc->hw->conf.flags & IEEE80211_CONF_OFFCHANNEL))
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530456 return;
457
458 common->ani.longcal_timer = timestamp;
459 common->ani.shortcal_timer = timestamp;
460 common->ani.checkani_timer = timestamp;
461
Sujith Manoharanda0d45f2012-07-17 17:16:29 +0530462 ath_dbg(common, ANI, "Starting ANI\n");
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530463 mod_timer(&common->ani.timer,
464 jiffies + msecs_to_jiffies((u32)ah->config.ani_poll_interval));
465}
466
Sujith Manoharanda0d45f2012-07-17 17:16:29 +0530467void ath_stop_ani(struct ath_softc *sc)
468{
469 struct ath_common *common = ath9k_hw_common(sc->sc_ah);
470
471 ath_dbg(common, ANI, "Stopping ANI\n");
472 del_timer_sync(&common->ani.timer);
473}
474
475void ath_check_ani(struct ath_softc *sc)
476{
477 struct ath_hw *ah = sc->sc_ah;
478 struct ath_beacon_config *cur_conf = &sc->cur_beacon_conf;
479
480 /*
481 * Check for the various conditions in which ANI has to
482 * be stopped.
483 */
484 if (ah->opmode == NL80211_IFTYPE_ADHOC) {
485 if (!cur_conf->enable_beacon)
486 goto stop_ani;
487 } else if (ah->opmode == NL80211_IFTYPE_AP) {
488 if (!cur_conf->enable_beacon) {
489 /*
490 * Disable ANI only when there are no
491 * associated stations.
492 */
493 if (!test_bit(SC_OP_PRIM_STA_VIF, &sc->sc_flags))
494 goto stop_ani;
495 }
496 } else if (ah->opmode == NL80211_IFTYPE_STATION) {
497 if (!test_bit(SC_OP_PRIM_STA_VIF, &sc->sc_flags))
498 goto stop_ani;
499 }
500
501 if (!test_bit(SC_OP_ANI_RUN, &sc->sc_flags)) {
502 set_bit(SC_OP_ANI_RUN, &sc->sc_flags);
503 ath_start_ani(sc);
504 }
505
506 return;
507
508stop_ani:
509 clear_bit(SC_OP_ANI_RUN, &sc->sc_flags);
510 ath_stop_ani(sc);
511}
512
Sujith Manoharanef1b6cd2012-06-04 20:23:37 +0530513void ath_update_survey_nf(struct ath_softc *sc, int channel)
514{
515 struct ath_hw *ah = sc->sc_ah;
516 struct ath9k_channel *chan = &ah->channels[channel];
517 struct survey_info *survey = &sc->survey[channel];
518
519 if (chan->noisefloor) {
520 survey->filled |= SURVEY_INFO_NOISE_DBM;
521 survey->noise = ath9k_hw_getchan_noise(ah, chan);
522 }
523}
524
525/*
526 * Updates the survey statistics and returns the busy time since last
527 * update in %, if the measurement duration was long enough for the
528 * result to be useful, -1 otherwise.
529 */
530int ath_update_survey_stats(struct ath_softc *sc)
531{
532 struct ath_hw *ah = sc->sc_ah;
533 struct ath_common *common = ath9k_hw_common(ah);
534 int pos = ah->curchan - &ah->channels[0];
535 struct survey_info *survey = &sc->survey[pos];
536 struct ath_cycle_counters *cc = &common->cc_survey;
537 unsigned int div = common->clockrate * 1000;
538 int ret = 0;
539
540 if (!ah->curchan)
541 return -1;
542
543 if (ah->power_mode == ATH9K_PM_AWAKE)
544 ath_hw_cycle_counters_update(common);
545
546 if (cc->cycles > 0) {
547 survey->filled |= SURVEY_INFO_CHANNEL_TIME |
548 SURVEY_INFO_CHANNEL_TIME_BUSY |
549 SURVEY_INFO_CHANNEL_TIME_RX |
550 SURVEY_INFO_CHANNEL_TIME_TX;
551 survey->channel_time += cc->cycles / div;
552 survey->channel_time_busy += cc->rx_busy / div;
553 survey->channel_time_rx += cc->rx_frame / div;
554 survey->channel_time_tx += cc->tx_frame / div;
555 }
556
557 if (cc->cycles < div)
558 return -1;
559
560 if (cc->cycles > 0)
561 ret = cc->rx_busy * 100 / cc->cycles;
562
563 memset(cc, 0, sizeof(*cc));
564
565 ath_update_survey_nf(sc, pos);
566
567 return ret;
568}