/*
 * Copyright (c) 2014 Qualcomm Atheros, Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include "ath9k.h"

/* Set/change channels. If the channel is really being changed, it's done
 * by resetting the chip. To accomplish this we must first clean up any
 * pending DMA, then restart everything.
 */
static int ath_set_channel(struct ath_softc *sc)
{
	struct ath_hw *ah = sc->sc_ah;
	struct ath_common *common = ath9k_hw_common(ah);
	struct ieee80211_hw *hw = sc->hw;
	struct ath9k_channel *hchan;
	struct cfg80211_chan_def *chandef = &sc->cur_chan->chandef;
	struct ieee80211_channel *chan = chandef->chan;
	int pos = chan->hw_value;
	int old_pos = -1;
	int r;

	if (test_bit(ATH_OP_INVALID, &common->op_flags))
		return -EIO;

	if (ah->curchan)
		old_pos = ah->curchan - &ah->channels[0];

	ath_dbg(common, CONFIG, "Set channel: %d MHz width: %d\n",
		chan->center_freq, chandef->width);

	/* update survey stats for the old channel before switching */
	spin_lock_bh(&common->cc_lock);
	ath_update_survey_stats(sc);
	spin_unlock_bh(&common->cc_lock);

	ath9k_cmn_get_channel(hw, ah, chandef);

	/* If the operating channel changes, change the survey in-use flags
	 * along with it.
	 * Reset the survey data for the new channel, unless we're switching
	 * back to the operating channel from an off-channel operation.
	 */
	if (!sc->cur_chan->offchannel && sc->cur_survey != &sc->survey[pos]) {
		if (sc->cur_survey)
			sc->cur_survey->filled &= ~SURVEY_INFO_IN_USE;

		sc->cur_survey = &sc->survey[pos];

		memset(sc->cur_survey, 0, sizeof(struct survey_info));
		sc->cur_survey->filled |= SURVEY_INFO_IN_USE;
	} else if (!(sc->survey[pos].filled & SURVEY_INFO_IN_USE)) {
		memset(&sc->survey[pos], 0, sizeof(struct survey_info));
	}

	hchan = &sc->sc_ah->channels[pos];
	r = ath_reset_internal(sc, hchan);
	if (r)
		return r;

	/* The most recent snapshot of channel->noisefloor for the old
	 * channel is only available after the hardware reset. Copy it to
	 * the survey stats now.
	 */
	if (old_pos >= 0)
		ath_update_survey_nf(sc, old_pos);

	/* Enable radar pulse detection if on a DFS channel. Spectral
	 * scanning and radar detection cannot be used concurrently.
	 */
	if (hw->conf.radar_enabled) {
		u32 rxfilter;

		/* set HW specific DFS configuration */
		ath9k_hw_set_radar_params(ah);
		rxfilter = ath9k_hw_getrxfilter(ah);
		rxfilter |= ATH9K_RX_FILTER_PHYRADAR |
			    ATH9K_RX_FILTER_PHYERR;
		ath9k_hw_setrxfilter(ah, rxfilter);
		ath_dbg(common, DFS, "DFS enabled at freq %d\n",
			chan->center_freq);
	} else {
		/* perform spectral scan if requested. */
		if (test_bit(ATH_OP_SCANNING, &common->op_flags) &&
		    sc->spectral_mode == SPECTRAL_CHANSCAN)
			ath9k_spectral_scan_trigger(hw);
	}

	return 0;
}

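/* Announce a powersave state change to the AP on behalf of a station vif:
 * build a nullfunc frame (with the PM bit set when entering powersave) and
 * queue it on the VO queue of the current channel context.
 * Returns true if the frame was handed to the TX path.
 */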
static bool
ath_chanctx_send_vif_ps_frame(struct ath_softc *sc, struct ath_vif *avp,
			      bool powersave)
{
	struct ieee80211_vif *vif = avp->vif;
	struct ieee80211_sta *sta = NULL;
	struct ieee80211_hdr_3addr *nullfunc;
	struct ath_tx_control txctl;
	struct sk_buff *skb;
	int band = sc->cur_chan->chandef.chan->band;

	switch (vif->type) {
	case NL80211_IFTYPE_STATION:
		if (!vif->bss_conf.assoc)
			return false;

		skb = ieee80211_nullfunc_get(sc->hw, vif);
		if (!skb)
			return false;

		nullfunc = (struct ieee80211_hdr_3addr *) skb->data;
		if (powersave)
			nullfunc->frame_control |=
				cpu_to_le16(IEEE80211_FCTL_PM);

		skb_set_queue_mapping(skb, IEEE80211_AC_VO);
		if (!ieee80211_tx_prepare_skb(sc->hw, vif, skb, band, &sta)) {
			dev_kfree_skb_any(skb);
			return false;
		}
		break;
	default:
		return false;
	}

	memset(&txctl, 0, sizeof(txctl));
	txctl.txq = sc->tx.txq_map[IEEE80211_AC_VO];
	txctl.sta = sta;
	txctl.force_channel = true;
	if (ath_tx_start(sc->hw, skb, &txctl)) {
		ieee80211_free_txskb(sc->hw, skb);
		return false;
	}

	return true;
}

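/* Mark a context as active if any of its vifs is operational (an associated
 * station/P2P client, or any other interface type), and update the
 * ATH_OP_MULTI_CHANNEL flag based on how many assigned contexts currently
 * hold vifs.
 */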
void ath_chanctx_check_active(struct ath_softc *sc, struct ath_chanctx *ctx)
{
	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
	struct ath_vif *avp;
	bool active = false;
	u8 n_active = 0;

	if (!ctx)
		return;

	list_for_each_entry(avp, &ctx->vifs, list) {
		struct ieee80211_vif *vif = avp->vif;

		switch (vif->type) {
		case NL80211_IFTYPE_P2P_CLIENT:
		case NL80211_IFTYPE_STATION:
			if (vif->bss_conf.assoc)
				active = true;
			break;
		default:
			active = true;
			break;
		}
	}
	ctx->active = active;

	ath_for_each_chanctx(sc, ctx) {
		if (!ctx->assigned || list_empty(&ctx->vifs))
			continue;
		n_active++;
	}

	if (n_active > 1)
		set_bit(ATH_OP_MULTI_CHANNEL, &common->op_flags);
	else
		clear_bit(ATH_OP_MULTI_CHANNEL, &common->op_flags);
}

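/* Send a powersave announcement for every vif on the current channel
 * context. Returns true if at least one frame was queued.
 */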
static bool
ath_chanctx_send_ps_frame(struct ath_softc *sc, bool powersave)
{
	struct ath_vif *avp;
	bool sent = false;

	rcu_read_lock();
	list_for_each_entry(avp, &sc->cur_chan->vifs, list) {
		if (ath_chanctx_send_vif_ps_frame(sc, avp, powersave))
			sent = true;
	}
	rcu_read_unlock();

	return sent;
}

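/* Deferred channel switch: stop the current context, flush the TX queues,
 * announce powersave to the peers and record the old context's TSF, then
 * make next_chan current, reset the hardware if the channel (or sleep
 * state) actually changed, wake the peers again and notify the offchannel
 * state machine.
 */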
void ath_chanctx_work(struct work_struct *work)
{
	struct ath_softc *sc = container_of(work, struct ath_softc,
					    chanctx_work);
	bool send_ps = false;

	mutex_lock(&sc->mutex);
	spin_lock_bh(&sc->chan_lock);
	if (!sc->next_chan) {
		spin_unlock_bh(&sc->chan_lock);
		mutex_unlock(&sc->mutex);
		return;
	}

	if (sc->cur_chan != sc->next_chan) {
		sc->cur_chan->stopped = true;
		spin_unlock_bh(&sc->chan_lock);

		__ath9k_flush(sc->hw, ~0, true);

		if (ath_chanctx_send_ps_frame(sc, true))
			__ath9k_flush(sc->hw, BIT(IEEE80211_AC_VO), false);

		send_ps = true;
		spin_lock_bh(&sc->chan_lock);

		if (sc->cur_chan != &sc->offchannel.chan) {
			getrawmonotonic(&sc->cur_chan->tsf_ts);
			sc->cur_chan->tsf_val = ath9k_hw_gettsf64(sc->sc_ah);
		}
	}
	sc->cur_chan = sc->next_chan;
	sc->cur_chan->stopped = false;
	sc->next_chan = NULL;
	spin_unlock_bh(&sc->chan_lock);

	if (sc->sc_ah->chip_fullsleep ||
	    memcmp(&sc->cur_chandef, &sc->cur_chan->chandef,
		   sizeof(sc->cur_chandef)))
		ath_set_channel(sc);

	if (send_ps)
		ath_chanctx_send_ps_frame(sc, false);

	ath_offchannel_channel_change(sc);
	mutex_unlock(&sc->mutex);
}

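/* Initialize all channel contexts (including the offchannel context) with a
 * default HT20 chandef on the first available channel, maximum TX power and
 * empty vif/queue lists.
 */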
void ath_chanctx_init(struct ath_softc *sc)
{
	struct ath_chanctx *ctx;
	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
	struct ieee80211_supported_band *sband;
	struct ieee80211_channel *chan;
	int i, j;

	sband = &common->sbands[IEEE80211_BAND_2GHZ];
	if (!sband->n_channels)
		sband = &common->sbands[IEEE80211_BAND_5GHZ];

	chan = &sband->channels[0];
	for (i = 0; i < ATH9K_NUM_CHANCTX; i++) {
		ctx = &sc->chanctx[i];
		cfg80211_chandef_create(&ctx->chandef, chan, NL80211_CHAN_HT20);
		INIT_LIST_HEAD(&ctx->vifs);
		ctx->txpower = ATH_TXPOWER_MAX;
		for (j = 0; j < ARRAY_SIZE(ctx->acq); j++)
			INIT_LIST_HEAD(&ctx->acq[j]);
	}
	sc->cur_chan = &sc->chanctx[0];
	ctx = &sc->offchannel.chan;
	cfg80211_chandef_create(&ctx->chandef, chan, NL80211_CHAN_HT20);
	INIT_LIST_HEAD(&ctx->vifs);
	ctx->txpower = ATH_TXPOWER_MAX;
	for (j = 0; j < ARRAY_SIZE(ctx->acq); j++)
		INIT_LIST_HEAD(&ctx->acq[j]);
	sc->offchannel.chan.offchannel = true;
}

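/* Request a switch to the given context (optionally updating its chandef)
 * and defer the actual channel change to ath_chanctx_work.
 */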
void ath_chanctx_switch(struct ath_softc *sc, struct ath_chanctx *ctx,
			struct cfg80211_chan_def *chandef)
{
	spin_lock_bh(&sc->chan_lock);
	sc->next_chan = ctx;
	if (chandef)
		ctx->chandef = *chandef;
	spin_unlock_bh(&sc->chan_lock);
	ieee80211_queue_work(sc->hw, &sc->chanctx_work);
}

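/* Update a context's chandef and, if it is the currently active context,
 * apply the new channel to the hardware immediately.
 */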
void ath_chanctx_set_channel(struct ath_softc *sc, struct ath_chanctx *ctx,
			     struct cfg80211_chan_def *chandef)
{
	bool cur_chan;

	spin_lock_bh(&sc->chan_lock);
	if (chandef)
		memcpy(&ctx->chandef, chandef, sizeof(*chandef));
	cur_chan = sc->cur_chan == ctx;
	spin_unlock_bh(&sc->chan_lock);

	if (!cur_chan)
		return;

	ath_set_channel(sc);
}

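/* Return the first assigned operating context that holds vifs (optionally
 * only if it is marked active), falling back to the first context.
 */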
struct ath_chanctx *ath_chanctx_get_oper_chan(struct ath_softc *sc, bool active)
{
	struct ath_chanctx *ctx;

	ath_for_each_chanctx(sc, ctx) {
		if (!ctx->assigned || list_empty(&ctx->vifs))
			continue;
		if (active && !ctx->active)
			continue;

		return ctx;
	}

	return &sc->chanctx[0];
}

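/* Switch to the dedicated offchannel context on the given channel, using a
 * non-HT chandef.
 */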
void ath_chanctx_offchan_switch(struct ath_softc *sc,
				struct ieee80211_channel *chan)
{
	struct cfg80211_chan_def chandef;

	cfg80211_chandef_create(&chandef, chan, NL80211_CHAN_NO_HT);

	ath_chanctx_switch(sc, &sc->offchannel.chan, &chandef);
338}