Kalle Valo | 5e3dd15 | 2013-06-12 20:52:10 +0300 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2005-2011 Atheros Communications Inc. |
| 3 | * Copyright (c) 2011-2013 Qualcomm Atheros, Inc. |
| 4 | * |
| 5 | * Permission to use, copy, modify, and/or distribute this software for any |
| 6 | * purpose with or without fee is hereby granted, provided that the above |
| 7 | * copyright notice and this permission notice appear in all copies. |
| 8 | * |
| 9 | * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES |
| 10 | * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF |
| 11 | * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR |
| 12 | * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES |
| 13 | * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN |
| 14 | * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF |
| 15 | * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. |
| 16 | */ |
| 17 | |
| 18 | #include "core.h" |
| 19 | #include "txrx.h" |
| 20 | #include "htt.h" |
| 21 | #include "mac.h" |
| 22 | #include "debug.h" |
| 23 | |
| 24 | static void ath10k_report_offchan_tx(struct ath10k *ar, struct sk_buff *skb) |
| 25 | { |
Michal Kazior | e0813d3 | 2015-11-18 06:59:18 +0100 | [diff] [blame] | 26 | struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb); |
| 27 | |
| 28 | if (likely(!(info->flags & IEEE80211_TX_CTL_TX_OFFCHAN))) |
| 29 | return; |
| 30 | |
| 31 | if (ath10k_mac_tx_frm_has_freq(ar)) |
Kalle Valo | 5e3dd15 | 2013-06-12 20:52:10 +0300 | [diff] [blame] | 32 | return; |
| 33 | |
| 34 | /* If the original wait_for_completion() timed out before |
| 35 | * {data,mgmt}_tx_completed() was called then we could complete |
| 36 | * offchan_tx_completed for a different skb. Prevent this by using |
| 37 | * offchan_tx_skb. */ |
| 38 | spin_lock_bh(&ar->data_lock); |
| 39 | if (ar->offchan_tx_skb != skb) { |
Michal Kazior | 7aa7a72 | 2014-08-25 12:09:38 +0200 | [diff] [blame] | 40 | ath10k_warn(ar, "completed old offchannel frame\n"); |
Kalle Valo | 5e3dd15 | 2013-06-12 20:52:10 +0300 | [diff] [blame] | 41 | goto out; |
| 42 | } |
| 43 | |
| 44 | complete(&ar->offchan_tx_completed); |
| 45 | ar->offchan_tx_skb = NULL; /* just for sanity */ |
| 46 | |
Michal Kazior | 7aa7a72 | 2014-08-25 12:09:38 +0200 | [diff] [blame] | 47 | ath10k_dbg(ar, ATH10K_DBG_HTT, "completed offchannel skb %p\n", skb); |
Kalle Valo | 5e3dd15 | 2013-06-12 20:52:10 +0300 | [diff] [blame] | 48 | out: |
| 49 | spin_unlock_bh(&ar->data_lock); |
| 50 | } |
| 51 | |
/* Complete one MSDU transmission reported by the firmware via HTT.
 *
 * Looks up the pending skb by tx_done->msdu_id, releases the driver's tx
 * bookkeeping (msdu_id, pending-tx credit, per-txq firmware-queued count),
 * tears down the DMA mapping and hands the frame's tx status to mac80211.
 *
 * Returns 0 on success, -EINVAL for an out-of-range msdu_id, or -ENOENT if
 * no pending skb matches the id.
 */
int ath10k_txrx_tx_unref(struct ath10k_htt *htt,
			 const struct htt_tx_done *tx_done)
{
	struct ath10k *ar = htt->ar;
	struct device *dev = ar->dev;
	struct ieee80211_tx_info *info;
	struct ieee80211_txq *txq;
	struct ath10k_skb_cb *skb_cb;
	struct ath10k_txq *artxq;
	struct sk_buff *msdu;

	ath10k_dbg(ar, ATH10K_DBG_HTT,
		   "htt tx completion msdu_id %u status %d\n",
		   tx_done->msdu_id, tx_done->status);

	/* The id comes from the firmware; reject values outside the range of
	 * ids this driver could have handed out.
	 */
	if (tx_done->msdu_id >= htt->max_num_pending_tx) {
		ath10k_warn(ar, "warning: msdu_id %d too big, ignoring\n",
			    tx_done->msdu_id);
		return -EINVAL;
	}

	spin_lock_bh(&htt->tx_lock);
	/* Map the completion's msdu_id back to the pending skb. */
	msdu = idr_find(&htt->pending_tx, tx_done->msdu_id);
	if (!msdu) {
		ath10k_warn(ar, "received tx completion for invalid msdu_id: %d\n",
			    tx_done->msdu_id);
		spin_unlock_bh(&htt->tx_lock);
		return -ENOENT;
	}

	skb_cb = ATH10K_SKB_CB(msdu);
	txq = skb_cb->txq;

	/* Frames without an associated mac80211 txq (e.g. internal ones) have
	 * skb_cb->txq == NULL; only queue-backed frames are counted.
	 */
	if (txq) {
		artxq = (void *)txq->drv_priv;
		artxq->num_fw_queued--;
	}

	/* Release the id and the pending-tx credit; wake anyone waiting for
	 * the tx queue to drain (e.g. during flush/teardown).
	 */
	ath10k_htt_tx_free_msdu_id(htt, tx_done->msdu_id);
	ath10k_htt_tx_dec_pending(htt);
	if (htt->num_pending_tx == 0)
		wake_up(&htt->empty_tx_wq);
	spin_unlock_bh(&htt->tx_lock);

	/* Device is done with the buffer; undo the streaming DMA mapping
	 * before the CPU (or mac80211) touches the skb again.
	 */
	dma_unmap_single(dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE);

	ath10k_report_offchan_tx(htt->ar, msdu);

	info = IEEE80211_SKB_CB(msdu);
	/* Clear stale status before filling in the ACK flags below. */
	memset(&info->status, 0, sizeof(info->status));
	trace_ath10k_txrx_tx_unref(ar, tx_done->msdu_id);

	/* Discarded frames are freed without reporting a tx status. */
	if (tx_done->status == HTT_TX_COMPL_STATE_DISCARD) {
		ieee80211_free_txskb(htt->ar->hw, msdu);
		return 0;
	}

	/* Assume success for frames expecting an ack ... */
	if (!(info->flags & IEEE80211_TX_CTL_NO_ACK))
		info->flags |= IEEE80211_TX_STAT_ACK;

	/* ... then take it back if the firmware reported no ack. */
	if (tx_done->status == HTT_TX_COMPL_STATE_NOACK)
		info->flags &= ~IEEE80211_TX_STAT_ACK;

	/* A no-ack frame that the firmware reports as acked was at least
	 * transmitted successfully.
	 */
	if ((tx_done->status == HTT_TX_COMPL_STATE_ACK) &&
	    (info->flags & IEEE80211_TX_CTL_NO_ACK))
		info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED;

	ieee80211_tx_status(htt->ar->hw, msdu);
	/* we do not own the msdu anymore */

	/* A tx slot just freed up; try to push more queued frames. */
	ath10k_mac_tx_push_pending(ar);

	return 0;
}
| 126 | |
Kalle Valo | 5e3dd15 | 2013-06-12 20:52:10 +0300 | [diff] [blame] | 127 | struct ath10k_peer *ath10k_peer_find(struct ath10k *ar, int vdev_id, |
| 128 | const u8 *addr) |
| 129 | { |
| 130 | struct ath10k_peer *peer; |
| 131 | |
| 132 | lockdep_assert_held(&ar->data_lock); |
| 133 | |
| 134 | list_for_each_entry(peer, &ar->peers, list) { |
| 135 | if (peer->vdev_id != vdev_id) |
| 136 | continue; |
Kalle Valo | c178da5 | 2016-04-13 14:13:49 +0300 | [diff] [blame] | 137 | if (!ether_addr_equal(peer->addr, addr)) |
Kalle Valo | 5e3dd15 | 2013-06-12 20:52:10 +0300 | [diff] [blame] | 138 | continue; |
| 139 | |
| 140 | return peer; |
| 141 | } |
| 142 | |
| 143 | return NULL; |
| 144 | } |
| 145 | |
Michal Kazior | aa5b4fb | 2014-07-23 12:20:33 +0200 | [diff] [blame] | 146 | struct ath10k_peer *ath10k_peer_find_by_id(struct ath10k *ar, int peer_id) |
Kalle Valo | 5e3dd15 | 2013-06-12 20:52:10 +0300 | [diff] [blame] | 147 | { |
| 148 | struct ath10k_peer *peer; |
| 149 | |
| 150 | lockdep_assert_held(&ar->data_lock); |
| 151 | |
| 152 | list_for_each_entry(peer, &ar->peers, list) |
| 153 | if (test_bit(peer_id, peer->peer_ids)) |
| 154 | return peer; |
| 155 | |
| 156 | return NULL; |
| 157 | } |
| 158 | |
/* Wait (up to 3 seconds) until a peer entry for (vdev_id, addr) either
 * appears (expect_mapped == true) or disappears (expect_mapped == false)
 * from ar->peers.  Also returns early if the device has crashed
 * (ATH10K_FLAG_CRASH_FLUSH), so callers do not block on a dead firmware.
 *
 * Returns 0 when the condition was met, -ETIMEDOUT otherwise.
 */
static int ath10k_wait_for_peer_common(struct ath10k *ar, int vdev_id,
				       const u8 *addr, bool expect_mapped)
{
	long time_left;

	/* The condition is a GCC statement expression; its final expression
	 * is what wait_event_timeout() evaluates on every wakeup.
	 */
	time_left = wait_event_timeout(ar->peer_mapping_wq, ({
			bool mapped;

			spin_lock_bh(&ar->data_lock);
			mapped = !!ath10k_peer_find(ar, vdev_id, addr);
			spin_unlock_bh(&ar->data_lock);

			(mapped == expect_mapped ||
			 test_bit(ATH10K_FLAG_CRASH_FLUSH, &ar->dev_flags));
		}), 3 * HZ);

	if (time_left == 0)
		return -ETIMEDOUT;

	return 0;
}
| 180 | |
/* Block until the peer (vdev_id, addr) shows up in ar->peers, or time out
 * after 3 seconds.  Returns 0 or -ETIMEDOUT.
 */
int ath10k_wait_for_peer_created(struct ath10k *ar, int vdev_id, const u8 *addr)
{
	return ath10k_wait_for_peer_common(ar, vdev_id, addr, true);
}
| 185 | |
/* Block until the peer (vdev_id, addr) is gone from ar->peers, or time out
 * after 3 seconds.  Returns 0 or -ETIMEDOUT.
 */
int ath10k_wait_for_peer_deleted(struct ath10k *ar, int vdev_id, const u8 *addr)
{
	return ath10k_wait_for_peer_common(ar, vdev_id, addr, false);
}
| 190 | |
/* Handle an HTT peer map event from the firmware: associate ev->peer_id
 * with the (vdev_id, MAC address) peer, creating the peer entry on its
 * first mapping and waking any ath10k_wait_for_peer_created() waiters.
 */
void ath10k_peer_map_event(struct ath10k_htt *htt,
			   struct htt_peer_map_event *ev)
{
	struct ath10k *ar = htt->ar;
	struct ath10k_peer *peer;

	/* peer_id indexes ar->peer_map[]; reject out-of-range ids coming
	 * from the firmware before using them.
	 */
	if (ev->peer_id >= ATH10K_MAX_NUM_PEER_IDS) {
		ath10k_warn(ar,
			    "received htt peer map event with idx out of bounds: %hu\n",
			    ev->peer_id);
		return;
	}

	spin_lock_bh(&ar->data_lock);
	peer = ath10k_peer_find(ar, ev->vdev_id, ev->addr);
	if (!peer) {
		/* First mapping for this peer: create the entry.  GFP_ATOMIC
		 * because data_lock is held.
		 */
		peer = kzalloc(sizeof(*peer), GFP_ATOMIC);
		if (!peer)
			goto exit;

		peer->vdev_id = ev->vdev_id;
		ether_addr_copy(peer->addr, ev->addr);
		list_add(&peer->list, &ar->peers);
		wake_up(&ar->peer_mapping_wq);
	}

	ath10k_dbg(ar, ATH10K_DBG_HTT, "htt peer map vdev %d peer %pM id %d\n",
		   ev->vdev_id, ev->addr, ev->peer_id);

	/* A peer id must not point at two different peers at once. */
	WARN_ON(ar->peer_map[ev->peer_id] && (ar->peer_map[ev->peer_id] != peer));
	ar->peer_map[ev->peer_id] = peer;
	set_bit(ev->peer_id, peer->peer_ids);
exit:
	spin_unlock_bh(&ar->data_lock);
}
| 226 | |
/* Handle an HTT peer unmap event from the firmware: drop the ev->peer_id
 * mapping, and free the peer entry once its last id is unmapped, waking
 * any ath10k_wait_for_peer_deleted() waiters.
 */
void ath10k_peer_unmap_event(struct ath10k_htt *htt,
			     struct htt_peer_unmap_event *ev)
{
	struct ath10k *ar = htt->ar;
	struct ath10k_peer *peer;

	/* peer_id indexes ar->peer_map[]; reject out-of-range ids coming
	 * from the firmware before using them.
	 */
	if (ev->peer_id >= ATH10K_MAX_NUM_PEER_IDS) {
		ath10k_warn(ar,
			    "received htt peer unmap event with idx out of bounds: %hu\n",
			    ev->peer_id);
		return;
	}

	spin_lock_bh(&ar->data_lock);
	peer = ath10k_peer_find_by_id(ar, ev->peer_id);
	if (!peer) {
		/* Unmap for an id we never saw mapped; nothing to drop. */
		ath10k_warn(ar, "peer-unmap-event: unknown peer id %d\n",
			    ev->peer_id);
		goto exit;
	}

	ath10k_dbg(ar, ATH10K_DBG_HTT, "htt peer unmap vdev %d peer %pM id %d\n",
		   peer->vdev_id, peer->addr, ev->peer_id);

	ar->peer_map[ev->peer_id] = NULL;
	clear_bit(ev->peer_id, peer->peer_ids);

	/* Free the peer only when its last firmware id is gone. */
	if (bitmap_empty(peer->peer_ids, ATH10K_MAX_NUM_PEER_IDS)) {
		list_del(&peer->list);
		kfree(peer);
		wake_up(&ar->peer_mapping_wq);
	}

exit:
	spin_unlock_bh(&ar->data_lock);
}