Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 1 | /* |
| 2 | BlueZ - Bluetooth protocol stack for Linux |
| 3 | Copyright (C) 2014 Intel Corporation |
| 4 | |
| 5 | This program is free software; you can redistribute it and/or modify |
| 6 | it under the terms of the GNU General Public License version 2 as |
| 7 | published by the Free Software Foundation; |
| 8 | |
| 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| 10 | OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 11 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. |
| 12 | IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) AND AUTHOR(S) BE LIABLE FOR ANY |
| 13 | CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES |
| 14 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN |
| 15 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF |
| 16 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. |
| 17 | |
| 18 | ALL LIABILITY, INCLUDING LIABILITY FOR INFRINGEMENT OF ANY PATENTS, |
| 19 | COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS, RELATING TO USE OF THIS |
| 20 | SOFTWARE IS DISCLAIMED. |
| 21 | */ |
| 22 | |
Michał Narajowski | 1b42206 | 2016-10-05 12:28:27 +0200 | [diff] [blame] | 23 | #include <asm/unaligned.h> |
| 24 | |
Johan Hedberg | b504430 | 2015-11-10 09:44:55 +0200 | [diff] [blame] | 25 | #define hci_req_sync_lock(hdev) mutex_lock(&hdev->req_lock) |
| 26 | #define hci_req_sync_unlock(hdev) mutex_unlock(&hdev->req_lock) |
Johan Hedberg | be91cd0 | 2015-11-10 09:44:54 +0200 | [diff] [blame] | 27 | |
/* A batch of HCI commands queued locally (in cmd_q) before being
 * submitted to the controller @hdev as a unit via hci_req_run().
 */
struct hci_request {
	struct hci_dev *hdev;
	struct sk_buff_head cmd_q;

	/* If something goes wrong when building the HCI request, the error
	 * value is stored in this field.
	 */
	int err;
};
| 37 | |
/* Prepare @req for queueing commands against @hdev */
void hci_req_init(struct hci_request *req, struct hci_dev *hdev);
/* Submit the queued commands; @complete is invoked when the request
 * finishes (NOTE(review): exact completion semantics live in the .c file).
 */
int hci_req_run(struct hci_request *req, hci_req_complete_t complete);
/* As hci_req_run(), but the completion callback also receives an skb */
int hci_req_run_skb(struct hci_request *req, hci_req_complete_skb_t complete);
/* Queue one command with opcode @opcode and @plen bytes of @param */
void hci_req_add(struct hci_request *req, u16 opcode, u32 plen,
		 const void *param);
/* As hci_req_add(); @event presumably names the HCI event that signals
 * completion instead of the default — confirm against the implementation.
 */
void hci_req_add_ev(struct hci_request *req, u16 opcode, u32 plen,
		    const void *param, u8 event);
/* Retrieve the completion callbacks associated with @opcode/@status */
void hci_req_cmd_complete(struct hci_dev *hdev, u16 opcode, u8 status,
			  hci_req_complete_t *req_complete,
			  hci_req_complete_skb_t *req_complete_skb);

/* Build a request via @req()/@func() and run it synchronously, waiting
 * up to @timeout; the controller status is reported through @hci_status.
 * NOTE(review): per the usual kernel convention the __ variant likely
 * expects hdev->req_lock (hci_req_sync_lock) already held — verify.
 */
int hci_req_sync(struct hci_dev *hdev, int (*req)(struct hci_request *req,
						  unsigned long opt),
		 unsigned long opt, u32 timeout, u8 *hci_status);
int __hci_req_sync(struct hci_dev *hdev, int (*func)(struct hci_request *req,
						     unsigned long opt),
		   unsigned long opt, u32 timeout, u8 *hci_status);
/* Abort an in-flight synchronous request with error @err */
void hci_req_sync_cancel(struct hci_dev *hdev, int err);

/* Allocate an skb holding a fully formed HCI command packet */
struct sk_buff *hci_prepare_cmd(struct hci_dev *hdev, u16 opcode, u32 plen,
				const void *param);
| 59 | |
int __hci_req_hci_power_on(struct hci_dev *hdev);

/* The helpers below queue the relevant HCI commands onto an existing
 * request; double-underscore variants leave running the request (and,
 * presumably, any required locking) to the caller.
 */
void __hci_req_write_fast_connectable(struct hci_request *req, bool enable);
void __hci_req_update_name(struct hci_request *req);
void __hci_req_update_eir(struct hci_request *req);

void hci_req_add_le_scan_disable(struct hci_request *req);
void hci_req_add_le_passive_scan(struct hci_request *req);

void hci_req_reenable_advertising(struct hci_dev *hdev);
void __hci_req_enable_advertising(struct hci_request *req);
void __hci_req_disable_advertising(struct hci_request *req);
/* @instance selects which advertising instance's data is affected */
void __hci_req_update_adv_data(struct hci_request *req, u8 instance);
int hci_req_update_adv_data(struct hci_dev *hdev, u8 instance);
void __hci_req_update_scan_rsp_data(struct hci_request *req, u8 instance);

int __hci_req_schedule_adv_instance(struct hci_request *req, u8 instance,
				    bool force);
void hci_req_clear_adv_instance(struct hci_dev *hdev, struct sock *sk,
				struct hci_request *req, u8 instance,
				bool force);

void __hci_req_update_class(struct hci_request *req);

/* Returns true if HCI commands were queued */
bool hci_req_stop_discovery(struct hci_request *req);
| 86 | |
/* Schedule an asynchronous scan-mode update by queueing hdev's
 * scan_update work item on the request workqueue; the HCI commands
 * themselves are presumably built by __hci_req_update_scan() below.
 */
static inline void hci_req_update_scan(struct hci_dev *hdev)
{
	queue_work(hdev->req_workqueue, &hdev->scan_update);
}
| 91 | |
void __hci_req_update_scan(struct hci_request *req);

/* Queue commands to set an appropriate own address; the chosen address
 * type is returned through @own_addr_type.  @use_rpa presumably selects
 * a resolvable private address — confirm against the implementation.
 */
int hci_update_random_address(struct hci_request *req, bool require_privacy,
			      bool use_rpa, u8 *own_addr_type);

/* Tear down @conn with HCI reason code @reason; the __ variant queues
 * onto an existing request instead of running one itself.
 */
int hci_abort_conn(struct hci_conn *conn, u8 reason);
void __hci_abort_conn(struct hci_request *req, struct hci_conn *conn,
		      u8 reason);
Johan Hedberg | 5fc16cc | 2015-11-11 08:11:16 +0200 | [diff] [blame] | 100 | |
/* Schedule an asynchronous background-scan update by queueing hdev's
 * bg_scan_update work item on the request workqueue.
 */
static inline void hci_update_background_scan(struct hci_dev *hdev)
{
	queue_work(hdev->req_workqueue, &hdev->bg_scan_update);
}

/* One-time setup / teardown of the request infrastructure for @hdev */
void hci_request_setup(struct hci_dev *hdev);
void hci_request_cancel_all(struct hci_dev *hdev);

/* Append the local name to the buffer at @ptr; @ad_len is presumably
 * the current advertising-data length — returns the updated length.
 */
u8 append_local_name(struct hci_dev *hdev, u8 *ptr, u8 ad_len);
| 110 | |
Michał Narajowski | 1b42206 | 2016-10-05 12:28:27 +0200 | [diff] [blame] | 111 | static inline u16 eir_append_data(u8 *eir, u16 eir_len, u8 type, |
| 112 | u8 *data, u8 data_len) |
| 113 | { |
| 114 | eir[eir_len++] = sizeof(type) + data_len; |
| 115 | eir[eir_len++] = type; |
| 116 | memcpy(&eir[eir_len], data, data_len); |
| 117 | eir_len += data_len; |
| 118 | |
| 119 | return eir_len; |
| 120 | } |
| 121 | |
| 122 | static inline u16 eir_append_le16(u8 *eir, u16 eir_len, u8 type, u16 data) |
| 123 | { |
| 124 | eir[eir_len++] = sizeof(type) + sizeof(data); |
| 125 | eir[eir_len++] = type; |
| 126 | put_unaligned_le16(data, &eir[eir_len]); |
| 127 | eir_len += sizeof(data); |
| 128 | |
| 129 | return eir_len; |
| 130 | } |