Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 1 | /* |
| 2 | BlueZ - Bluetooth protocol stack for Linux |
| 3 | Copyright (C) 2014 Intel Corporation |
| 4 | |
| 5 | This program is free software; you can redistribute it and/or modify |
| 6 | it under the terms of the GNU General Public License version 2 as |
| 7 | published by the Free Software Foundation; |
| 8 | |
| 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| 10 | OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 11 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. |
| 12 | IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) AND AUTHOR(S) BE LIABLE FOR ANY |
| 13 | CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES |
| 14 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN |
| 15 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF |
| 16 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. |
| 17 | |
| 18 | ALL LIABILITY, INCLUDING LIABILITY FOR INFRINGEMENT OF ANY PATENTS, |
| 19 | COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS, RELATING TO USE OF THIS |
| 20 | SOFTWARE IS DISCLAIMED. |
| 21 | */ |
| 22 | |
Michał Narajowski | 1b42206 | 2016-10-05 12:28:27 +0200 | [diff] [blame] | 23 | #include <asm/unaligned.h> |
| 24 | |
/* Serialize users of the synchronous HCI request API behind
 * hdev->req_lock; only one sync request may be in flight per controller.
 */
#define hci_req_sync_lock(hdev)   mutex_lock(&hdev->req_lock)
#define hci_req_sync_unlock(hdev) mutex_unlock(&hdev->req_lock)
Johan Hedberg | be91cd0 | 2015-11-10 09:44:54 +0200 | [diff] [blame] | 27 | |
/* A batch of HCI commands being assembled for one controller.
 * Commands are accumulated in cmd_q and submitted together by
 * hci_req_run()/hci_req_run_skb().
 */
struct hci_request {
	struct hci_dev *hdev;		/* controller the request targets */
	struct sk_buff_head cmd_q;	/* queued command skbs, in send order */

	/* If something goes wrong when building the HCI request, the error
	 * value is stored in this field.
	 */
	int err;
};
| 37 | |
| 38 | void hci_req_init(struct hci_request *req, struct hci_dev *hdev); |
Jaganath Kanakkassery | f17d858 | 2017-10-25 10:58:48 +0530 | [diff] [blame] | 39 | void hci_req_purge(struct hci_request *req); |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 40 | int hci_req_run(struct hci_request *req, hci_req_complete_t complete); |
Johan Hedberg | e6214487 | 2015-04-02 13:41:08 +0300 | [diff] [blame] | 41 | int hci_req_run_skb(struct hci_request *req, hci_req_complete_skb_t complete); |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 42 | void hci_req_add(struct hci_request *req, u16 opcode, u32 plen, |
| 43 | const void *param); |
| 44 | void hci_req_add_ev(struct hci_request *req, u16 opcode, u32 plen, |
| 45 | const void *param, u8 event); |
Johan Hedberg | e6214487 | 2015-04-02 13:41:08 +0300 | [diff] [blame] | 46 | void hci_req_cmd_complete(struct hci_dev *hdev, u16 opcode, u8 status, |
| 47 | hci_req_complete_t *req_complete, |
| 48 | hci_req_complete_skb_t *req_complete_skb); |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 49 | |
/* Synchronous request execution: run the supplied builder callback and
 * wait up to @timeout for completion, reporting the HCI status through
 * @hci_status when non-NULL.
 *
 * NOTE(review): by kernel convention the __ variant presumably expects
 * hdev->req_lock to already be held (see hci_req_sync_lock() above) while
 * hci_req_sync() takes it itself — confirm against hci_request.c.
 */
int hci_req_sync(struct hci_dev *hdev, int (*req)(struct hci_request *req,
						  unsigned long opt),
		 unsigned long opt, u32 timeout, u8 *hci_status);
int __hci_req_sync(struct hci_dev *hdev, int (*func)(struct hci_request *req,
						     unsigned long opt),
		   unsigned long opt, u32 timeout, u8 *hci_status);
void hci_req_sync_cancel(struct hci_dev *hdev, int err);

/* Build a command skb (opcode + plen bytes of param) without queuing it. */
struct sk_buff *hci_prepare_cmd(struct hci_dev *hdev, u16 opcode, u32 plen,
				const void *param);
| 60 | |
/* Builders that append specific HCI command sequences to a request.
 * NOTE(review): the __ prefix presumably marks variants meant to be called
 * with the appropriate lock held / from within request context — confirm
 * against the definitions in hci_request.c.
 */
int __hci_req_hci_power_on(struct hci_dev *hdev);

void __hci_req_write_fast_connectable(struct hci_request *req, bool enable);
void __hci_req_update_name(struct hci_request *req);
void __hci_req_update_eir(struct hci_request *req);

/* LE scanning control */
void hci_req_add_le_scan_disable(struct hci_request *req);
void hci_req_add_le_passive_scan(struct hci_request *req);

/* Advertising control and advertising-instance management */
void hci_req_reenable_advertising(struct hci_dev *hdev);
void __hci_req_enable_advertising(struct hci_request *req);
void __hci_req_disable_advertising(struct hci_request *req);
void __hci_req_update_adv_data(struct hci_request *req, u8 instance);
int hci_req_update_adv_data(struct hci_dev *hdev, u8 instance);
void __hci_req_update_scan_rsp_data(struct hci_request *req, u8 instance);

int __hci_req_schedule_adv_instance(struct hci_request *req, u8 instance,
				    bool force);
void hci_req_clear_adv_instance(struct hci_dev *hdev, struct sock *sk,
				struct hci_request *req, u8 instance,
				bool force);

/* Update the Class of Device */
void __hci_req_update_class(struct hci_request *req);

/* Returns true if HCI commands were queued */
bool hci_req_stop_discovery(struct hci_request *req);
| 87 | |
/* Schedule the (page/inquiry) scan update work item on the controller's
 * request workqueue; the actual update runs asynchronously via
 * __hci_req_update_scan().
 */
static inline void hci_req_update_scan(struct hci_dev *hdev)
{
	queue_work(hdev->req_workqueue, &hdev->scan_update);
}

void __hci_req_update_scan(struct hci_request *req);
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 94 | |
| 95 | int hci_update_random_address(struct hci_request *req, bool require_privacy, |
Johan Hedberg | 82a37ad | 2016-03-09 17:30:34 +0200 | [diff] [blame] | 96 | bool use_rpa, u8 *own_addr_type); |
Johan Hedberg | 2cf2221 | 2014-12-19 22:26:00 +0200 | [diff] [blame] | 97 | |
Johan Hedberg | dcc0f0d | 2015-10-22 10:49:37 +0300 | [diff] [blame] | 98 | int hci_abort_conn(struct hci_conn *conn, u8 reason); |
| 99 | void __hci_abort_conn(struct hci_request *req, struct hci_conn *conn, |
| 100 | u8 reason); |
Johan Hedberg | 5fc16cc | 2015-11-11 08:11:16 +0200 | [diff] [blame] | 101 | |
/* Schedule the background (passive LE) scan update work item on the
 * controller's request workqueue; runs asynchronously.
 */
static inline void hci_update_background_scan(struct hci_dev *hdev)
{
	queue_work(hdev->req_workqueue, &hdev->bg_scan_update);
}
| 106 | |
/* Per-controller setup/teardown of the request machinery (work items). */
void hci_request_setup(struct hci_dev *hdev);
void hci_request_cancel_all(struct hci_dev *hdev);

/* Append the local name AD field at @ptr; @ad_len is the length already
 * used, and the new total length is returned.
 */
u8 append_local_name(struct hci_dev *hdev, u8 *ptr, u8 ad_len);
| 111 | |
Michał Narajowski | 1b42206 | 2016-10-05 12:28:27 +0200 | [diff] [blame] | 112 | static inline u16 eir_append_data(u8 *eir, u16 eir_len, u8 type, |
| 113 | u8 *data, u8 data_len) |
| 114 | { |
| 115 | eir[eir_len++] = sizeof(type) + data_len; |
| 116 | eir[eir_len++] = type; |
| 117 | memcpy(&eir[eir_len], data, data_len); |
| 118 | eir_len += data_len; |
| 119 | |
| 120 | return eir_len; |
| 121 | } |
| 122 | |
| 123 | static inline u16 eir_append_le16(u8 *eir, u16 eir_len, u8 type, u16 data) |
| 124 | { |
| 125 | eir[eir_len++] = sizeof(type) + sizeof(data); |
| 126 | eir[eir_len++] = type; |
| 127 | put_unaligned_le16(data, &eir[eir_len]); |
| 128 | eir_len += sizeof(data); |
| 129 | |
| 130 | return eir_len; |
| 131 | } |