Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2008-2012 Niels Provos, Nick Mathewson |
| 3 | * |
| 4 | * Redistribution and use in source and binary forms, with or without |
| 5 | * modification, are permitted provided that the following conditions |
| 6 | * are met: |
| 7 | * 1. Redistributions of source code must retain the above copyright |
| 8 | * notice, this list of conditions and the following disclaimer. |
| 9 | * 2. Redistributions in binary form must reproduce the above copyright |
| 10 | * notice, this list of conditions and the following disclaimer in the |
| 11 | * documentation and/or other materials provided with the distribution. |
| 12 | * 3. The name of the author may not be used to endorse or promote products |
| 13 | * derived from this software without specific prior written permission. |
| 14 | * |
| 15 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
| 16 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
| 17 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
| 18 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
| 19 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
| 20 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 21 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 22 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 23 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
| 24 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 25 | */ |
| 26 | |
| 27 | #include "event2/event-config.h" |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 28 | #include "evconfig-private.h" |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 29 | |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 30 | #ifndef EVENT__DISABLE_THREAD_SUPPORT |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 31 | |
| 32 | #include "event2/thread.h" |
| 33 | |
| 34 | #include <stdlib.h> |
| 35 | #include <string.h> |
| 36 | |
| 37 | #include "log-internal.h" |
| 38 | #include "mm-internal.h" |
| 39 | #include "util-internal.h" |
| 40 | #include "evthread-internal.h" |
| 41 | |
| 42 | #ifdef EVTHREAD_EXPOSE_STRUCTS |
| 43 | #define GLOBAL |
| 44 | #else |
| 45 | #define GLOBAL static |
| 46 | #endif |
| 47 | |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 48 | #ifndef EVENT__DISABLE_DEBUG_MODE |
Haibo Huang | b227967 | 2019-05-31 16:12:39 -0700 | [diff] [blame] | 49 | extern int event_debug_created_threadable_ctx_; |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 50 | extern int event_debug_mode_on_; |
| 51 | #endif |
| 52 | |
/* globals */
/* Nonzero iff the debug-lock wrappers are currently installed. */
GLOBAL int evthread_lock_debugging_enabled_ = 0;
/* Live lock callbacks; replaced by the debug wrappers when debugging is on. */
GLOBAL struct evthread_lock_callbacks evthread_lock_fns_ = {
	0, 0, NULL, NULL, NULL, NULL
};
/* Callback that returns the calling thread's id; NULL until the app sets it. */
GLOBAL unsigned long (*evthread_id_fn_)(void) = NULL;
/* Live condition-variable callbacks. */
GLOBAL struct evthread_condition_callbacks evthread_cond_fns_ = {
	0, NULL, NULL, NULL, NULL
};

/* Used for debugging: the real (pre-debug) callbacks that the debug
 * wrappers delegate to once lock debugging has been enabled. */
static struct evthread_lock_callbacks original_lock_fns_ = {
	0, 0, NULL, NULL, NULL, NULL
};
static struct evthread_condition_callbacks original_cond_fns_ = {
	0, NULL, NULL, NULL, NULL
};
| 70 | |
/** Install the callback used to obtain the current thread's numeric id.
 * Passing NULL disables thread-id tracking. */
void
evthread_set_id_callback(unsigned long (*id_fn)(void))
{
	evthread_id_fn_ = id_fn;
}
| 76 | |
| 77 | struct evthread_lock_callbacks *evthread_get_lock_callbacks() |
| 78 | { |
| 79 | return evthread_lock_debugging_enabled_ |
| 80 | ? &original_lock_fns_ : &evthread_lock_fns_; |
| 81 | } |
| 82 | struct evthread_condition_callbacks *evthread_get_condition_callbacks() |
| 83 | { |
| 84 | return evthread_lock_debugging_enabled_ |
| 85 | ? &original_cond_fns_ : &evthread_cond_fns_; |
| 86 | } |
/** Turn off the lock-debugging flag (internal use; does not restore the
 * original callbacks that evthread_enable_lock_debugging() saved). */
void evthreadimpl_disable_lock_debugging_(void)
{
	evthread_lock_debugging_enabled_ = 0;
}
| 91 | |
| 92 | int |
| 93 | evthread_set_lock_callbacks(const struct evthread_lock_callbacks *cbs) |
| 94 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 95 | struct evthread_lock_callbacks *target = evthread_get_lock_callbacks(); |
| 96 | |
| 97 | #ifndef EVENT__DISABLE_DEBUG_MODE |
| 98 | if (event_debug_mode_on_) { |
| 99 | if (event_debug_created_threadable_ctx_) { |
| 100 | event_errx(1, "evthread initialization must be called BEFORE anything else!"); |
| 101 | } |
| 102 | } |
| 103 | #endif |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 104 | |
| 105 | if (!cbs) { |
| 106 | if (target->alloc) |
| 107 | event_warnx("Trying to disable lock functions after " |
| 108 | "they have been set up will probaby not work."); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 109 | memset(target, 0, sizeof(evthread_lock_fns_)); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 110 | return 0; |
| 111 | } |
| 112 | if (target->alloc) { |
| 113 | /* Uh oh; we already had locking callbacks set up.*/ |
| 114 | if (target->lock_api_version == cbs->lock_api_version && |
| 115 | target->supported_locktypes == cbs->supported_locktypes && |
| 116 | target->alloc == cbs->alloc && |
| 117 | target->free == cbs->free && |
| 118 | target->lock == cbs->lock && |
| 119 | target->unlock == cbs->unlock) { |
| 120 | /* no change -- allow this. */ |
| 121 | return 0; |
| 122 | } |
| 123 | event_warnx("Can't change lock callbacks once they have been " |
| 124 | "initialized."); |
| 125 | return -1; |
| 126 | } |
| 127 | if (cbs->alloc && cbs->free && cbs->lock && cbs->unlock) { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 128 | memcpy(target, cbs, sizeof(evthread_lock_fns_)); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 129 | return event_global_setup_locks_(1); |
| 130 | } else { |
| 131 | return -1; |
| 132 | } |
| 133 | } |
| 134 | |
| 135 | int |
| 136 | evthread_set_condition_callbacks(const struct evthread_condition_callbacks *cbs) |
| 137 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 138 | struct evthread_condition_callbacks *target = evthread_get_condition_callbacks(); |
| 139 | |
| 140 | #ifndef EVENT__DISABLE_DEBUG_MODE |
| 141 | if (event_debug_mode_on_) { |
| 142 | if (event_debug_created_threadable_ctx_) { |
| 143 | event_errx(1, "evthread initialization must be called BEFORE anything else!"); |
| 144 | } |
| 145 | } |
| 146 | #endif |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 147 | |
| 148 | if (!cbs) { |
| 149 | if (target->alloc_condition) |
| 150 | event_warnx("Trying to disable condition functions " |
| 151 | "after they have been set up will probaby not " |
| 152 | "work."); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 153 | memset(target, 0, sizeof(evthread_cond_fns_)); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 154 | return 0; |
| 155 | } |
| 156 | if (target->alloc_condition) { |
| 157 | /* Uh oh; we already had condition callbacks set up.*/ |
| 158 | if (target->condition_api_version == cbs->condition_api_version && |
| 159 | target->alloc_condition == cbs->alloc_condition && |
| 160 | target->free_condition == cbs->free_condition && |
| 161 | target->signal_condition == cbs->signal_condition && |
| 162 | target->wait_condition == cbs->wait_condition) { |
| 163 | /* no change -- allow this. */ |
| 164 | return 0; |
| 165 | } |
| 166 | event_warnx("Can't change condition callbacks once they " |
| 167 | "have been initialized."); |
| 168 | return -1; |
| 169 | } |
| 170 | if (cbs->alloc_condition && cbs->free_condition && |
| 171 | cbs->signal_condition && cbs->wait_condition) { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 172 | memcpy(target, cbs, sizeof(evthread_cond_fns_)); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 173 | } |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 174 | if (evthread_lock_debugging_enabled_) { |
| 175 | evthread_cond_fns_.alloc_condition = cbs->alloc_condition; |
| 176 | evthread_cond_fns_.free_condition = cbs->free_condition; |
| 177 | evthread_cond_fns_.signal_condition = cbs->signal_condition; |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 178 | } |
| 179 | return 0; |
| 180 | } |
| 181 | |
/* Magic value stamped into every live debug lock; checked by the debug
 * wrappers to catch use of freed or corrupted locks. */
#define DEBUG_LOCK_SIG	0xdeb0b10c

struct debug_lock {
	unsigned signature;	/* DEBUG_LOCK_SIG while alive; poisoned on free */
	unsigned locktype;	/* EVTHREAD_LOCKTYPE_* flags passed at alloc */
	unsigned long held_by;	/* owner thread id, when evthread_id_fn_ is set */
	/* XXXX if we ever use read-write locks, we will need a separate
	 * lock to protect count. */
	int count;		/* recursion depth; 0 when not held */
	void *lock;		/* underlying real lock, or NULL if none */
};
| 193 | |
| 194 | static void * |
| 195 | debug_lock_alloc(unsigned locktype) |
| 196 | { |
| 197 | struct debug_lock *result = mm_malloc(sizeof(struct debug_lock)); |
| 198 | if (!result) |
| 199 | return NULL; |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 200 | if (original_lock_fns_.alloc) { |
| 201 | if (!(result->lock = original_lock_fns_.alloc( |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 202 | locktype|EVTHREAD_LOCKTYPE_RECURSIVE))) { |
| 203 | mm_free(result); |
| 204 | return NULL; |
| 205 | } |
| 206 | } else { |
| 207 | result->lock = NULL; |
| 208 | } |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 209 | result->signature = DEBUG_LOCK_SIG; |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 210 | result->locktype = locktype; |
| 211 | result->count = 0; |
| 212 | result->held_by = 0; |
| 213 | return result; |
| 214 | } |
| 215 | |
| 216 | static void |
| 217 | debug_lock_free(void *lock_, unsigned locktype) |
| 218 | { |
| 219 | struct debug_lock *lock = lock_; |
| 220 | EVUTIL_ASSERT(lock->count == 0); |
| 221 | EVUTIL_ASSERT(locktype == lock->locktype); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 222 | EVUTIL_ASSERT(DEBUG_LOCK_SIG == lock->signature); |
| 223 | if (original_lock_fns_.free) { |
| 224 | original_lock_fns_.free(lock->lock, |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 225 | lock->locktype|EVTHREAD_LOCKTYPE_RECURSIVE); |
| 226 | } |
| 227 | lock->lock = NULL; |
| 228 | lock->count = -100; |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 229 | lock->signature = 0x12300fda; |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 230 | mm_free(lock); |
| 231 | } |
| 232 | |
| 233 | static void |
| 234 | evthread_debug_lock_mark_locked(unsigned mode, struct debug_lock *lock) |
| 235 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 236 | EVUTIL_ASSERT(DEBUG_LOCK_SIG == lock->signature); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 237 | ++lock->count; |
| 238 | if (!(lock->locktype & EVTHREAD_LOCKTYPE_RECURSIVE)) |
| 239 | EVUTIL_ASSERT(lock->count == 1); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 240 | if (evthread_id_fn_) { |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 241 | unsigned long me; |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 242 | me = evthread_id_fn_(); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 243 | if (lock->count > 1) |
| 244 | EVUTIL_ASSERT(lock->held_by == me); |
| 245 | lock->held_by = me; |
| 246 | } |
| 247 | } |
| 248 | |
| 249 | static int |
| 250 | debug_lock_lock(unsigned mode, void *lock_) |
| 251 | { |
| 252 | struct debug_lock *lock = lock_; |
| 253 | int res = 0; |
| 254 | if (lock->locktype & EVTHREAD_LOCKTYPE_READWRITE) |
| 255 | EVUTIL_ASSERT(mode & (EVTHREAD_READ|EVTHREAD_WRITE)); |
| 256 | else |
| 257 | EVUTIL_ASSERT((mode & (EVTHREAD_READ|EVTHREAD_WRITE)) == 0); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 258 | if (original_lock_fns_.lock) |
| 259 | res = original_lock_fns_.lock(mode, lock->lock); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 260 | if (!res) { |
| 261 | evthread_debug_lock_mark_locked(mode, lock); |
| 262 | } |
| 263 | return res; |
| 264 | } |
| 265 | |
| 266 | static void |
| 267 | evthread_debug_lock_mark_unlocked(unsigned mode, struct debug_lock *lock) |
| 268 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 269 | EVUTIL_ASSERT(DEBUG_LOCK_SIG == lock->signature); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 270 | if (lock->locktype & EVTHREAD_LOCKTYPE_READWRITE) |
| 271 | EVUTIL_ASSERT(mode & (EVTHREAD_READ|EVTHREAD_WRITE)); |
| 272 | else |
| 273 | EVUTIL_ASSERT((mode & (EVTHREAD_READ|EVTHREAD_WRITE)) == 0); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 274 | if (evthread_id_fn_) { |
| 275 | unsigned long me; |
| 276 | me = evthread_id_fn_(); |
| 277 | EVUTIL_ASSERT(lock->held_by == me); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 278 | if (lock->count == 1) |
| 279 | lock->held_by = 0; |
| 280 | } |
| 281 | --lock->count; |
| 282 | EVUTIL_ASSERT(lock->count >= 0); |
| 283 | } |
| 284 | |
| 285 | static int |
| 286 | debug_lock_unlock(unsigned mode, void *lock_) |
| 287 | { |
| 288 | struct debug_lock *lock = lock_; |
| 289 | int res = 0; |
| 290 | evthread_debug_lock_mark_unlocked(mode, lock); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 291 | if (original_lock_fns_.unlock) |
| 292 | res = original_lock_fns_.unlock(mode, lock->lock); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 293 | return res; |
| 294 | } |
| 295 | |
/* Debug wrapper for wait_condition: the caller must hold 'lock_'.  We mark
 * the debug lock released before waiting on the real lock beneath it, and
 * re-mark it held after the wait returns -- mirroring what the backend's
 * condition wait does with the real lock. */
static int
debug_cond_wait(void *cond_, void *lock_, const struct timeval *tv)
{
	int r;
	struct debug_lock *lock = lock_;
	EVUTIL_ASSERT(lock);
	EVUTIL_ASSERT(DEBUG_LOCK_SIG == lock->signature);
	EVLOCK_ASSERT_LOCKED(lock_);
	evthread_debug_lock_mark_unlocked(0, lock);
	/* Wait on the real lock; the debug wrapper's state is restored below. */
	r = original_cond_fns_.wait_condition(cond_, lock->lock, tv);
	evthread_debug_lock_mark_locked(0, lock);
	return r;
}
| 309 | |
/* Misspelled version, kept for backward ABI/API compatibility with callers
 * built against the old name.  Delegates to the correctly spelled function. */
void
evthread_enable_lock_debuging(void)
{
	evthread_enable_lock_debugging();
}
| 316 | |
| 317 | void |
| 318 | evthread_enable_lock_debugging(void) |
| 319 | { |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 320 | struct evthread_lock_callbacks cbs = { |
| 321 | EVTHREAD_LOCK_API_VERSION, |
| 322 | EVTHREAD_LOCKTYPE_RECURSIVE, |
| 323 | debug_lock_alloc, |
| 324 | debug_lock_free, |
| 325 | debug_lock_lock, |
| 326 | debug_lock_unlock |
| 327 | }; |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 328 | if (evthread_lock_debugging_enabled_) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 329 | return; |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 330 | memcpy(&original_lock_fns_, &evthread_lock_fns_, |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 331 | sizeof(struct evthread_lock_callbacks)); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 332 | memcpy(&evthread_lock_fns_, &cbs, |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 333 | sizeof(struct evthread_lock_callbacks)); |
| 334 | |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 335 | memcpy(&original_cond_fns_, &evthread_cond_fns_, |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 336 | sizeof(struct evthread_condition_callbacks)); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 337 | evthread_cond_fns_.wait_condition = debug_cond_wait; |
| 338 | evthread_lock_debugging_enabled_ = 1; |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 339 | |
| 340 | /* XXX return value should get checked. */ |
| 341 | event_global_setup_locks_(0); |
| 342 | } |
| 343 | |
| 344 | int |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 345 | evthread_is_debug_lock_held_(void *lock_) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 346 | { |
| 347 | struct debug_lock *lock = lock_; |
| 348 | if (! lock->count) |
| 349 | return 0; |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 350 | if (evthread_id_fn_) { |
| 351 | unsigned long me = evthread_id_fn_(); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 352 | if (lock->held_by != me) |
| 353 | return 0; |
| 354 | } |
| 355 | return 1; |
| 356 | } |
| 357 | |
| 358 | void * |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 359 | evthread_debug_get_real_lock_(void *lock_) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 360 | { |
| 361 | struct debug_lock *lock = lock_; |
| 362 | return lock->lock; |
| 363 | } |
| 364 | |
| 365 | void * |
| 366 | evthread_setup_global_lock_(void *lock_, unsigned locktype, int enable_locks) |
| 367 | { |
| 368 | /* there are four cases here: |
| 369 | 1) we're turning on debugging; locking is not on. |
| 370 | 2) we're turning on debugging; locking is on. |
| 371 | 3) we're turning on locking; debugging is not on. |
| 372 | 4) we're turning on locking; debugging is on. */ |
| 373 | |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 374 | if (!enable_locks && original_lock_fns_.alloc == NULL) { |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 375 | /* Case 1: allocate a debug lock. */ |
| 376 | EVUTIL_ASSERT(lock_ == NULL); |
| 377 | return debug_lock_alloc(locktype); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 378 | } else if (!enable_locks && original_lock_fns_.alloc != NULL) { |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 379 | /* Case 2: wrap the lock in a debug lock. */ |
| 380 | struct debug_lock *lock; |
| 381 | EVUTIL_ASSERT(lock_ != NULL); |
| 382 | |
| 383 | if (!(locktype & EVTHREAD_LOCKTYPE_RECURSIVE)) { |
| 384 | /* We can't wrap it: We need a recursive lock */ |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 385 | original_lock_fns_.free(lock_, locktype); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 386 | return debug_lock_alloc(locktype); |
| 387 | } |
| 388 | lock = mm_malloc(sizeof(struct debug_lock)); |
| 389 | if (!lock) { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 390 | original_lock_fns_.free(lock_, locktype); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 391 | return NULL; |
| 392 | } |
| 393 | lock->lock = lock_; |
| 394 | lock->locktype = locktype; |
| 395 | lock->count = 0; |
| 396 | lock->held_by = 0; |
| 397 | return lock; |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 398 | } else if (enable_locks && ! evthread_lock_debugging_enabled_) { |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 399 | /* Case 3: allocate a regular lock */ |
| 400 | EVUTIL_ASSERT(lock_ == NULL); |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 401 | return evthread_lock_fns_.alloc(locktype); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 402 | } else { |
| 403 | /* Case 4: Fill in a debug lock with a real lock */ |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 404 | struct debug_lock *lock = lock_ ? lock_ : debug_lock_alloc(locktype); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 405 | EVUTIL_ASSERT(enable_locks && |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 406 | evthread_lock_debugging_enabled_); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 407 | EVUTIL_ASSERT(lock->locktype == locktype); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 408 | if (!lock->lock) { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 409 | lock->lock = original_lock_fns_.alloc( |
| 410 | locktype|EVTHREAD_LOCKTYPE_RECURSIVE); |
| 411 | if (!lock->lock) { |
| 412 | lock->count = -200; |
| 413 | mm_free(lock); |
| 414 | return NULL; |
| 415 | } |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 416 | } |
| 417 | return lock; |
| 418 | } |
| 419 | } |
| 420 | |
| 421 | |
| 422 | #ifndef EVTHREAD_EXPOSE_STRUCTS |
| 423 | unsigned long |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 424 | evthreadimpl_get_id_() |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 425 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 426 | return evthread_id_fn_ ? evthread_id_fn_() : 1; |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 427 | } |
| 428 | void * |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 429 | evthreadimpl_lock_alloc_(unsigned locktype) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 430 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 431 | #ifndef EVENT__DISABLE_DEBUG_MODE |
| 432 | if (event_debug_mode_on_) { |
| 433 | event_debug_created_threadable_ctx_ = 1; |
| 434 | } |
| 435 | #endif |
| 436 | |
| 437 | return evthread_lock_fns_.alloc ? |
| 438 | evthread_lock_fns_.alloc(locktype) : NULL; |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 439 | } |
| 440 | void |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 441 | evthreadimpl_lock_free_(void *lock, unsigned locktype) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 442 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 443 | if (evthread_lock_fns_.free) |
| 444 | evthread_lock_fns_.free(lock, locktype); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 445 | } |
| 446 | int |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 447 | evthreadimpl_lock_lock_(unsigned mode, void *lock) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 448 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 449 | if (evthread_lock_fns_.lock) |
| 450 | return evthread_lock_fns_.lock(mode, lock); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 451 | else |
| 452 | return 0; |
| 453 | } |
| 454 | int |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 455 | evthreadimpl_lock_unlock_(unsigned mode, void *lock) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 456 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 457 | if (evthread_lock_fns_.unlock) |
| 458 | return evthread_lock_fns_.unlock(mode, lock); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 459 | else |
| 460 | return 0; |
| 461 | } |
| 462 | void * |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 463 | evthreadimpl_cond_alloc_(unsigned condtype) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 464 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 465 | #ifndef EVENT__DISABLE_DEBUG_MODE |
| 466 | if (event_debug_mode_on_) { |
| 467 | event_debug_created_threadable_ctx_ = 1; |
| 468 | } |
| 469 | #endif |
| 470 | |
| 471 | return evthread_cond_fns_.alloc_condition ? |
| 472 | evthread_cond_fns_.alloc_condition(condtype) : NULL; |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 473 | } |
| 474 | void |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 475 | evthreadimpl_cond_free_(void *cond) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 476 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 477 | if (evthread_cond_fns_.free_condition) |
| 478 | evthread_cond_fns_.free_condition(cond); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 479 | } |
| 480 | int |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 481 | evthreadimpl_cond_signal_(void *cond, int broadcast) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 482 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 483 | if (evthread_cond_fns_.signal_condition) |
| 484 | return evthread_cond_fns_.signal_condition(cond, broadcast); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 485 | else |
| 486 | return 0; |
| 487 | } |
| 488 | int |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 489 | evthreadimpl_cond_wait_(void *cond, void *lock, const struct timeval *tv) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 490 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 491 | if (evthread_cond_fns_.wait_condition) |
| 492 | return evthread_cond_fns_.wait_condition(cond, lock, tv); |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 493 | else |
| 494 | return 0; |
| 495 | } |
/** Report whether the debug-lock wrappers are currently installed. */
int
evthreadimpl_is_lock_debugging_enabled_(void)
{
	return evthread_lock_debugging_enabled_;
}
| 501 | |
| 502 | int |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 503 | evthreadimpl_locking_enabled_(void) |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 504 | { |
Narayan Kamath | fc74cb4 | 2017-09-13 12:53:52 +0100 | [diff] [blame] | 505 | return evthread_lock_fns_.lock != NULL; |
Christopher Wiley | e867981 | 2015-07-01 13:36:18 -0700 | [diff] [blame] | 506 | } |
| 507 | #endif |
| 508 | |
| 509 | #endif |