Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 1 | /* |
| 2 | * |
| 3 | * Copyright 2017 gRPC authors. |
| 4 | * |
| 5 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 6 | * you may not use this file except in compliance with the License. |
| 7 | * You may obtain a copy of the License at |
| 8 | * |
| 9 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 10 | * |
| 11 | * Unless required by applicable law or agreed to in writing, software |
| 12 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 14 | * See the License for the specific language governing permissions and |
| 15 | * limitations under the License. |
| 16 | * |
| 17 | */ |
| 18 | |
Alexander Polcyn | db3e898 | 2018-02-21 16:59:24 -0800 | [diff] [blame] | 19 | #include <grpc/support/port_platform.h> |
| 20 | |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 21 | #include "src/core/lib/iomgr/call_combiner.h" |
| 22 | |
Yash Tibrewal | fcd26bc | 2017-09-25 15:08:28 -0700 | [diff] [blame] | 23 | #include <inttypes.h> |
| 24 | |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 25 | #include <grpc/support/log.h> |
Craig Tiller | fa7ae24 | 2017-10-12 09:37:57 -0700 | [diff] [blame] | 26 | #include "src/core/lib/debug/stats.h" |
Craig Tiller | f6086b3 | 2017-10-12 22:25:10 -0700 | [diff] [blame] | 27 | #include "src/core/lib/profiling/timers.h" |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 28 | |
Craig Tiller | 694580f | 2017-10-18 14:48:14 -0700 | [diff] [blame] | 29 | grpc_core::TraceFlag grpc_call_combiner_trace(false, "call_combiner"); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 30 | |
| 31 | static grpc_error* decode_cancel_state_error(gpr_atm cancel_state) { |
| 32 | if (cancel_state & 1) { |
Noah Eisen | be82e64 | 2018-02-09 09:16:55 -0800 | [diff] [blame] | 33 | return (grpc_error*)(cancel_state & ~static_cast<gpr_atm>(1)); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 34 | } |
| 35 | return GRPC_ERROR_NONE; |
| 36 | } |
| 37 | |
| 38 | static gpr_atm encode_cancel_state_error(grpc_error* error) { |
Noah Eisen | be82e64 | 2018-02-09 09:16:55 -0800 | [diff] [blame] | 39 | return static_cast<gpr_atm>(1) | (gpr_atm)error; |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 40 | } |
| 41 | |
| 42 | void grpc_call_combiner_init(grpc_call_combiner* call_combiner) { |
| 43 | gpr_mpscq_init(&call_combiner->queue); |
| 44 | } |
| 45 | |
| 46 | void grpc_call_combiner_destroy(grpc_call_combiner* call_combiner) { |
| 47 | gpr_mpscq_destroy(&call_combiner->queue); |
| 48 | GRPC_ERROR_UNREF(decode_cancel_state_error(call_combiner->cancel_state)); |
| 49 | } |
| 50 | |
// In debug builds, the start/stop entry points take the caller's file and
// line so trace output can attribute each acquisition/release; in release
// builds the extra parameters (and the matching format fragment) compile
// away to nothing.
#ifndef NDEBUG
#define DEBUG_ARGS , const char *file, int line
#define DEBUG_FMT_STR "%s:%d: "
#define DEBUG_FMT_ARGS , file, line
#else
#define DEBUG_ARGS
#define DEBUG_FMT_STR
#define DEBUG_FMT_ARGS
#endif
| 60 | |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 61 | void grpc_call_combiner_start(grpc_call_combiner* call_combiner, |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 62 | grpc_closure* closure, |
| 63 | grpc_error* error DEBUG_ARGS, |
| 64 | const char* reason) { |
yang-g | ce1cfea | 2018-01-31 15:59:50 -0800 | [diff] [blame] | 65 | GPR_TIMER_SCOPE("call_combiner_start", 0); |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 66 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 67 | gpr_log(GPR_INFO, |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 68 | "==> grpc_call_combiner_start() [%p] closure=%p [" DEBUG_FMT_STR |
| 69 | "%s] error=%s", |
| 70 | call_combiner, closure DEBUG_FMT_ARGS, reason, |
| 71 | grpc_error_string(error)); |
| 72 | } |
Noah Eisen | 4d20a66 | 2018-02-09 09:34:04 -0800 | [diff] [blame] | 73 | size_t prev_size = static_cast<size_t>( |
| 74 | gpr_atm_full_fetch_add(&call_combiner->size, (gpr_atm)1)); |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 75 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 76 | gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size, |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 77 | prev_size + 1); |
| 78 | } |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 79 | GRPC_STATS_INC_CALL_COMBINER_LOCKS_SCHEDULED_ITEMS(); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 80 | if (prev_size == 0) { |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 81 | GRPC_STATS_INC_CALL_COMBINER_LOCKS_INITIATED(); |
| 82 | |
Craig Tiller | f6086b3 | 2017-10-12 22:25:10 -0700 | [diff] [blame] | 83 | GPR_TIMER_MARK("call_combiner_initiate", 0); |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 84 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 85 | gpr_log(GPR_INFO, " EXECUTING IMMEDIATELY"); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 86 | } |
| 87 | // Queue was empty, so execute this closure immediately. |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 88 | GRPC_CLOSURE_SCHED(closure, error); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 89 | } else { |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 90 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 91 | gpr_log(GPR_INFO, " QUEUING"); |
| 92 | } |
| 93 | // Queue was not empty, so add closure to queue. |
| 94 | closure->error_data.error = error; |
Noah Eisen | 4d20a66 | 2018-02-09 09:34:04 -0800 | [diff] [blame] | 95 | gpr_mpscq_push(&call_combiner->queue, |
| 96 | reinterpret_cast<gpr_mpscq_node*>(closure)); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 97 | } |
| 98 | } |
| 99 | |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 100 | void grpc_call_combiner_stop(grpc_call_combiner* call_combiner DEBUG_ARGS, |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 101 | const char* reason) { |
yang-g | ce1cfea | 2018-01-31 15:59:50 -0800 | [diff] [blame] | 102 | GPR_TIMER_SCOPE("call_combiner_stop", 0); |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 103 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 104 | gpr_log(GPR_INFO, |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 105 | "==> grpc_call_combiner_stop() [%p] [" DEBUG_FMT_STR "%s]", |
| 106 | call_combiner DEBUG_FMT_ARGS, reason); |
| 107 | } |
Noah Eisen | 4d20a66 | 2018-02-09 09:34:04 -0800 | [diff] [blame] | 108 | size_t prev_size = static_cast<size_t>( |
| 109 | gpr_atm_full_fetch_add(&call_combiner->size, (gpr_atm)-1)); |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 110 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 111 | gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size, |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 112 | prev_size - 1); |
| 113 | } |
| 114 | GPR_ASSERT(prev_size >= 1); |
| 115 | if (prev_size > 1) { |
| 116 | while (true) { |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 117 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 118 | gpr_log(GPR_INFO, " checking queue"); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 119 | } |
| 120 | bool empty; |
Noah Eisen | 4d20a66 | 2018-02-09 09:34:04 -0800 | [diff] [blame] | 121 | grpc_closure* closure = reinterpret_cast<grpc_closure*>( |
| 122 | gpr_mpscq_pop_and_check_end(&call_combiner->queue, &empty)); |
Craig Tiller | 4782d92 | 2017-11-10 09:53:21 -0800 | [diff] [blame] | 123 | if (closure == nullptr) { |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 124 | // This can happen either due to a race condition within the mpscq |
| 125 | // code or because of a race with grpc_call_combiner_start(). |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 126 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 127 | gpr_log(GPR_INFO, " queue returned no result; checking again"); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 128 | } |
| 129 | continue; |
| 130 | } |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 131 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 132 | gpr_log(GPR_INFO, " EXECUTING FROM QUEUE: closure=%p error=%s", |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 133 | closure, grpc_error_string(closure->error_data.error)); |
| 134 | } |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 135 | GRPC_CLOSURE_SCHED(closure, closure->error_data.error); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 136 | break; |
| 137 | } |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 138 | } else if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 139 | gpr_log(GPR_INFO, " queue empty"); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 140 | } |
| 141 | } |
| 142 | |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 143 | void grpc_call_combiner_set_notify_on_cancel(grpc_call_combiner* call_combiner, |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 144 | grpc_closure* closure) { |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 145 | GRPC_STATS_INC_CALL_COMBINER_SET_NOTIFY_ON_CANCEL(); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 146 | while (true) { |
| 147 | // Decode original state. |
| 148 | gpr_atm original_state = gpr_atm_acq_load(&call_combiner->cancel_state); |
| 149 | grpc_error* original_error = decode_cancel_state_error(original_state); |
| 150 | // If error is set, invoke the cancellation closure immediately. |
| 151 | // Otherwise, store the new closure. |
| 152 | if (original_error != GRPC_ERROR_NONE) { |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 153 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 154 | gpr_log(GPR_INFO, |
Mark D. Roth | 66f3d2b | 2017-09-01 09:02:17 -0700 | [diff] [blame] | 155 | "call_combiner=%p: scheduling notify_on_cancel callback=%p " |
| 156 | "for pre-existing cancellation", |
| 157 | call_combiner, closure); |
| 158 | } |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 159 | GRPC_CLOSURE_SCHED(closure, GRPC_ERROR_REF(original_error)); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 160 | break; |
| 161 | } else { |
| 162 | if (gpr_atm_full_cas(&call_combiner->cancel_state, original_state, |
| 163 | (gpr_atm)closure)) { |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 164 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 165 | gpr_log(GPR_INFO, "call_combiner=%p: setting notify_on_cancel=%p", |
Mark D. Roth | 66f3d2b | 2017-09-01 09:02:17 -0700 | [diff] [blame] | 166 | call_combiner, closure); |
| 167 | } |
| 168 | // If we replaced an earlier closure, invoke the original |
| 169 | // closure with GRPC_ERROR_NONE. This allows callers to clean |
| 170 | // up any resources they may be holding for the callback. |
| 171 | if (original_state != 0) { |
| 172 | closure = (grpc_closure*)original_state; |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 173 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 174 | gpr_log(GPR_INFO, |
Mark D. Roth | 66f3d2b | 2017-09-01 09:02:17 -0700 | [diff] [blame] | 175 | "call_combiner=%p: scheduling old cancel callback=%p", |
| 176 | call_combiner, closure); |
| 177 | } |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 178 | GRPC_CLOSURE_SCHED(closure, GRPC_ERROR_NONE); |
Mark D. Roth | 66f3d2b | 2017-09-01 09:02:17 -0700 | [diff] [blame] | 179 | } |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 180 | break; |
| 181 | } |
| 182 | } |
| 183 | // cas failed, try again. |
| 184 | } |
| 185 | } |
| 186 | |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 187 | void grpc_call_combiner_cancel(grpc_call_combiner* call_combiner, |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 188 | grpc_error* error) { |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 189 | GRPC_STATS_INC_CALL_COMBINER_CANCELLED(); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 190 | while (true) { |
| 191 | gpr_atm original_state = gpr_atm_acq_load(&call_combiner->cancel_state); |
| 192 | grpc_error* original_error = decode_cancel_state_error(original_state); |
| 193 | if (original_error != GRPC_ERROR_NONE) { |
| 194 | GRPC_ERROR_UNREF(error); |
| 195 | break; |
| 196 | } |
| 197 | if (gpr_atm_full_cas(&call_combiner->cancel_state, original_state, |
| 198 | encode_cancel_state_error(error))) { |
| 199 | if (original_state != 0) { |
| 200 | grpc_closure* notify_on_cancel = (grpc_closure*)original_state; |
Craig Tiller | 6014e8a | 2017-10-16 13:50:29 -0700 | [diff] [blame] | 201 | if (grpc_call_combiner_trace.enabled()) { |
Mark D. Roth | 48854d2 | 2018-04-25 13:05:26 -0700 | [diff] [blame^] | 202 | gpr_log(GPR_INFO, |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 203 | "call_combiner=%p: scheduling notify_on_cancel callback=%p", |
| 204 | call_combiner, notify_on_cancel); |
| 205 | } |
Yash Tibrewal | 8cf1470 | 2017-12-06 09:47:54 -0800 | [diff] [blame] | 206 | GRPC_CLOSURE_SCHED(notify_on_cancel, GRPC_ERROR_REF(error)); |
Mark D. Roth | 764cf04 | 2017-09-01 09:00:06 -0700 | [diff] [blame] | 207 | } |
| 208 | break; |
| 209 | } |
| 210 | // cas failed, try again. |
| 211 | } |
| 212 | } |