Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 1 | /* |
| 2 | * |
Craig Tiller | 6169d5f | 2016-03-31 07:46:18 -0700 | [diff] [blame] | 3 | * Copyright 2015, Google Inc. |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 4 | * All rights reserved. |
| 5 | * |
| 6 | * Redistribution and use in source and binary forms, with or without |
| 7 | * modification, are permitted provided that the following conditions are |
| 8 | * met: |
| 9 | * |
| 10 | * * Redistributions of source code must retain the above copyright |
| 11 | * notice, this list of conditions and the following disclaimer. |
| 12 | * * Redistributions in binary form must reproduce the above |
| 13 | * copyright notice, this list of conditions and the following disclaimer |
| 14 | * in the documentation and/or other materials provided with the |
| 15 | * distribution. |
| 16 | * * Neither the name of Google Inc. nor the names of its |
| 17 | * contributors may be used to endorse or promote products derived from |
| 18 | * this software without specific prior written permission. |
| 19 | * |
| 20 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 21 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 22 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 23 | * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 24 | * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 25 | * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 26 | * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 27 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 28 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 29 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 30 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 31 | * |
| 32 | */ |
| 33 | |
Craig Tiller | 9533d04 | 2016-03-25 17:11:06 -0700 | [diff] [blame] | 34 | #include "src/core/lib/channel/channel_stack.h" |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 35 | #include <grpc/support/log.h> |
| 36 | |
| 37 | #include <stdlib.h> |
Craig Tiller | 83f88d9 | 2015-04-21 16:02:05 -0700 | [diff] [blame] | 38 | #include <string.h> |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 39 | |
/* Tracing flag for channel-stack operations; non-zero enables trace logging.
   NOTE(review): presumably toggled by the GRPC_TRACE tracer machinery
   elsewhere — confirm against the tracer registration site. */
int grpc_trace_channel = 0;
| 41 | |
/* Memory layouts.

   Channel stack is laid out as: {
     grpc_channel_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_channel_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   }

   Call stack is laid out as: {
     grpc_call_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_call_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   } */

/* Given a size, round up to the next multiple of GPR_MAX_ALIGNMENT
   (not sizeof(void*)).  Requires GPR_MAX_ALIGNMENT to be a power of two;
   this is asserted in grpc_channel_stack_size. */
#define ROUND_UP_TO_ALIGNMENT_SIZE(x) \
  (((x) + GPR_MAX_ALIGNMENT - 1u) & ~(GPR_MAX_ALIGNMENT - 1u))
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 61 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 62 | size_t grpc_channel_stack_size(const grpc_channel_filter **filters, |
| 63 | size_t filter_count) { |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 64 | /* always need the header, and size for the channel elements */ |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 65 | size_t size = |
| 66 | ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)) + |
| 67 | ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_channel_element)); |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 68 | size_t i; |
| 69 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 70 | GPR_ASSERT((GPR_MAX_ALIGNMENT & (GPR_MAX_ALIGNMENT - 1)) == 0 && |
| 71 | "GPR_MAX_ALIGNMENT must be a power of two"); |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 72 | |
| 73 | /* add the size for each filter */ |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 74 | for (i = 0; i < filter_count; i++) { |
| 75 | size += ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data); |
| 76 | } |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 77 | |
| 78 | return size; |
| 79 | } |
| 80 | |
/* Given a pointer to a channel stack header, return the first entry of the
   element array that immediately follows it (after alignment padding). */
#define CHANNEL_ELEMS_FROM_STACK(stk) \
  ((grpc_channel_element *)((char *)(stk) + ROUND_UP_TO_ALIGNMENT_SIZE( \
                                sizeof(grpc_channel_stack))))

/* Same as CHANNEL_ELEMS_FROM_STACK, but for a call stack header. */
#define CALL_ELEMS_FROM_STACK(stk) \
  ((grpc_call_element *)((char *)(stk) + \
                         ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack))))
| 88 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 89 | grpc_channel_element *grpc_channel_stack_element( |
| 90 | grpc_channel_stack *channel_stack, size_t index) { |
| 91 | return CHANNEL_ELEMS_FROM_STACK(channel_stack) + index; |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 92 | } |
| 93 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 94 | grpc_channel_element *grpc_channel_stack_last_element( |
| 95 | grpc_channel_stack *channel_stack) { |
| 96 | return grpc_channel_stack_element(channel_stack, channel_stack->count - 1); |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 97 | } |
| 98 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 99 | grpc_call_element *grpc_call_stack_element(grpc_call_stack *call_stack, |
| 100 | size_t index) { |
| 101 | return CALL_ELEMS_FROM_STACK(call_stack) + index; |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 102 | } |
| 103 | |
/* Initialize a channel stack in caller-provided memory (which must be at
   least grpc_channel_stack_size(filters, filter_count) bytes): sets up the
   refcount, lays out the element array and each filter's per-channel data
   slab, and invokes each filter's init_channel_elem hook.  Also accumulates
   the per-call memory each filter will need, storing the total in
   stack->call_stack_size for later grpc_call_stack_init calls. */
void grpc_channel_stack_init(grpc_exec_ctx *exec_ctx, int initial_refs,
                             grpc_iomgr_cb_func destroy, void *destroy_arg,
                             const grpc_channel_filter **filters,
                             size_t filter_count,
                             const grpc_channel_args *channel_args,
                             grpc_transport *optional_transport,
                             const char *name, grpc_channel_stack *stack) {
  /* start with the fixed call-stack overhead: header + element array */
  size_t call_size =
      ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)) +
      ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_call_element));
  grpc_channel_element *elems;
  grpc_channel_element_args args;
  char *user_data;
  size_t i;

  stack->count = filter_count;
  GRPC_STREAM_REF_INIT(&stack->refcount, initial_refs, destroy, destroy_arg,
                       name);
  elems = CHANNEL_ELEMS_FROM_STACK(stack);
  /* per-filter channel data begins right after the (aligned) element array */
  user_data =
      ((char *)elems) +
      ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_channel_element));

  /* init per-filter data */
  for (i = 0; i < filter_count; i++) {
    args.channel_stack = stack;
    args.channel_args = channel_args;
    args.optional_transport = optional_transport;
    args.is_first = i == 0;
    args.is_last = i == (filter_count - 1);
    elems[i].filter = filters[i];
    elems[i].channel_data = user_data;
    elems[i].filter->init_channel_elem(exec_ctx, &elems[i], &args);
    /* bump past this filter's channel-data slab, keeping alignment */
    user_data += ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
    call_size += ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_call_data);
  }

  /* the layout walk must land exactly at the size grpc_channel_stack_size
     predicted, or the two functions have diverged */
  GPR_ASSERT(user_data > (char *)stack);
  GPR_ASSERT((uintptr_t)(user_data - (char *)stack) ==
             grpc_channel_stack_size(filters, filter_count));

  stack->call_stack_size = call_size;
}
| 147 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 148 | void grpc_channel_stack_destroy(grpc_exec_ctx *exec_ctx, |
| 149 | grpc_channel_stack *stack) { |
| 150 | grpc_channel_element *channel_elems = CHANNEL_ELEMS_FROM_STACK(stack); |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 151 | size_t count = stack->count; |
| 152 | size_t i; |
| 153 | |
| 154 | /* destroy per-filter data */ |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 155 | for (i = 0; i < count; i++) { |
| 156 | channel_elems[i].filter->destroy_channel_elem(exec_ctx, &channel_elems[i]); |
| 157 | } |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 158 | } |
| 159 | |
/* Initialize a call stack in caller-provided memory (at least
   channel_stack->call_stack_size bytes): mirrors the channel stack's filter
   list, gives each filter an aligned slab of per-call data, and invokes its
   init_call_elem hook.  Returns the FIRST error any filter reported (later
   errors are unref'd) or GRPC_ERROR_NONE; note that every filter's init is
   attempted even after a failure, so the stack is uniformly initialized. */
grpc_error *grpc_call_stack_init(grpc_exec_ctx *exec_ctx,
                                 grpc_channel_stack *channel_stack,
                                 int initial_refs, grpc_iomgr_cb_func destroy,
                                 void *destroy_arg,
                                 grpc_call_context_element *context,
                                 const void *transport_server_data,
                                 grpc_call_stack *call_stack) {
  grpc_channel_element *channel_elems = CHANNEL_ELEMS_FROM_STACK(channel_stack);
  grpc_call_element_args args;
  size_t count = channel_stack->count;
  grpc_call_element *call_elems;
  char *user_data;
  size_t i;

  call_stack->count = count;
  GRPC_STREAM_REF_INIT(&call_stack->refcount, initial_refs, destroy,
                       destroy_arg, "CALL_STACK");
  call_elems = CALL_ELEMS_FROM_STACK(call_stack);
  /* per-call data begins right after the (aligned) element array */
  user_data = ((char *)call_elems) +
              ROUND_UP_TO_ALIGNMENT_SIZE(count * sizeof(grpc_call_element));

  /* init per-filter data */
  grpc_error *first_error = GRPC_ERROR_NONE;
  for (i = 0; i < count; i++) {
    args.call_stack = call_stack;
    args.server_transport_data = transport_server_data;
    args.context = context;
    call_elems[i].filter = channel_elems[i].filter;
    call_elems[i].channel_data = channel_elems[i].channel_data;
    call_elems[i].call_data = user_data;
    grpc_error *error =
        call_elems[i].filter->init_call_elem(exec_ctx, &call_elems[i], &args);
    if (error != GRPC_ERROR_NONE) {
      /* keep only the first failure; release any subsequent ones */
      if (first_error == GRPC_ERROR_NONE) {
        first_error = error;
      } else {
        GRPC_ERROR_UNREF(error);
      }
    }
    user_data +=
        ROUND_UP_TO_ALIGNMENT_SIZE(call_elems[i].filter->sizeof_call_data);
  }
  return first_error;
}
| 204 | |
David Garcia Quintas | f72eb97 | 2016-05-03 18:28:09 -0700 | [diff] [blame] | 205 | void grpc_call_stack_set_pollset_or_pollset_set(grpc_exec_ctx *exec_ctx, |
| 206 | grpc_call_stack *call_stack, |
David Garcia Quintas | 2a50dfe | 2016-05-31 15:09:12 -0700 | [diff] [blame] | 207 | grpc_polling_entity *pollent) { |
Craig Tiller | 577c9b2 | 2015-11-02 14:11:15 -0800 | [diff] [blame] | 208 | size_t count = call_stack->count; |
| 209 | grpc_call_element *call_elems; |
| 210 | char *user_data; |
| 211 | size_t i; |
| 212 | |
| 213 | call_elems = CALL_ELEMS_FROM_STACK(call_stack); |
| 214 | user_data = ((char *)call_elems) + |
| 215 | ROUND_UP_TO_ALIGNMENT_SIZE(count * sizeof(grpc_call_element)); |
| 216 | |
| 217 | /* init per-filter data */ |
| 218 | for (i = 0; i < count; i++) { |
David Garcia Quintas | f72eb97 | 2016-05-03 18:28:09 -0700 | [diff] [blame] | 219 | call_elems[i].filter->set_pollset_or_pollset_set(exec_ctx, &call_elems[i], |
David Garcia Quintas | 2a50dfe | 2016-05-31 15:09:12 -0700 | [diff] [blame] | 220 | pollent); |
Craig Tiller | 577c9b2 | 2015-11-02 14:11:15 -0800 | [diff] [blame] | 221 | user_data += |
| 222 | ROUND_UP_TO_ALIGNMENT_SIZE(call_elems[i].filter->sizeof_call_data); |
| 223 | } |
| 224 | } |
| 225 | |
/* Intentionally-empty set_pollset_or_pollset_set implementation, for filters
   that do not care about the call's polling entity; suitable for use as the
   hook in a grpc_channel_filter vtable. */
void grpc_call_stack_ignore_set_pollset_or_pollset_set(
    grpc_exec_ctx *exec_ctx, grpc_call_element *elem,
    grpc_polling_entity *pollent) {}
Craig Tiller | 577c9b2 | 2015-11-02 14:11:15 -0800 | [diff] [blame] | 229 | |
Craig Tiller | 2c8063c | 2016-03-22 22:12:15 -0700 | [diff] [blame] | 230 | void grpc_call_stack_destroy(grpc_exec_ctx *exec_ctx, grpc_call_stack *stack, |
David Garcia Quintas | 01c4d99 | 2016-07-07 20:11:27 -0700 | [diff] [blame] | 231 | const grpc_call_final_info *final_info, |
Craig Tiller | 2c8063c | 2016-03-22 22:12:15 -0700 | [diff] [blame] | 232 | void *and_free_memory) { |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 233 | grpc_call_element *elems = CALL_ELEMS_FROM_STACK(stack); |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 234 | size_t count = stack->count; |
| 235 | size_t i; |
| 236 | |
| 237 | /* destroy per-filter data */ |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 238 | for (i = 0; i < count; i++) { |
David Garcia Quintas | 01c4d99 | 2016-07-07 20:11:27 -0700 | [diff] [blame] | 239 | elems[i].filter->destroy_call_elem(exec_ctx, &elems[i], final_info, |
Craig Tiller | 2c8063c | 2016-03-22 22:12:15 -0700 | [diff] [blame] | 240 | i == count - 1 ? and_free_memory : NULL); |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 241 | } |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 242 | } |
| 243 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 244 | void grpc_call_next_op(grpc_exec_ctx *exec_ctx, grpc_call_element *elem, |
| 245 | grpc_transport_stream_op *op) { |
Craig Tiller | 83f88d9 | 2015-04-21 16:02:05 -0700 | [diff] [blame] | 246 | grpc_call_element *next_elem = elem + 1; |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 247 | next_elem->filter->start_transport_stream_op(exec_ctx, next_elem, op); |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 248 | } |
| 249 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 250 | char *grpc_call_next_get_peer(grpc_exec_ctx *exec_ctx, |
| 251 | grpc_call_element *elem) { |
Craig Tiller | 1b22b9d | 2015-07-20 13:42:22 -0700 | [diff] [blame] | 252 | grpc_call_element *next_elem = elem + 1; |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 253 | return next_elem->filter->get_peer(exec_ctx, next_elem); |
Craig Tiller | 1b22b9d | 2015-07-20 13:42:22 -0700 | [diff] [blame] | 254 | } |
| 255 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 256 | void grpc_channel_next_op(grpc_exec_ctx *exec_ctx, grpc_channel_element *elem, |
| 257 | grpc_transport_op *op) { |
Craig Tiller | 3f47542 | 2015-06-25 10:43:05 -0700 | [diff] [blame] | 258 | grpc_channel_element *next_elem = elem + 1; |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 259 | next_elem->filter->start_transport_op(exec_ctx, next_elem, op); |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 260 | } |
| 261 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 262 | grpc_channel_stack *grpc_channel_stack_from_top_element( |
| 263 | grpc_channel_element *elem) { |
| 264 | return (grpc_channel_stack *)((char *)(elem)-ROUND_UP_TO_ALIGNMENT_SIZE( |
| 265 | sizeof(grpc_channel_stack))); |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 266 | } |
| 267 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 268 | grpc_call_stack *grpc_call_stack_from_top_element(grpc_call_element *elem) { |
| 269 | return (grpc_call_stack *)((char *)(elem)-ROUND_UP_TO_ALIGNMENT_SIZE( |
| 270 | sizeof(grpc_call_stack))); |
Nicolas Noble | b7ebd3b | 2014-11-26 16:33:03 -0800 | [diff] [blame] | 271 | } |
| 272 | |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 273 | void grpc_call_element_send_cancel(grpc_exec_ctx *exec_ctx, |
| 274 | grpc_call_element *cur_elem) { |
Craig Tiller | b7959a0 | 2015-06-25 08:50:54 -0700 | [diff] [blame] | 275 | grpc_transport_stream_op op; |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 276 | memset(&op, 0, sizeof(op)); |
Craig Tiller | f0f70a8 | 2016-06-23 13:55:06 -0700 | [diff] [blame] | 277 | op.cancel_error = GRPC_ERROR_CANCELLED; |
Craig Tiller | a82950e | 2015-09-22 12:33:20 -0700 | [diff] [blame] | 278 | grpc_call_next_op(exec_ctx, cur_elem, &op); |
Craig Tiller | 190d360 | 2015-02-18 09:23:38 -0800 | [diff] [blame] | 279 | } |
Yuchen Zeng | ec066b3 | 2016-06-13 18:10:23 -0700 | [diff] [blame] | 280 | |
| 281 | void grpc_call_element_send_cancel_with_message(grpc_exec_ctx *exec_ctx, |
| 282 | grpc_call_element *cur_elem, |
| 283 | grpc_status_code status, |
| 284 | gpr_slice *optional_message) { |
| 285 | grpc_transport_stream_op op; |
| 286 | memset(&op, 0, sizeof(op)); |
| 287 | grpc_transport_stream_op_add_cancellation_with_message(&op, status, |
| 288 | optional_message); |
| 289 | grpc_call_next_op(exec_ctx, cur_elem, &op); |
| 290 | } |